Mirror of https://github.com/ail-project/ail-framework.git, synced 2024-11-10 08:38:28 +00:00
Fixed a bug in WebStats that did not delete the correct key + fixed a typo in CurveSetManager that prevented connecting to Redis + modified the display in moduleInfo
parent 611454ba82
commit 9146feab88

4 changed files with 71 additions and 27 deletions
@@ -106,9 +106,9 @@ if __name__ == '__main__':
     cfg.read(configfile)

     server_term = redis.StrictRedis(
-        host=p.config.get("Redis_Level_DB_TermFreq", "host"),
-        port=p.config.get("Redis_Level_DB_TermFreq", "port"),
-        db=p.config.get("Redis_Level_DB_TermFreq", "db"))
+        host=cfg.get("Redis_Level_DB_TermFreq", "host"),
+        port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
+        db=cfg.getint("Redis_Level_DB_TermFreq", "db"))

     publisher.info("Script Curve_manage_top_set started")

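This hunk is the CurveSetManager typo named in the commit message: the script builds its own ConfigParser object (cfg), so the p.config reference is swapped for cfg, and the numeric options are read with getint() so redis.StrictRedis receives integers rather than strings. Below is a minimal sketch of that corrected pattern, not part of the commit; the config path is illustrative, only the section and option names are taken from the diff.

    # Sketch only: connect to Redis from a ConfigParser config file.
    # get() always returns strings, so port and db are read with getint().
    import ConfigParser
    import redis

    cfg = ConfigParser.ConfigParser()
    cfg.read('packages/config.cfg')  # illustrative path

    server_term = redis.StrictRedis(
        host=cfg.get("Redis_Level_DB_TermFreq", "host"),
        port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
        db=cfg.getint("Redis_Level_DB_TermFreq", "db"))
    print server_term.ping()  # True once the server is reachable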
@@ -119,10 +119,6 @@ if __name__ == '__main__':

     while True:
         # Get one message from the input queue (module only work if linked with a queue)
-        if message is None:
-            publisher.debug("{} queue is empty, waiting".format(config_section))
-            print 'sleeping'
-            time.sleep(Refresh_rate) # sleep a long time then manage the set
-            manage_top_set()
-            continue
+        time.sleep(Refresh_rate) # sleep a long time then manage the set
+        manage_top_set()

@@ -1,20 +1,20 @@
 #!/usr/bin/env python2
 # -*-coding:UTF-8 -*


 import time
 import datetime
 import redis
 import os
 import signal
+import argparse
 from subprocess import PIPE, Popen
 import ConfigParser
 import json
-from prettytable import PrettyTable
+from terminaltables import AsciiTable
+import textwrap

 # CONFIG VARIABLES
 threshold_stucked_module = 60*60*1 #1 hour
-refreshRate = 1
 log_filename = "../logs/moduleInfo.log"
 command_search_pid = "ps a -o pid,cmd | grep {}"
 command_restart_module = "screen -S \"Script\" -X screen -t \"{}\" bash -c \"./{}.py; read x\""
@@ -59,6 +59,12 @@ def kill_module(module):

 if __name__ == "__main__":

+    parser = argparse.ArgumentParser(description='Show info concerning running modules and log suspected stucked modules. May be use to automatically kill and restart stucked one.')
+    parser.add_argument('-r', '--refresh', type=int, required=False, default=1, help='Refresh rate')
+    parser.add_argument('-k', '--autokill', type=int, required=True, default=1, help='Enable auto kill option (1 for TRUE, anything else for FALSE)')
+
+    args = parser.parse_args()
+
     configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
     if not os.path.exists(configfile):
         raise Exception('Unable to find the configuration file. \
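The new argparse block adds a refresh interval and an autokill switch that gates kill_module() further down in the same file. A small standalone sketch, assuming nothing beyond the standard library, that parses a hard-coded argument list instead of the real command line:

    # Sketch only: same flags as the diff, fed from an explicit list.
    import argparse

    parser = argparse.ArgumentParser(description='moduleInfo options (illustrative)')
    parser.add_argument('-r', '--refresh', type=int, required=False, default=1, help='Refresh rate')
    parser.add_argument('-k', '--autokill', type=int, required=True, default=1,
                        help='Enable auto kill option (1 for TRUE, anything else for FALSE)')

    args = parser.parse_args(['-r', '5', '-k', '0'])
    print args.refresh   # 5, later used as time.sleep(args.refresh)
    print args.autokill  # 0, so kill_module() would be skipped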
@@ -76,9 +82,9 @@ if __name__ == "__main__":

     while True:

-        table1 = PrettyTable(['#', 'Queue', 'Amount', 'Paste start time', 'Processing time for current paste (H:M:S)', 'Paste hash'], sortby="Processing time for current paste (H:M:S)", reversesort=True)
-        table2 = PrettyTable(['#', 'Queue', 'Amount', 'Paste start time', 'Time since idle (H:M:S)', 'Last paste hash'], sortby="Time since idle (H:M:S)", reversesort=True)
         num = 0
+        printarray1 = []
+        printarray2 = []
         for queue, card in server.hgetall("queues").iteritems():
             key = "MODULE_" + queue
             value = server.get(key)
@@ -93,17 +99,57 @@ if __name__ == "__main__":
                 if int((datetime.datetime.now() - startTime_readable).total_seconds()) > threshold_stucked_module:
                     log = open(log_filename, 'a')
                     log.write(json.dumps([queue, card, str(startTime_readable), str(processed_time_readable), path]) + "\n")
-                    kill_module(queue)
+                    if args.autokill == 1:
+                        kill_module(queue)

-                table1.add_row([num, queue, card, startTime_readable, processed_time_readable, path])
+                printarray1.append([str(num), str(queue), str(card), str(startTime_readable), str(processed_time_readable), str(path)])

             else:
-                table2.add_row([num, queue, card, startTime_readable, processed_time_readable, path])
+                printarray2.append([str(num), str(queue), str(card), str(startTime_readable), str(processed_time_readable), str(path)])

+        printarray1.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
+        printarray2.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
+        printarray1.insert(0,["#", "Queue", "Amount", "Paste start time", "Processing time for current paste (H:M:S)", "Paste hash"])
+        printarray2.insert(0,["#", "Queue", "Amount", "Paste start time", "Time since idle (H:M:S)", "Last paste hash"])

         os.system('clear')
-        print 'Working queues:\n'
-        print table1
+        t1 = AsciiTable(printarray1, title="Working queues")
+        t1.column_max_width(1)
+        if not t1.ok:
+            longest_col = t1.column_widths.index(max(t1.column_widths))
+            max_length_col = t1.column_max_width(longest_col)
+            if max_length_col > 0:
+                for i, content in enumerate(t1.table_data):
+                    if len(content[longest_col]) > max_length_col:
+                        temp = ''
+                        for l in content[longest_col].splitlines():
+                            if len(l) > max_length_col:
+                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
+                            else:
+                                temp += l + '\n'
+                        content[longest_col] = temp.strip()
+                        t1.table_data[i] = content
+
+        t2 = AsciiTable(printarray2, title="Iddeling queues")
+        t2.column_max_width(1)
+        if not t2.ok:
+            longest_col = t2.column_widths.index(max(t2.column_widths))
+            max_length_col = t2.column_max_width(longest_col)
+            if max_length_col > 0:
+                for i, content in enumerate(t2.table_data):
+                    if len(content[longest_col]) > max_length_col:
+                        temp = ''
+                        for l in content[longest_col].splitlines():
+                            if len(l) > max_length_col:
+                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
+                            else:
+                                temp += l + '\n'
+                        content[longest_col] = temp.strip()
+                        t2.table_data[i] = content
+
+
+        print t1.table
         print '\n'
-        print 'Ideling queues:\n'
-        print table2
+        print t2.table

-        time.sleep(refreshRate)
+        time.sleep(args.refresh)

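The display rewrite above drops PrettyTable for terminaltables and, when a table is wider than the terminal, wraps the widest column with textwrap before printing. Here is a compact standalone sketch of that wrapping pattern with made-up row data; the AsciiTable attributes used (ok, column_widths, column_max_width, table_data, table) are the same ones the diff relies on.

    # Sketch only: shrink the widest column of an AsciiTable so it fits the terminal.
    import textwrap
    from terminaltables import AsciiTable

    rows = [['#', 'Queue', 'Last paste hash'],
            ['0', 'Web', 'an-extremely-long-paste-path-that-would-overflow-a-narrow-terminal.gz']]
    t = AsciiTable(rows, title="Working queues")

    if not t.ok:  # the rendered table would not fit the terminal width
        longest_col = t.column_widths.index(max(t.column_widths))
        max_length_col = t.column_max_width(longest_col)
        if max_length_col > 0:
            for i, content in enumerate(t.table_data):
                # wrap the cell in the widest column to the allowed width
                content[longest_col] = '\n'.join(textwrap.wrap(content[longest_col], max_length_col))
                t.table_data[i] = content

    print t.table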
@@ -77,12 +77,14 @@ def compute_progression(server, field_name, num_day, url_parsed):
            member_set = []
            for keyw in server.smembers(redis_progression_name_set):
                member_set.append((keyw, int(server.hget(redis_progression_name, keyw))))
-            print member_set
            member_set.sort(key=lambda tup: tup[1])
            if member_set[0][1] < keyword_increase:
+                print 'removing', member_set[0][0] + '('+str(member_set[0][1])+')', 'and adding', keyword, str(keyword_increase)
                #remove min from set and add the new one
-                server.srem(redis_progression_name_set, member_set[0])
+                server.srem(redis_progression_name_set, member_set[0][0])
                server.sadd(redis_progression_name_set, keyword)
+                server.hdel(redis_progression_name, member_set[0][0])
+                server.hset(redis_progression_name, keyword, keyword_increase)


 if __name__ == '__main__':

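This hunk is the WebStats fix named in the commit message: member_set holds (keyword, count) tuples, so the old srem call passed a whole tuple rather than the keyword and the intended set member was never removed, and the matching hash counter was never updated. Below is a hedged sketch of the corrected replace-the-minimum step against a throwaway Redis database; the connection, key names and incoming keyword are illustrative, only the srem/sadd/hdel/hset call pattern mirrors the diff.

    # Sketch only: evict the lowest-scoring keyword from a top-N set and its hash.
    import redis

    server = redis.StrictRedis(host='localhost', port=6379, db=0)  # illustrative connection

    name_set = 'demo_progression_set'     # illustrative key names
    name_hash = 'demo_progression_hash'
    keyword, keyword_increase = 'newkeyword', 42  # made-up incoming value

    # assumes every set member has a counter in the hash, as in the module
    member_set = [(keyw, int(server.hget(name_hash, keyw)))
                  for keyw in server.smembers(name_set)]
    member_set.sort(key=lambda tup: tup[1])

    if member_set and member_set[0][1] < keyword_increase:
        server.srem(name_set, member_set[0][0])   # the keyword, not the whole tuple
        server.sadd(name_set, keyword)
        server.hdel(name_hash, member_set[0][0])  # drop the old counter too
        server.hset(name_hash, keyword, keyword_increase)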
@@ -10,7 +10,7 @@ textblob
 numpy
 matplotlib
 networkx
-prettytable
+terminaltables

 #Tokeniser
 nltk