Mirror of https://github.com/ail-project/ail-framework.git
Fixed a bug in WebStats that was not deleting the correct key, fixed a typo in CurveSetManager that prevented connecting to Redis, and modified the display in moduleInfo
Commit 9146feab88, parent 611454ba82
4 changed files with 71 additions and 27 deletions
@@ -106,9 +106,9 @@ if __name__ == '__main__':
    cfg.read(configfile)

    server_term = redis.StrictRedis(
        host=p.config.get("Redis_Level_DB_TermFreq", "host"),
        port=p.config.get("Redis_Level_DB_TermFreq", "port"),
        db=p.config.get("Redis_Level_DB_TermFreq", "db"))
        host=cfg.get("Redis_Level_DB_TermFreq", "host"),
        port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
        db=cfg.getint("Redis_Level_DB_TermFreq", "db"))

    publisher.info("Script Curve_manage_top_set started")

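For reference, the corrected connection pattern reads the Redis_Level_DB_TermFreq section straight from the ConfigParser object and casts port and db to integers with getint(). A minimal self-contained sketch of that pattern; the config path is an assumption (the module builds it from AIL_BIN), the section and option names come from the hunk above:

import redis
import ConfigParser  # Python 2 module, as used throughout the framework

cfg = ConfigParser.ConfigParser()
cfg.read('packages/config.cfg')  # assumed path for illustration

# port and db must be integers, hence cfg.getint() rather than cfg.get()
server_term = redis.StrictRedis(
    host=cfg.get("Redis_Level_DB_TermFreq", "host"),
    port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
    db=cfg.getint("Redis_Level_DB_TermFreq", "db"))

server_term.ping()  # raises redis.ConnectionError if the settings are wrong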
@@ -119,10 +119,6 @@ if __name__ == '__main__':

    while True:
        # Get one message from the input queue (module only work if linked with a queue)
        if message is None:
            publisher.debug("{} queue is empty, waiting".format(config_section))
            print 'sleeping'
            time.sleep(Refresh_rate) # sleep a long time then manage the set
            manage_top_set()
            continue

        time.sleep(Refresh_rate) # sleep a long time then manage the set
        manage_top_set()

@@ -1,20 +1,20 @@
#!/usr/bin/env python2
# -*-coding:UTF-8 -*

import time
import datetime
import redis
import os
import signal
import argparse
from subprocess import PIPE, Popen
import ConfigParser
import json
from prettytable import PrettyTable
from terminaltables import AsciiTable
import textwrap

# CONFIG VARIABLES
threshold_stucked_module = 60*60*1 #1 hour
refreshRate = 1
log_filename = "../logs/moduleInfo.log"
command_search_pid = "ps a -o pid,cmd | grep {}"
command_restart_module = "screen -S \"Script\" -X screen -t \"{}\" bash -c \"./{}.py; read x\""

@@ -59,6 +59,12 @@ def kill_module(module):

if __name__ == "__main__":

    parser = argparse.ArgumentParser(description='Show info concerning running modules and log suspected stucked modules. May be use to automatically kill and restart stucked one.')
    parser.add_argument('-r', '--refresh', type=int, required=False, default=1, help='Refresh rate')
    parser.add_argument('-k', '--autokill', type=int, required=True, default=1, help='Enable auto kill option (1 for TRUE, anything else for FALSE)')

    args = parser.parse_args()

    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
    if not os.path.exists(configfile):
        raise Exception('Unable to find the configuration file. \

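A condensed sketch of how the two new options are meant to work together: -r sets the refresh interval and -k gates the automatic kill that appears further down in this diff. The kill_module stub and the stuck_modules placeholder below are illustrative stand-ins for the real detection and restart logic in the module:

import argparse
import time

def kill_module(module):
    # stand-in for the real kill/restart logic defined earlier in the file
    print('would kill and restart {}'.format(module))

parser = argparse.ArgumentParser(description='Show info concerning running modules.')
parser.add_argument('-r', '--refresh', type=int, required=False, default=1, help='Refresh rate')
parser.add_argument('-k', '--autokill', type=int, required=True, default=1,
                    help='Enable auto kill option (1 for TRUE, anything else for FALSE)')
args = parser.parse_args()

stuck_modules = []  # assumed to be filled by the stuck-module detection
while True:
    for module in stuck_modules:
        if args.autokill == 1:   # only kill when explicitly enabled with -k 1
            kill_module(module)
    time.sleep(args.refresh)     # -r controls how often the display refreshes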
@@ -76,9 +82,9 @@ if __name__ == "__main__":

    while True:

        table1 = PrettyTable(['#', 'Queue', 'Amount', 'Paste start time', 'Processing time for current paste (H:M:S)', 'Paste hash'], sortby="Processing time for current paste (H:M:S)", reversesort=True)
        table2 = PrettyTable(['#', 'Queue', 'Amount', 'Paste start time', 'Time since idle (H:M:S)', 'Last paste hash'], sortby="Time since idle (H:M:S)", reversesort=True)
        num = 0
        printarray1 = []
        printarray2 = []
        for queue, card in server.hgetall("queues").iteritems():
            key = "MODULE_" + queue
            value = server.get(key)

@@ -93,17 +99,57 @@ if __name__ == "__main__":
                if int((datetime.datetime.now() - startTime_readable).total_seconds()) > threshold_stucked_module:
                    log = open(log_filename, 'a')
                    log.write(json.dumps([queue, card, str(startTime_readable), str(processed_time_readable), path]) + "\n")
                    kill_module(queue)
                    if args.autokill == 1:
                        kill_module(queue)

                table1.add_row([num, queue, card, startTime_readable, processed_time_readable, path])
                printarray1.append([str(num), str(queue), str(card), str(startTime_readable), str(processed_time_readable), str(path)])

            else:
                table2.add_row([num, queue, card, startTime_readable, processed_time_readable, path])
                printarray2.append([str(num), str(queue), str(card), str(startTime_readable), str(processed_time_readable), str(path)])

        printarray1.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
        printarray2.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
        printarray1.insert(0,["#", "Queue", "Amount", "Paste start time", "Processing time for current paste (H:M:S)", "Paste hash"])
        printarray2.insert(0,["#", "Queue", "Amount", "Paste start time", "Time since idle (H:M:S)", "Last paste hash"])

        os.system('clear')
        print 'Working queues:\n'
        print table1
        t1 = AsciiTable(printarray1, title="Working queues")
        t1.column_max_width(1)
        if not t1.ok:
            longest_col = t1.column_widths.index(max(t1.column_widths))
            max_length_col = t1.column_max_width(longest_col)
            if max_length_col > 0:
                for i, content in enumerate(t1.table_data):
                    if len(content[longest_col]) > max_length_col:
                        temp = ''
                        for l in content[longest_col].splitlines():
                            if len(l) > max_length_col:
                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
                            else:
                                temp += l + '\n'
                        content[longest_col] = temp.strip()
                        t1.table_data[i] = content

        t2 = AsciiTable(printarray2, title="Iddeling queues")
        t2.column_max_width(1)
        if not t2.ok:
            longest_col = t2.column_widths.index(max(t2.column_widths))
            max_length_col = t2.column_max_width(longest_col)
            if max_length_col > 0:
                for i, content in enumerate(t2.table_data):
                    if len(content[longest_col]) > max_length_col:
                        temp = ''
                        for l in content[longest_col].splitlines():
                            if len(l) > max_length_col:
                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
                            else:
                                temp += l + '\n'
                        content[longest_col] = temp.strip()
                        t2.table_data[i] = content

        print t1.table
        print '\n'
        print 'Ideling queues:\n'
        print table2
        time.sleep(refreshRate)
        print t2.table

        time.sleep(args.refresh)

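The same column-wrapping logic is applied to both tables above. Distilled into a standalone helper it looks roughly like this; the code below is a sketch against the terminaltables AsciiTable API (ok, column_widths, column_max_width, table_data, all used in the hunk), and the helper name and sample rows are chosen here purely for illustration:

import textwrap
from terminaltables import AsciiTable

def wrap_widest_column(table):
    """If the rendered table is too wide for the terminal, hard-wrap its widest column."""
    if table.ok:                        # table already fits, nothing to do
        return
    longest_col = table.column_widths.index(max(table.column_widths))
    max_length_col = table.column_max_width(longest_col)
    if max_length_col <= 0:             # terminal too narrow even for wrapping
        return
    for i, content in enumerate(table.table_data):
        if len(content[longest_col]) > max_length_col:
            wrapped = []
            for line in content[longest_col].splitlines():
                if len(line) > max_length_col:
                    wrapped.append('\n'.join(textwrap.wrap(line, max_length_col)))
                else:
                    wrapped.append(line)
            content[longest_col] = '\n'.join(wrapped)
            table.table_data[i] = content

t1 = AsciiTable([['Queue', 'Paste hash'], ['Categ', 'x' * 300]], title="Working queues")
wrap_widest_column(t1)
print(t1.table)

Wrapping only the widest column keeps the other columns intact while letting the table fit the terminal width reported by terminaltables.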
@@ -77,12 +77,14 @@ def compute_progression(server, field_name, num_day, url_parsed):
        member_set = []
        for keyw in server.smembers(redis_progression_name_set):
            member_set.append((keyw, int(server.hget(redis_progression_name, keyw))))
        print member_set
        member_set.sort(key=lambda tup: tup[1])
        if member_set[0][1] < keyword_increase:
            print 'removing', member_set[0][0] + '('+str(member_set[0][1])+')', 'and adding', keyword, str(keyword_increase)
            #remove min from set and add the new one
            server.srem(redis_progression_name_set, member_set[0])
            server.srem(redis_progression_name_set, member_set[0][0])
            server.sadd(redis_progression_name_set, keyword)
            server.hdel(redis_progression_name, member_set[0][0])
            server.hset(redis_progression_name, keyword, keyword_increase)


if __name__ == '__main__':

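The WebStats change fixes which member gets removed from the Redis set: member_set[0] is a (keyword, score) tuple, so passing it to srem could not match the keyword string actually stored in the set, and the matching hash field was never cleaned up either. The corrected eviction step as a standalone sketch; the connection settings, key names, and the sample keyword and score below are illustrative:

import redis

server = redis.StrictRedis(host='localhost', port=6379, db=0)  # assumed local instance

redis_progression_name_set = 'top_progression_set'   # illustrative key names
redis_progression_name = 'top_progression_hash'
keyword, keyword_increase = 'example.com', 42        # illustrative new entry

# Build (keyword, score) pairs for the current top set and find the minimum.
member_set = [(keyw, int(server.hget(redis_progression_name, keyw)))
              for keyw in server.smembers(redis_progression_name_set)]
member_set.sort(key=lambda tup: tup[1])

if member_set and member_set[0][1] < keyword_increase:
    # Evict the smallest member: srem must receive the keyword string
    # (member_set[0][0]), not the (keyword, score) tuple, otherwise nothing
    # is removed and the set keeps growing.
    server.srem(redis_progression_name_set, member_set[0][0])
    server.hdel(redis_progression_name, member_set[0][0])
    server.sadd(redis_progression_name_set, keyword)
    server.hset(redis_progression_name, keyword, keyword_increase)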
@@ -10,7 +10,7 @@ textblob
numpy
matplotlib
networkx
prettytable
terminaltables

#Tokeniser
nltk