Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-27 00:07:16 +00:00)

Merge remote-tracking branch 'upstream/master'
Commit 68c763dea1: 27 changed files with 720 additions and 793 deletions
@@ -66,7 +66,7 @@ if __name__ == "__main__":
                 publisher.warning('{}Checked {} valid number(s)'.format(
                     to_print, len(creditcard_set)))
                 #Send to duplicate
-                p.populate_set_out(filepath, 'Duplicate')
+                p.populate_set_out(filename, 'Duplicate')
                 #send to Browse_warning_paste
                 p.populate_set_out('creditcard;{}'.format(filename), 'BrowseWarningPaste')
             else:
@@ -22,8 +22,8 @@ from pubsublogger import publisher
 from packages import lib_words
-import datetime
-import calendar

 from Helper import Process
+import os
+import ConfigParser

 # Config Variables
 Refresh_rate = 60*5 #sec
@@ -96,13 +96,19 @@ if __name__ == '__main__':
     # Script is the default channel used for the modules.
     publisher.channel = 'Script'

-    config_section = 'CurveManageTopSets'
-    p = Process(config_section)
+    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
+    if not os.path.exists(configfile):
+        raise Exception('Unable to find the configuration file. \
+                        Did you set environment variables? \
+                        Or activate the virtualenv.')
+
+    cfg = ConfigParser.ConfigParser()
+    cfg.read(configfile)

     server_term = redis.StrictRedis(
-        host=p.config.get("Redis_Level_DB_TermFreq", "host"),
-        port=p.config.get("Redis_Level_DB_TermFreq", "port"),
-        db=p.config.get("Redis_Level_DB_TermFreq", "db"))
+        host=cfg.get("Redis_Level_DB_TermFreq", "host"),
+        port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
+        db=cfg.getint("Redis_Level_DB_TermFreq", "db"))

     publisher.info("Script Curve_manage_top_set started")
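For reference, the cfg.get/cfg.getint calls above expect a section of this shape in packages/config.cfg; the values shown here are illustrative assumptions, not the project's shipped defaults:

[Redis_Level_DB_TermFreq]
host = localhost
port = 6382
db = 0

Using cfg.getint for port and db also means redis.StrictRedis now receives real integers, where the old p.config.get calls handed it strings.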
@@ -113,11 +119,6 @@ if __name__ == '__main__':

     while True:
-        # Get one message from the input queue (module only work if linked with a queue)
-        message = p.get_from_set()
-        if message is None:
-            publisher.debug("{} queue is empty, waiting".format(config_section))
-            print 'sleeping'
-            time.sleep(Refresh_rate)  # sleep a long time then manage the set
-            manage_top_set()
-            continue
+        time.sleep(Refresh_rate)  # sleep a long time then manage the set
+        manage_top_set()
@@ -1,182 +0,0 @@ (entire file removed)
#!/usr/bin/env python2
# -*-coding:UTF-8 -*

"""
The Duplicate module
====================

This huge module is, in short, checking for duplicates.

Requirements:
-------------

"""
import redis
import os
import time
import datetime
import json
import ssdeep
from packages import Paste
from pubsublogger import publisher

from Helper import Process

if __name__ == "__main__":
    publisher.port = 6380
    publisher.channel = "Script"

    config_section = 'Duplicates'
    save_dico_and_reload = 1  #min
    time_1 = time.time()
    flag_reload_from_disk = True
    flag_write_to_disk = False

    p = Process(config_section)

    # REDIS #
    # DB OBJECT & HASHS ( DISK )
    # FIXME increase flexibility
    dico_redis = {}
    for year in xrange(2013, datetime.date.today().year+1):
        for month in xrange(0, 16):
            dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
                host=p.config.get("Redis_Level_DB", "host"), port=year,
                db=month)
            #print("dup: "+str(year)+str(month).zfill(2)+"\n")

    # FUNCTIONS #
    publisher.info("Script duplicate started")

    dicopath = os.path.join(os.environ['AIL_HOME'],
                            p.config.get("Directories", "dicofilters"))

    dico_path_set = set()
    while True:
        try:
            hash_dico = {}
            dupl = []

            x = time.time()

            message = p.get_from_set()
            if message is not None:
                path = message
                PST = Paste.Paste(path)
            else:
                publisher.debug("Script Attribute is idling 10s")
                time.sleep(10)
                continue

            PST._set_p_hash_kind("ssdeep")

            # Assign the correct redis connection
            r_serv1 = dico_redis[PST.p_date.year + PST.p_date.month]

            # Creating the dico name: dicoyyyymm
            filedicopath = os.path.join(dicopath, 'dico' + PST.p_date.year +
                                        PST.p_date.month)
            filedicopath_today = filedicopath

            # Save I/O
            if time.time() - time_1 > save_dico_and_reload*60:
                flag_write_to_disk = True

            if os.path.exists(filedicopath):
                if flag_reload_from_disk == True:
                    flag_reload_from_disk = False
                    print 'Reloading'
                    with open(filedicopath, 'r') as fp:
                        today_dico = json.load(fp)
            else:
                today_dico = {}
                with open(filedicopath, 'w') as fp:
                    json.dump(today_dico, fp)

            # For now, just use monthly dico
            dico_path_set.add(filedicopath)

            # UNIQUE INDEX HASHS TABLE
            yearly_index = str(datetime.date.today().year)+'00'
            r_serv0 = dico_redis[yearly_index]
            r_serv0.incr("current_index")
            index = r_serv0.get("current_index")+str(PST.p_date)

            # For each dico
            opened_dico = []
            for dico in dico_path_set:
                # Opening dico
                if dico == filedicopath_today:
                    opened_dico.append([dico, today_dico])
                else:
                    with open(dico, 'r') as fp:
                        opened_dico.append([dico, json.load(fp)])

            #retrieve hash from paste
            paste_hash = PST._get_p_hash()

            # Go through the Database of the dico (of the month)
            threshold_dup = 99
            for dico_name, dico in opened_dico:
                for dico_key, dico_hash in dico.items():
                    percent = ssdeep.compare(dico_hash, paste_hash)
                    if percent > threshold_dup:
                        db = dico_name[-6:]
                        # Go through the Database of the dico filter (month)
                        r_serv_dico = dico_redis[db]

                        # index of paste
                        index_current = r_serv_dico.get(dico_hash)
                        paste_path = r_serv_dico.get(index_current)
                        if paste_path != None:
                            hash_dico[dico_hash] = (paste_path, percent)

                    #print 'comparing: ' + str(dico_hash[:20]) + ' and ' + str(paste_hash[:20]) + ' percentage: ' + str(percent)
                    print ' '+ PST.p_path[44:] +', '+ paste_path[44:] + ', ' + str(percent)

            # Add paste to the DB to prevent analysing it twice
            # HASHTABLES PER MONTH (because of r_serv1 changing db)
            r_serv1.set(index, PST.p_path)
            r_serv1.sadd("INDEX", index)
            # Adding the hash in Redis
            r_serv1.set(paste_hash, index)
            r_serv1.sadd("HASHS", paste_hash)
            ##################### Similarity found #######################

            # if there is data in this dictionary
            if len(hash_dico) != 0:
                for dico_hash, paste_tuple in hash_dico.items():
                    paste_path, percent = paste_tuple
                    dupl.append((paste_path, percent))

                # Creating the object attribute and save it.
                to_print = 'Duplicate;{};{};{};'.format(
                    PST.p_source, PST.p_date, PST.p_name)
                if dupl != []:
                    PST.__setattr__("p_duplicate", dupl)
                    PST.save_attribute_redis("p_duplicate", dupl)
                    publisher.info('{}Detected {}'.format(to_print, len(dupl)))
                    print '{}Detected {}'.format(to_print, len(dupl))

            y = time.time()

            publisher.debug('{}Processed in {} sec'.format(to_print, y-x))

            # Adding the hash in the dico of the month
            today_dico[index] = paste_hash

            if flag_write_to_disk:
                time_1 = time.time()
                flag_write_to_disk = False
                flag_reload_from_disk = True
                print 'writing'
                with open(filedicopath, 'w') as fp:
                    json.dump(today_dico, fp)
        except IOError:
            to_print = 'Duplicate;{};{};{};'.format(
                PST.p_source, PST.p_date, PST.p_name)
            print "CRC Checksum Failed on :", PST.p_path
            publisher.error('{}CRC Checksum Failed'.format(to_print))
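The technique the removed module is built on, ssdeep fuzzy hashing, survives in its replacement: similar inputs produce hashes whose closeness can be scored. A minimal sketch using the same ssdeep binding imported above (the inputs are made-up):

import ssdeep

# Fuzzy hashes stay comparable across small edits, unlike
# cryptographic hashes, which change completely.
h1 = ssdeep.hash('The quick brown fox jumps over the lazy dog. ' * 8)
h2 = ssdeep.hash('The quick brown fox jumped over the lazy dog. ' * 8)

# compare() returns an integer similarity score from 0 to 100;
# the module above only flagged scores above threshold_dup = 99.
print ssdeep.compare(h1, h2)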
@@ -16,6 +16,7 @@ import ConfigParser
 import os
 import zmq
 import time
+import datetime
 import json

@@ -107,6 +108,7 @@ class Process(object):
        self.modules = ConfigParser.ConfigParser()
        self.modules.read(modulesfile)
        self.subscriber_name = conf_section

        self.pubsub = None
        if self.modules.has_section(conf_section):
            self.pubsub = PubSub()
@@ -117,6 +119,15 @@ class Process(object):
             port=self.config.get('RedisPubSub', 'port'),
             db=self.config.get('RedisPubSub', 'db'))

+        self.moduleNum = 1
+        for i in range(1, 50):
+            curr_num = self.r_temp.get("MODULE_"+self.subscriber_name + "_" + str(i))
+            if curr_num is None:
+                self.moduleNum = i
+                break
+
+
     def populate_set_in(self):
         # monoproc
         src = self.modules.get(self.subscriber_name, 'subscribe')
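The loop above probes MODULE_<name>_1 through MODULE_<name>_49 and takes the first number with no key behind it, so a second instance of the same module would land on the next slot. A toy illustration of the probing logic, with a plain dict standing in for the Redis keyspace (the toy claims the slot eagerly, which the real code only does on the first heartbeat write):

keyspace = {}  # stands in for the r_temp Redis keyspace

def allocate(subscriber_name):
    # Probe MODULE_<name>_1..49 until an unused slot appears.
    for i in range(1, 50):
        key = "MODULE_" + subscriber_name + "_" + str(i)
        if key not in keyspace:
            keyspace[key] = ''
            return i

print allocate('Duplicates')  # -> 1
print allocate('Duplicates')  # -> 2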
@@ -132,7 +143,28 @@ class Process(object):
         in_set = self.subscriber_name + 'in'
         self.r_temp.hset('queues', self.subscriber_name,
                          int(self.r_temp.scard(in_set)))
-        return self.r_temp.spop(in_set)
+        message = self.r_temp.spop(in_set)
+        timestamp = int(time.mktime(datetime.datetime.now().timetuple()))
+        dir_name = os.environ['AIL_HOME']+self.config.get('Directories', 'pastes')
+
+        if message is None:
+            return None
+
+        else:
+            try:
+                if ".gz" in message:
+                    path = message.split(".")[-2].split("/")[-1]
+                else:
+                    path = "?"
+                value = str(timestamp) + ", " + path
+                self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value)
+                return message
+
+            except:
+                path = "?"
+                value = str(timestamp) + ", " + path
+                self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value)
+                return message

     def populate_set_out(self, msg, channel=None):
         # multiproc
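Each successful get_from_set now leaves a '<timestamp>, <path>' heartbeat under MODULE_<name>_<num>, which ModuleInformation.py and the web dashboard split on ', '. A minimal sketch of reading one back (the host, port, db and the slot number are illustrative assumptions):

import datetime
import redis

r = redis.StrictRedis(host='localhost', port=6381, db=0)

value = r.get('MODULE_Duplicates_1')  # hypothetical module slot
if value is not None:
    timestamp, path = value.split(', ')
    started = datetime.datetime.fromtimestamp(int(timestamp))
    print 'working on', path, 'since', started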
@@ -112,33 +112,35 @@ function launching_scripts {

    echo -e $GREEN"\t* Launching ZMQ scripts"$DEFAULT

+   screen -S "Script" -X screen -t "ModuleInformation" bash -c './ModuleInformation.py -k 0 -c 1; read x'
+   sleep 0.1
    screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "Duplicate" bash -c './Duplicate_ssdeep_v2.py; read x'
+   screen -S "Script" -X screen -t "Duplicates" bash -c './Duplicates.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "Attribute" bash -c './Attribute.py; read x'
+   screen -S "Script" -X screen -t "Attributes" bash -c './Attributes.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "Line" bash -c './Line.py; read x'
+   screen -S "Script" -X screen -t "Lines" bash -c './Lines.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "DomainClassifier" bash -c './DomClassifier.py; read x'
+   screen -S "Script" -X screen -t "DomClassifier" bash -c './DomClassifier.py; read x'
    sleep 0.1
    screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
    sleep 0.1
    screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "CreditCard" bash -c './CreditCard.py; read x'
+   screen -S "Script" -X screen -t "CreditCards" bash -c './CreditCards.py; read x'
    sleep 0.1
    screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
    sleep 0.1
    screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "Url" bash -c './Url.py; read x'
+   screen -S "Script" -X screen -t "Web" bash -c './Web.py; read x'
    sleep 0.1
    screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
    sleep 0.1
    screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "Curve_topsets_manager" bash -c './Curve_manage_top_sets.py; read x'
+   screen -S "Script" -X screen -t "CurveManageTopSets" bash -c './CurveManageTopSets.py; read x'
    sleep 0.1
    screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
    sleep 0.1
@@ -158,7 +160,7 @@ function launching_scripts {
    sleep 0.1
    screen -S "Script" -X screen -t "Browse_warning_paste" bash -c './Browse_warning_paste.py; read x'
    sleep 0.1
-   screen -S "Script" -X screen -t "SentimentAnalyser" bash -c './SentimentAnalyser.py; read x'
+   screen -S "Script" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'

}
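A note on the pattern above: every module runs as one window of a single screen session named "Script", so an operator can attach with screen -r Script and cycle through windows to watch any module. The trailing read x keeps a window open after its script exits, leaving the last output visible; this reading is inferred from the commands themselves.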
bin/ModuleInformation.py (new executable file, 198 lines)
@@ -0,0 +1,198 @@ (new file)
#!/usr/bin/env python2
# -*-coding:UTF-8 -*

'''

This module can be used to see information about running modules.
This information is logged in "logs/moduleInfo.log".

It can also try to manage them by killing inactive ones.
However, it does not support multiple occurrences of the same module
(it will kill the first one obtained by get).

'''

import time
import datetime
import redis
import os
import signal
import argparse
from subprocess import PIPE, Popen
import ConfigParser
import json
from terminaltables import AsciiTable
import textwrap

# CONFIG VARIABLES
threshold_stucked_module = 60*60*1  #1 hour
log_filename = "../logs/moduleInfo.log"
command_search_pid = "ps a -o pid,cmd | grep {}"
command_restart_module = "screen -S \"Script\" -X screen -t \"{}\" bash -c \"./{}.py; read x\""


def getPid(module):
    p = Popen([command_search_pid.format(module+".py")], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
    for line in p.stdout:
        splittedLine = line.split()
        if 'python2' in splittedLine:
            return int(splittedLine[0])
        else:
            return None

def clearRedisModuleInfo():
    for k in server.keys("MODULE_*"):
        server.delete(k)

def kill_module(module):
    print ''
    print '-> trying to kill module:', module

    pid = getPid(module)
    if pid is not None:
        os.kill(pid, signal.SIGUSR1)
        time.sleep(1)
        if getPid(module) is None:
            print module, 'has been killed'
            print 'restarting', module, '...'
            p2 = Popen([command_restart_module.format(module, module)], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)

        else:
            print 'killing failed, retrying...'
            time.sleep(3)
            os.kill(pid, signal.SIGUSR1)
            time.sleep(1)
            if getPid(module) is None:
                print module, 'has been killed'
                print 'restarting', module, '...'
                p2 = Popen([command_restart_module.format(module, module)], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
            else:
                print 'killing failed!'
                time.sleep(7)


if __name__ == "__main__":

    parser = argparse.ArgumentParser(description='Show info concerning running modules and log suspected stuck modules. May be used to automatically kill and restart stuck ones.')
    parser.add_argument('-r', '--refresh', type=int, required=False, default=1, help='Refresh rate')
    parser.add_argument('-k', '--autokill', type=int, required=True, default=1, help='Enable auto kill option (1 for TRUE, anything else for FALSE)')
    parser.add_argument('-c', '--clear', type=int, required=False, default=1, help='Clear the current module information (Used to clear data from old launched modules)')

    args = parser.parse_args()

    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
    if not os.path.exists(configfile):
        raise Exception('Unable to find the configuration file. \
                        Did you set environment variables? \
                        Or activate the virtualenv.')

    cfg = ConfigParser.ConfigParser()
    cfg.read(configfile)

    # REDIS #
    server = redis.StrictRedis(
        host=cfg.get("Redis_Queues", "host"),
        port=cfg.getint("Redis_Queues", "port"),
        db=cfg.getint("Redis_Queues", "db"))

    if args.clear == 1:
        clearRedisModuleInfo()

    module_file_array = set()
    with open('../doc/all_modules.txt', 'r') as module_file:
        for line in module_file:
            module_file_array.add(line[:-1])

    while True:

        all_queue = set()
        curr_range = 50
        printarray1 = []
        printarray2 = []
        printarray3 = []
        for queue, card in server.hgetall("queues").iteritems():
            all_queue.add(queue)
            key = "MODULE_" + queue + "_"
            for i in range(1, 50):
                curr_num = server.get("MODULE_"+ queue + "_" + str(i))
                if curr_num is None:
                    curr_range = i
                    break

            for moduleNum in range(1, curr_range):
                value = server.get(key + str(moduleNum))
                if value is not None:
                    timestamp, path = value.split(", ")
                    if timestamp is not None and path is not None:
                        startTime_readable = datetime.datetime.fromtimestamp(int(timestamp))
                        processed_time_readable = str((datetime.datetime.now() - startTime_readable)).split('.')[0]

                        if int(card) > 0:
                            if int((datetime.datetime.now() - startTime_readable).total_seconds()) > threshold_stucked_module:
                                log = open(log_filename, 'a')
                                log.write(json.dumps([queue, card, str(startTime_readable), str(processed_time_readable), path]) + "\n")
                                if args.autokill == 1:
                                    kill_module(queue)

                            printarray1.append([str(queue), str(moduleNum), str(card), str(startTime_readable), str(processed_time_readable), str(path)])

                        else:
                            printarray2.append([str(queue), str(moduleNum), str(card), str(startTime_readable), str(processed_time_readable), str(path)])

        for curr_queue in module_file_array:
            if curr_queue not in all_queue:
                printarray3.append([curr_queue, "Not running"])

        printarray1.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
        printarray2.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
        printarray1.insert(0,["Queue", "#", "Amount", "Paste start time", "Processing time for current paste (H:M:S)", "Paste hash"])
        printarray2.insert(0,["Queue", "#","Amount", "Paste start time", "Time since idle (H:M:S)", "Last paste hash"])
        printarray3.insert(0,["Queue", "State"])

        os.system('clear')
        t1 = AsciiTable(printarray1, title="Working queues")
        t1.column_max_width(1)
        if not t1.ok:
            longest_col = t1.column_widths.index(max(t1.column_widths))
            max_length_col = t1.column_max_width(longest_col)
            if max_length_col > 0:
                for i, content in enumerate(t1.table_data):
                    if len(content[longest_col]) > max_length_col:
                        temp = ''
                        for l in content[longest_col].splitlines():
                            if len(l) > max_length_col:
                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
                            else:
                                temp += l + '\n'
                        content[longest_col] = temp.strip()
                        t1.table_data[i] = content

        t2 = AsciiTable(printarray2, title="Idling queues")
        t2.column_max_width(1)
        if not t2.ok:
            longest_col = t2.column_widths.index(max(t2.column_widths))
            max_length_col = t2.column_max_width(longest_col)
            if max_length_col > 0:
                for i, content in enumerate(t2.table_data):
                    if len(content[longest_col]) > max_length_col:
                        temp = ''
                        for l in content[longest_col].splitlines():
                            if len(l) > max_length_col:
                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
                            else:
                                temp += l + '\n'
                        content[longest_col] = temp.strip()
                        t2.table_data[i] = content

        t3 = AsciiTable(printarray3, title="Not running queues")
        t3.column_max_width(1)

        print t1.table
        print '\n'
        print t2.table
        print '\n'
        print t3.table

        time.sleep(args.refresh)
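One caveat in the new file: because the else: return None branch of getPid sits inside the for loop, the function gives up on the first line of ps output that does not mention python2, even when a later line would match. A sketch of the presumably intended scan (a hypothetical fix, not part of this commit):

def getPid(module):
    # Scan all of the ps output; only report failure after the full scan.
    p = Popen([command_search_pid.format(module + ".py")],
              stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
    for line in p.stdout:
        splittedLine = line.split()
        if 'python2' in splittedLine:
            return int(splittedLine[0])
    return None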
@@ -66,23 +66,7 @@ def compute_progression(server, field_name, num_day, url_parsed):
        # filter
        if (keyword_total_sum > threshold_total_sum) and (keyword_increase > threshold_increase):

-           if server.sismember(redis_progression_name_set, keyword): #if keyword is in the set
-               server.hset(redis_progression_name, keyword, keyword_increase) #update its value
-
-           elif (server.scard(redis_progression_name_set) < max_set_cardinality):
-               server.sadd(redis_progression_name_set, keyword)
-
-           else: #not in the set
-               #Check value for all members
-               member_set = []
-               for keyw in server.smembers(redis_progression_name_set):
-                   member_set.append((keyw, int(server.hget(redis_progression_name, keyw))))
-               print member_set
-               member_set.sort(key=lambda tup: tup[1])
-               if member_set[0][1] < keyword_increase:
-                   #remove min from set and add the new one
-                   server.srem(redis_progression_name_set, member_set[0])
-                   server.sadd(redis_progression_name_set, keyword)
+           server.zadd("z_top_progression_"+field_name, float(keyword_increase), keyword)


 if __name__ == '__main__':
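The replacement leans on a Redis sorted set, which keeps members ordered by score, so the manual evict-the-minimum bookkeeping above disappears. A minimal sketch of the pattern, matching the zadd call signature used in the diff (the key suffix and values are illustrative):

import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0)  # assumed connection

# Score each keyword by its increase; re-adding a member just updates its score.
r.zadd('z_top_progression_domain', 12.0, 'example.com')
r.zadd('z_top_progression_domain', 4.0, 'test.org')

# Top 10 keywords, highest score first, as progressionCharts() now does.
print r.zrevrangebyscore('z_top_progression_domain', '+inf', '-inf',
                         withscores=True, start=0, num=10)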
@@ -8,50 +8,52 @@ sleep 0.1

echo -e $GREEN"\t* Launching ZMQ scripts"$DEFAULT

-screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Duplicate" bash -c './Duplicate_ssdeep_v2.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Attribute" bash -c './Attribute.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Line" bash -c './Line.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "DomainClassifier" bash -c './DomClassifier.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "CreditCard" bash -c './CreditCard.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Url" bash -c './Url.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Curve_topsets_manager" bash -c './Curve_manage_top_sets.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Keys" bash -c './Keys.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Phone" bash -c './Phone.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Release" bash -c './Release.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Cve" bash -c './Cve.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "WebStats" bash -c './WebStats.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Browse_warning_paste" bash -c './Browse_warning_paste.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "SentimentAnalyser" bash -c './SentimentAnalyser.py; read x'
+screen -S "Script" -X screen -t "ModuleInformation" bash -c './ModuleInformation.py -k 0 -c 1; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Duplicates" bash -c './Duplicates.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Attributes" bash -c './Attributes.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Lines" bash -c './Lines.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "DomClassifier" bash -c './DomClassifier.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "CreditCards" bash -c './CreditCards.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Web" bash -c './Web.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "CurveManageTopSets" bash -c './CurveManageTopSets.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Keys" bash -c './Keys.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Phone" bash -c './Phone.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Release" bash -c './Release.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Cve" bash -c './Cve.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "WebStats" bash -c './WebStats.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Browse_warning_paste" bash -c './Browse_warning_paste.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'
@@ -34,7 +34,7 @@ subscribe = Redis_Global
 publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Redis_SourceCode,Redis_Cve

 [CreditCards]
-subscribe = Redis_CreditCard
+subscribe = Redis_CreditCards
 publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste

 [Mail]
doc/generate_graph_data.py (new executable file, 65 lines)
@@ -0,0 +1,65 @@ (new file)
#!/usr/bin/env python2
# -*-coding:UTF-8 -*

content = ""
modules = {}
all_modules = []
curr_module = ""
streamingPub = {}
streamingSub = {}

with open('../bin/packages/modules.cfg', 'r') as f:
    for line in f:
        if line[0] != '#':
            if line[0] == '[':
                curr_name = line.replace('[','').replace(']','').replace('\n', '').replace(' ', '')
                all_modules.append(curr_name)
                modules[curr_name] = {'sub': [], 'pub': []}
                curr_module = curr_name
            elif curr_module != "":  # searching for sub or pub
                if line.startswith("subscribe"):
                    curr_subscribers = [w for w in line.replace('\n', '').replace(' ', '').split('=')[1].split(',')]
                    modules[curr_module]['sub'] = curr_subscribers
                    for sub in curr_subscribers:
                        streamingSub[sub] = curr_module

                elif line.startswith("publish"):
                    curr_publishers = [w for w in line.replace('\n', '').replace(' ', '').split('=')[1].split(',')]
                    modules[curr_module]['pub'] = curr_publishers
                    for pub in curr_publishers:
                        streamingPub[pub] = curr_module
            else:
                continue

output_set_graph = set()
with open('all_modules.txt', 'w') as f2:
    for e in all_modules:
        f2.write(e+"\n")

for module in modules.keys():
    for stream_in in modules[module]['sub']:
        if stream_in not in streamingPub.keys():
            output_set_graph.add("\"" + stream_in + "\" [color=darkorange1] ;\n")
            output_set_graph.add("\"" + stream_in + "\"" + "->" + module + ";\n")
        else:
            output_set_graph.add("\"" + streamingPub[stream_in] + "\"" + "->" + module + ";\n")

    for stream_out in modules[module]['pub']:
        if stream_out not in streamingSub.keys():
            output_set_graph.add("\"" + stream_out + "\" [color=darkorange1] ;\n")
            output_set_graph.add("\"" + stream_out + "\"" + "->" + module + ";\n")
        else:
            output_set_graph.add("\"" + module + "\"" + "->" + streamingSub[stream_out] + ";\n")


output_text_graph = ""
output_text_graph += "digraph unix {\n"\
                     "graph [pad=\"0.5\"];\n"\
                     "size=\"25,25\";\n"\
                     "node [color=lightblue2, style=filled];\n"

for elem in output_set_graph:
    output_text_graph += elem

output_text_graph += "}"
print output_text_graph
doc/generate_modules_data_flow_graph.sh (new executable file, 3 lines)
@@ -0,0 +1,3 @@ (new file)
#!/bin/bash

python generate_graph_data.py | dot -T png -o module-data-flow.png
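The shell script pipes the generator's stdout straight into Graphviz. That output is a DOT digraph whose edges come from the subscribe/publish pairs in modules.cfg; a hand-written illustration of its shape (the two edges are hypothetical, the header lines are the ones the generator emits):

digraph unix {
graph [pad="0.5"];
size="25,25";
node [color=lightblue2, style=filled];
"Redis_Global"->CreditCards;
"CreditCards"->Duplicates;
}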
BIN doc/module-data-flow.png (binary file, not shown; before: 56 KiB, after: 152 KiB)
@@ -17,6 +17,9 @@ sudo apt-get install libadns1 libadns1-dev
 #Needed for redis-lvlDB
 sudo apt-get install libev-dev libgmp-dev

+#Needed for the generate-data-flow graph
+sudo apt-get install graphviz
+
 #needed for matplotlib
 test ! -L /usr/include/ft2build.h && sudo ln -s freetype2/ft2build.h /usr/include/
 sudo easy_install -U distribute
@@ -10,6 +10,7 @@ textblob
 numpy
 matplotlib
 networkx
+terminaltables

 #Tokeniser
 nltk
@@ -81,48 +81,37 @@ def event_stream():

 def get_queues(r):
     # We may want to put the llen in a pipeline to do only one query.
-    return [(queue, int(card)) for queue, card in
-            r.hgetall("queues").iteritems()]
+    data = [(queue, int(card)) for queue, card in r.hgetall("queues").iteritems()]
+    newData = []
+
+    curr_range = 50
+    for queue, card in data:
+        key = "MODULE_" + queue + "_"
+        for i in range(1, 50):
+            curr_num = r.get("MODULE_"+ queue + "_" + str(i))
+            if curr_num is None:
+                curr_range = i
+                break
+
+        for moduleNum in range(1, curr_range):
+            value = r.get(key + str(moduleNum))
+            if value is not None:
+                timestamp, path = value.split(", ")
+                if timestamp is not None:
+                    startTime_readable = datetime.datetime.fromtimestamp(int(timestamp))
+                    processed_time_readable = str((datetime.datetime.now() - startTime_readable)).split('.')[0]
+                    seconds = int((datetime.datetime.now() - startTime_readable).total_seconds())
+                    newData.append( (queue, card, seconds, moduleNum) )
+                else:
+                    newData.append( (queue, cards, 0, moduleNum) )
+
+    return newData


 def list_len(s):
     return len(s)
 app.jinja_env.filters['list_len'] = list_len

-def parseStringToList(the_string):
-    strList = ""
-    elemList = []
-    for c in the_string:
-        if c != ']':
-            if c != '[' and c !=' ' and c != '"':
-                strList += c
-        else:
-            the_list = strList.split(',')
-            if len(the_list) == 3:
-                elemList = elemList + the_list
-            elif len(the_list) == 2:
-                elemList.append(the_list)
-            elif len(the_list) > 1:
-                elemList.append(the_list[1:])
-            strList = ""
-    return elemList
-
-def parseStringToList2(the_string):
-    if the_string == []:
-        return []
-    else:
-        res = []
-        tab_str = the_string.split('], [')
-        tab_str[0] = tab_str[0][1:]+']'
-        tab_str[len(tab_str)-1] = '['+tab_str[len(tab_str)-1][:-1]
-        res.append(parseStringToList(tab_str[0]))
-        for i in range(1, len(tab_str)-2):
-            tab_str[i] = '['+tab_str[i]+']'
-            res.append(parseStringToList(tab_str[i]))
-        if len(tab_str) > 1:
-            res.append(parseStringToList(tab_str[len(tab_str)-1]))
-        return res
-
-
 def showpaste(content_range):
     requested_path = request.args.get('paste', '')
@@ -136,7 +125,7 @@ def showpaste(content_range):
     p_mime = paste.p_mime
     p_lineinfo = paste.get_lines_info()
     p_content = paste.get_p_content().decode('utf-8', 'ignore')
-    p_duplicate_full_list = parseStringToList2(paste._get_p_duplicate())
+    p_duplicate_full_list = json.loads(paste._get_p_duplicate())
     p_duplicate_list = []
     p_simil_list = []
     p_hashtype_list = []
@@ -160,7 +149,7 @@ def showpaste(content_range):
     hash_types = []
     comp_vals = []
     for i in indices:
-        hash_types.append(p_duplicate_full_list[i][0])
+        hash_types.append(p_duplicate_full_list[i][0].encode('utf8'))
         comp_vals.append(p_duplicate_full_list[i][2])
         dup_list_removed.append(i)
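The json.loads switch in showpaste implies p_duplicate is now persisted as a JSON array rather than the ad hoc string the deleted parseStringToList2 used to pick apart. An illustrative round trip, with the element shape inferred from the [i][0]/[i][2] indexing above:

import json

# [hash_type, paste_path, similarity] triples, as indexed in showpaste().
dup = [['ssdeep', 'paste/2016/01/01/example.gz', 87]]

stored = json.dumps(dup)           # what save_attribute_redis would keep
print json.loads(stored)[0][0]     # -> ssdeep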
@@ -267,19 +256,9 @@ def progressionCharts():
         return jsonify(bar_values)

     else:
-        redis_progression_name = 'top_progression_'+trending_name
-        redis_progression_name_set = 'top_progression_'+trending_name+'_set'
-
-        # Iterate over element in top_x_set and retreive their value
-        member_set = []
-        for keyw in r_serv_charts.smembers(redis_progression_name_set):
-            keyw_value = r_serv_charts.hget(redis_progression_name, keyw)
-            keyw_value = keyw_value if keyw_value is not None else 0
-            member_set.append((keyw, int(keyw_value)))
-        member_set.sort(key=lambda tup: tup[1], reverse=True)
-        if len(member_set) == 0:
-            member_set.append(("No relevant data", int(100)))
-        return jsonify(member_set)
+        redis_progression_name = "z_top_progression_" + trending_name
+        keyw_value = r_serv_charts.zrevrangebyscore(redis_progression_name, '+inf', '-inf', withscores=True, start=0, num=10)
+        return jsonify(keyw_value)

 @app.route("/_moduleCharts", methods=['GET'])
 def modulesCharts():
@@ -458,7 +437,7 @@ def sentiment_analysis_trending():
     return render_template("sentiment_analysis_trending.html")


-@app.route("/sentiment_analysis_getplotdata/")
+@app.route("/sentiment_analysis_getplotdata/", methods=['GET'])
 def sentiment_analysis_getplotdata():
     # Get the top providers based on number of pastes
     oneHour = 60*60
@@ -467,19 +446,27 @@ def sentiment_analysis_getplotdata():
     dateStart = dateStart.replace(minute=0, second=0, microsecond=0)
     dateStart_timestamp = calendar.timegm(dateStart.timetuple())

-    to_return = {}
-    range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(0)[0], '+inf', '-inf', start=0, num=8)
-    # if empty, get yesterday top providers
-    print 'providers_set_'+ get_date_range(1)[1]
-    range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(1)[1], '+inf', '-inf', start=0, num=8) if range_providers == [] else range_providers
-    # if still empty, takes from all providers
-    if range_providers == []:
-        print 'today provider empty'
-        range_providers = r_serv_charts.smembers('all_provider_set')
+    getAllProviders = request.args.get('getProviders')
+    provider = request.args.get('provider')
+    allProvider = request.args.get('all')
+    if getAllProviders == 'True':
+        if allProvider == "True":
+            range_providers = r_serv_charts.smembers('all_provider_set')
+            return jsonify(list(range_providers))
+        else:
+            range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(0)[0], '+inf', '-inf', start=0, num=8)
+            # if empty, get yesterday top providers
+            range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(1)[1], '+inf', '-inf', start=0, num=8) if range_providers == [] else range_providers
+            # if still empty, takes from all providers
+            if range_providers == []:
+                print 'today provider empty'
+                range_providers = r_serv_charts.smembers('all_provider_set')
+            return jsonify(range_providers)

-    for cur_provider in range_providers:
-        print cur_provider
-        cur_provider_name = cur_provider + '_'
+    elif provider is not None:
+        to_return = {}
+
+        cur_provider_name = provider + '_'
         list_date = {}
         for cur_timestamp in range(int(dateStart_timestamp), int(dateStart_timestamp)-sevenDays-oneHour, -oneHour):
             cur_set_name = cur_provider_name + str(cur_timestamp)
@@ -489,9 +476,10 @@ def sentiment_analysis_getplotdata():
                 cur_value = r_serv_sentiment.get(cur_id)
                 list_value.append(cur_value)
             list_date[cur_timestamp] = list_value
-        to_return[cur_provider] = list_date
+        to_return[provider] = list_date

-    return jsonify(to_return)
+        return jsonify(to_return)
+    return "Bad request"
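Seen from a client, the endpoint now answers two request shapes instead of one bulk payload; illustrative calls (the URLs are from the diff, the host and port are assumptions):

import json
import urllib2

base = 'http://localhost:7000'  # assumed Flask host/port

# 1) List provider names; all=True returns every known provider.
providers = json.load(urllib2.urlopen(
    base + '/sentiment_analysis_getplotdata/?getProviders=True&all=True'))

# 2) Fetch the per-hour sentiment series for a single provider.
data = json.load(urllib2.urlopen(
    base + '/sentiment_analysis_getplotdata/?provider=' + providers[0]))
print data.keys()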
@@ -221,11 +221,24 @@ function create_queue_table() {

    for(i = 0; i < (glob_tabvar.row1).length;i++){
        var tr = document.createElement('TR')
-       for(j = 0; j < (glob_tabvar.row1[i]).length; j++){
+       for(j = 0; j < 2; j++){
            var td = document.createElement('TD')
-           td.appendChild(document.createTextNode(glob_tabvar.row1[i][j]));
+           var moduleNum = j == 0 ? "." + glob_tabvar.row1[i][3] : "";
+           td.appendChild(document.createTextNode(glob_tabvar.row1[i][j] + moduleNum));
            tr.appendChild(td)
        }
+       // Used to decide the color of the row
+       // We have glob_tabvar.row1[][j] with:
+       // - j=0: ModuleName
+       // - j=1: queueLength
+       // - j=2: LastProcessedPasteTime
+       // - j=3: Number of the module belonging in the same category
+       if (parseInt(glob_tabvar.row1[i][2]) > 60*2 && parseInt(glob_tabvar.row1[i][1]) > 2)
+           tr.className += " danger";
+       else if (parseInt(glob_tabvar.row1[i][2]) > 60*1)
+           tr.className += " warning";
+       else
+           tr.className += " success";
        tableBody.appendChild(tr);
    }
    Tablediv.appendChild(table);
@@ -1,4 +1,42 @@

+/* Functions and config */
+function add_new_graph_today(id) {
+    return "<div id=\"panel-today\" class=\"panel panel-default pannelToday"+id+"\">" +
+           "<div class=\"panel-heading\">" +
+           "<strong class=\"sparkLineStatsToday"+id+"t\">Graph "+id+"</strong>" +
+           "<strong class=\"sparkLineStatsToday"+id+"s pull-right\">Avg</strong>" +
+           "</div>" +
+           "<div class=\"panel-body panelInside\">" +
+           "<table class=\"table\">" +
+           "<tbody>" +
+           "<tr>" +
+           "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsToday"+id+"\"></div></td> " +
+           "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsToday"+id+"b\"></div></td> " +
+           "</tr>" +
+           "</tbody>" +
+           "</table>" +
+           "</div>" +
+           "</div>";
+};
+function add_new_graph_week(id) {
+    return "<div id=\"panel-week\" class=\"panel panel-default pannelWeek"+id+"\">" +
+           "<div class=\"panel-heading\">" +
+           "<strong class=\"sparkLineStatsWeek"+id+"t\">Graph "+id+"</strong>" +
+           "<strong class=\"sparkLineStatsWeek"+id+"s pull-right\">Avg</strong>" +
+           "</div>" +
+           "<div class=\"panel-body panelInside\">" +
+           "<table class=\"table\">" +
+           "<tbody>" +
+           "<tr>" +
+           "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsWeek"+id+"\"></div></td> " +
+           "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsWeek"+id+"b\"></div></td> " +
+           "</tr>" +
+           "</tbody>" +
+           "</table>" +
+           "</div>" +
+           "</div>";
+}
+
 function generate_offset_to_time(num){
     var to_ret = {};
     for(i=0; i<=num; i++) {
@@ -15,7 +53,7 @@
    var to_ret = {};
    for(i=day; i>=0; i--){
        for(j=0; j<24; j++){
-           var t1 =now.getDate()-i + ":";
+           var t1 =now.getDate()-i + ":";
            var t2 =now.getHours()-(23-j);
            t2 = t2 < 0 ? 24+t2 : t2;
            t2 += "h";
@@ -40,34 +78,79 @@
    barColor: '#00bf5f',
    negBarColor: '#f22929',
    zeroColor: '#ffff00',

    tooltipFormat: '<span style="color: {{color}}">●</span> {{offset:names}}, {{value}} </span>',
 };


-$.getJSON("/sentiment_analysis_getplotdata/",
-    function(data) {
-        var all_data = [];
-        var plot_data = [];
-        var graph_avg = [];
-        var array_provider = Object.keys(data);
-        var dates_providers = Object.keys(data[array_provider[0]]);
-        var dateStart = parseInt(dates_providers[0]);
-        var oneHour = 60*60;
-        var oneWeek = oneHour*24*7;
+/* Plot and queries */

-var all_graph_day_sum = 0.0;
-var all_graph_hour_sum = 0.0;
-var all_graph_hour_maxVal = 0.0;
-var all_day_avg = 0.0;
-var all_day_avg_maxVal = 0.0;
+var all_graph_day_sum = 0.0;
+var all_graph_hour_sum = 0.0;
+var all_graph_hour_sum_minus = 0.0;
+var all_graph_hour_maxVal = 0.0;
+var all_day_avg = 0.0;
+var all_day_avg_maxVal = 0.0;
+var graph_avg = [];
+var all_data = [];
+var provider_already_loaded = [];
+var totNumGraph = 0;

+// Query all provider names, then launch the query and plot process for each of them.
+// When everything is terminated, plot the widgets (Gauge, canvasJS, table)
+// input: all - set to 'True' if you take all providers
+function draw_page(all) {
+    $.getJSON("/sentiment_analysis_getplotdata/?getProviders=True&all="+all,
+        function(data) {
+            var promises = [];
+
+            var the_length = provider_already_loaded.length == 0 ? 0 : provider_already_loaded.length;
+            for(i=0; i<data.length; i++) {
+                if(provider_already_loaded.indexOf(data[i]) != -1) {
+                    continue;
+                } else {
+                    totNumGraph++;
+                    if(i % 2 == 0) {
+                        $("#today_divl").append(add_new_graph_today(i+the_length+1));
+                        $("#week_divl").append(add_new_graph_week(i+the_length+1));
+                    }
+                    else {
+                        $("#today_divr").append(add_new_graph_today(i+the_length+1));
+                        $("#week_divr").append(add_new_graph_week(i+the_length+1));
+                    }
+                    provider_already_loaded.push(data[i])
+                    promises.push(query_and_plot(data[i], i+the_length));
+                }
+            }
+
+            $.when.apply($, promises).done( function (arg) {
+                draw_widgets();
+                $("#LoadAll").show('fast');
+            });
+        }
+    );
+}
+
+// Query data and plot it for a given provider
+// input - provider: The provider name to be plotted
+// input - graphNum: The number of the graph (Use to plot on correct div)
+function query_and_plot(provider, graphNum) {
+    var query_plot = $.getJSON("/sentiment_analysis_getplotdata/?provider="+provider,
+        function(data) {
+            var plot_data = [];
+            var array_provider = Object.keys(data);
+            var dates_providers = Object.keys(data[array_provider[0]]);
+            var dateStart = parseInt(dates_providers[0]);
+            var oneHour = 60*60;
+            var oneWeek = oneHour*24*7;

-        for (graphNum=0; graphNum<8; graphNum++) {
            var max_value = 0.0;
            var max_value_day = 0.0;
            var graph_data = [];
            var spark_data = [];
-           var curr_provider = array_provider[graphNum];
+           var curr_provider = array_provider[0];
            var curr_sum = 0.0;
            var curr_sum_elem = 0.0;
            var day_sum = 0.0;
@@ -88,7 +171,6 @@ $.getJSON("/sentiment_analysis_getplotdata/",
            var neu = 0;

            for(i=0; i<data_array.length; i++){
-               //console.log(data_array[i].replace(/\'/g, '\"'));
                var curr_data = jQuery.parseJSON(data_array[i].replace(/\'/g, '\"'));
                compPosAvg += curr_data['compoundPos'];
                compNegAvg += curr_data['compoundNeg'];
@@ -109,8 +191,7 @@ $.getJSON("/sentiment_analysis_getplotdata/",
            max_value = Math.abs(pos-neg) > max_value ? Math.abs(pos-neg) : max_value;

            if(curr_date >= dateStart+oneWeek-23*oneHour){
-               max_value_day = Math.abs(pos-neg) > max_value_day ? Math.abs(pos-neg) : max_value_day;
-               day_sum += (pos-neg);
+               max_value_day = Math.abs(pos-neg) > max_value_day ? Math.abs(pos-neg) : max_value_day; day_sum += (pos-neg);
                day_sum_elem++;
            }
            if(curr_date > dateStart+oneWeek-2*oneHour && curr_date <=dateStart+oneWeek-oneHour){
@@ -121,17 +202,16 @@ $.getJSON("/sentiment_analysis_getplotdata/",
            }
            all_graph_day_sum += day_sum;
            all_graph_hour_sum += hour_sum;
+           all_graph_hour_sum_minus += hour_sum > 0 ? 0 : 1;
            all_graph_hour_maxVal = Math.abs(hour_sum) > all_graph_hour_maxVal ? Math.abs(hour_sum) : all_graph_hour_maxVal;

-           var curr_avg = curr_sum / (curr_sum_elem);
+           var curr_avg = curr_sum / (curr_sum_elem);
+           if(isNaN(curr_avg))
+               curr_avg = 0.0
            //var curr_avg = curr_sum / (oneWeek/oneHour);
            //var curr_avg = curr_sum / (spark_data.length);
            graph_avg.push([curr_provider, curr_avg]);
            plot_data.push(spark_data);
            all_data.push(graph_data);


            sparklineOptions.chartRangeMax = max_value;
            sparklineOptions.chartRangeMin = -max_value;
@@ -141,11 +221,11 @@ $.getJSON("/sentiment_analysis_getplotdata/",
            var num = graphNum + 1;
            var placeholder = '.sparkLineStatsWeek' + num;
            sparklineOptions.barWidth = 2;
-           $(placeholder).sparkline(plot_data[graphNum], sparklineOptions);
+           $(placeholder).sparkline(plot_data[0], sparklineOptions);
            $(placeholder+'t').text(curr_provider);
-           var curr_avg_text = isNaN(curr_avg) ? "No data" : curr_avg.toFixed(5);
+           var curr_avg_text = isNaN(curr_avg) ? "No data" : curr_avg.toFixed(5);
            $(placeholder+'s').text(curr_avg_text);


            sparklineOptions.barWidth = 18;
            sparklineOptions.tooltipFormat = '<span style="color: {{color}}">●</span> Avg: {{value}} </span>'
            $(placeholder+'b').sparkline([curr_avg], sparklineOptions);
@@ -169,8 +249,8 @@ $.getJSON("/sentiment_analysis_getplotdata/",


            // print today
-           var data_length = plot_data[graphNum].length;
-           var data_today = plot_data[graphNum].slice(data_length-24, data_length);
+           var data_length = plot_data[0].length;
+           var data_today = plot_data[0].slice(data_length-24, data_length);

            placeholder = '.sparkLineStatsToday' + num;
            sparklineOptions.barWidth = 14;
@@ -199,155 +279,124 @@ $.getJSON("/sentiment_analysis_getplotdata/",
(old and new lines interleaved as rendered; the removed code divides by a fixed 8 providers, the added draw_widgets() version divides by totNumGraph)
            $(avgName).addClass("panel-warning")
        }

    }//for loop


    /* ---------------- Gauge ---------------- */
    var gaugeOptions = {
        animateEasing: true,

        elementWidth: 200,
        elementHeight: 125,

        arcFillStart: 10,
        arcFillEnd: 12,
        arcFillTotal: 20,
        incTot: 1.0,

        arcBgColorLight: 200,
        arcBgColorSat: 0,
        arcStrokeFg: 20,
        arcStrokeBg: 30,

        colorArcFg: '#FF3300',
        animateSpeed: 1,

    };
    // Clone object
    var gaugeOptions2 = jQuery.extend(true, {}, gaugeOptions);
    var gaugeOptions3 = jQuery.extend(true, {}, gaugeOptions);


    gaugeOptions.appendTo = '#gauge_today_last_hour';
    gaugeOptions.dialLabel = 'Last hour';
    gaugeOptions.elementId = 'gauge1';
    var piePercent = (all_graph_hour_sum / 8) / all_graph_hour_maxVal;
    gaugeOptions.inc = piePercent;
    var gauge_today_last_hour = new FlexGauge(gaugeOptions);

    gaugeOptions2.appendTo = '#gauge_today_last_days';
    gaugeOptions2.dialLabel = 'Today';
    gaugeOptions2.elementId = 'gauge2';
    //piePercent = (all_graph_day_sum / (8*24)) / max_value;
    piePercent = (all_day_avg / 8) / all_day_avg_maxVal;
    gaugeOptions2.inc = piePercent;
    var gauge_today_last_days = new FlexGauge(gaugeOptions2);

    gaugeOptions3.appendTo = '#gauge_week';
    gaugeOptions3.dialLabel = 'Week';
    gaugeOptions3.elementId = 'gauge3';

    var graph_avg_sum = 0.0;
    var temp_max_val = 0.0;
    for (i=0; i<graph_avg.length; i++){
        graph_avg_sum += graph_avg[i][1];
        temp_max_val = Math.abs(graph_avg[i][1]) > temp_max_val ? Math.abs(graph_avg[i][1]) : temp_max_val;
    }

    piePercent = (graph_avg_sum / graph_avg.length) / temp_max_val;
    gaugeOptions3.inc = piePercent;
    var gauge_today_last_days = new FlexGauge(gaugeOptions3);
    );
    return query_plot
}

/* --------- Sort providers -------- */

graph_avg.sort(function(a, b){return b[1]-a[1]});
function draw_widgets() {

    for (i=1; i<6; i++){
        $('.worst'+i).text(graph_avg[7-(i-1)][0]);
        $('.best'+i).text(graph_avg[i-1][0]);
    }
    /* ---------------- Gauge ---------------- */
    var gaugeOptions = {
        animateEasing: true,

    /* ----------- CanvasJS ------------ */
        elementWidth: 200,
        elementHeight: 125,

    var comp_sum_day_pos = 0.0;
    var comp_sum_day_neg = 0.0;
    var comp_sum_hour_pos = 0.0;
    var comp_sum_hour_neg = 0.0;
    for(graphNum=0; graphNum<8; graphNum++){
        curr_graphData = all_data[graphNum];
        var gauge_data = curr_graphData.slice(curr_graphData.length-24, curr_graphData.length);
        for (i=1; i< gauge_data.length; i++){
            comp_sum_day_pos += gauge_data[i].compoundPos;
            comp_sum_day_neg += gauge_data[i].compoundNeg;
        arcFillStart: 10,
        arcFillEnd: 12,
        arcFillTotal: 20,
        incTot: 1.0,

            if(i == 23){
                comp_sum_hour_pos += gauge_data[i].compoundPos;
                comp_sum_hour_neg += gauge_data[i].compoundNeg;
            }
        arcBgColorLight: 200,
        arcBgColorSat: 0,
        arcStrokeFg: 20,
        arcStrokeBg: 30,

        colorArcFg: '#FF3300',
        animateSpeed: 1,

    };
    // Clone object
    var gaugeOptions2 = jQuery.extend(true, {}, gaugeOptions);
    var gaugeOptions3 = jQuery.extend(true, {}, gaugeOptions);


    gaugeOptions.appendTo = '#gauge_today_last_hour';
    gaugeOptions.dialLabel = 'Last hour';
    gaugeOptions.elementId = 'gauge1';
    var piePercent = (all_graph_hour_sum / (totNumGraph - all_graph_hour_sum_minus)) / all_graph_hour_maxVal;
    gaugeOptions.inc = piePercent;
    var gauge_today_last_hour = new FlexGauge(gaugeOptions);

    gaugeOptions2.appendTo = '#gauge_today_last_days';
    gaugeOptions2.dialLabel = 'Today';
    gaugeOptions2.elementId = 'gauge2';
    piePercent = (all_day_avg / totNumGraph) / all_day_avg_maxVal;
    gaugeOptions2.inc = piePercent;
    var gauge_today_last_days = new FlexGauge(gaugeOptions2);

    gaugeOptions3.appendTo = '#gauge_week';
    gaugeOptions3.dialLabel = 'Week';
    gaugeOptions3.elementId = 'gauge3';

    var graph_avg_sum = 0.0;
    var temp_max_val = 0.0;
    for (i=0; i<graph_avg.length; i++){
        graph_avg_sum += graph_avg[i][1];
        temp_max_val = Math.abs(graph_avg[i][1]) > temp_max_val ? Math.abs(graph_avg[i][1]) : temp_max_val;
    }

    piePercent = (graph_avg_sum / graph_avg.length) / temp_max_val;
    gaugeOptions3.inc = piePercent;
    var gauge_today_last_days = new FlexGauge(gaugeOptions3);


    /* --------- Sort providers -------- */

    graph_avg.sort(function(a, b){return b[1]-a[1]});

    for (i=1; i<6; i++){
        $('.worst'+i).text(graph_avg[7-(i-1)][0]);
        $('.best'+i).text(graph_avg[i-1][0]);
    }

    /* ----------- CanvasJS ------------ */

    var comp_sum_day_pos = 0.0;
    var comp_sum_day_neg = 0.0;
    var comp_sum_hour_pos = 0.0;
    var comp_sum_hour_neg = 0.0;
    for(graphNum=0; graphNum<totNumGraph; graphNum++){
        curr_graphData = all_data[graphNum];
        var gauge_data = curr_graphData.slice(curr_graphData.length-24, curr_graphData.length);
        for (i=1; i< gauge_data.length; i++){
            comp_sum_day_pos += gauge_data[i].compoundPos;
            comp_sum_day_neg += gauge_data[i].compoundNeg;

            if(i == 23){
                comp_sum_hour_pos += gauge_data[i].compoundPos;
                comp_sum_hour_neg += gauge_data[i].compoundNeg;
            }

        }

        var options_canvasJS_1 = {

            animationEnabled: true,
            axisY: {
                tickThickness: 0,
                lineThickness: 0,
                valueFormatString: " ",
                gridThickness: 0
            },
            axisX: {
                tickThickness: 0,
                lineThickness: 0,
                labelFontSize: 0.1,
            },
            data: [
            {
                toolTipContent: "<span style='\"'color: {color};'\"'><strong>Positive: </strong></span><span><strong>{y}</strong></span>",
                type: "bar",
                color: "green",
                dataPoints: [
                    {y: comp_sum_hour_pos/8}
                ]
            },
            {
                toolTipContent: "<span style='\"'color: {color};'\"'><strong>Negative: </strong></span><span><strong>{y}</strong></span>",
                type: "bar",
                color: "red",
                dataPoints: [
                    {y: comp_sum_hour_neg/8}
                ]
            }
            ]
        };

        var chart_canvas1 = new CanvasJS.Chart("bar_today_last_hour", options_canvasJS_1);
    }

    var options_canvasJS_2 = {

        animationEnabled: true,
        axisY: {
            tickThickness: 0,
            lineThickness: 0,
            valueFormatString: " ",
            gridThickness: 0
        },
        axisX: {
            tickThickness: 0,
            lineThickness: 0,
            labelFontSize: 0.1,
        },
        data: [
    var options_canvasJS_1 = {

        animationEnabled: true,
        axisY: {
            tickThickness: 0,
            lineThickness: 0,
            valueFormatString: " ",
            gridThickness: 0
        },
        axisX: {
            tickThickness: 0,
            lineThickness: 0,
            labelFontSize: 0.1,
        },
        data: [
        {
            toolTipContent: "<span style='\"'color: {color};'\"'><strong>Positive: </strong></span><span><strong>{y}</strong></span>",
            type: "bar",
            color: "green",
            dataPoints: [
                {y: comp_sum_day_pos/8}
                {y: comp_sum_hour_pos/totNumGraph}
            ]
        },
        {
@ -355,32 +404,51 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
|||
type: "bar",
|
||||
color: "red",
|
||||
dataPoints: [
|
||||
{y: comp_sum_day_neg/8}
|
||||
{y: comp_sum_hour_neg/totNumGraph}
|
||||
]
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var chart_canvas1 = new CanvasJS.Chart("bar_today_last_hour", options_canvasJS_1);
|
||||
|
||||
var options_canvasJS_2 = {
|
||||
|
||||
animationEnabled: true,
|
||||
axisY: {
|
||||
tickThickness: 0,
|
||||
lineThickness: 0,
|
||||
valueFormatString: " ",
|
||||
gridThickness: 0
|
||||
},
|
||||
axisX: {
|
||||
tickThickness: 0,
|
||||
lineThickness: 0,
|
||||
labelFontSize: 0.1,
|
||||
},
|
||||
data: [
|
||||
{
|
||||
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Positive: </strong></span><span><strong>{y}</strong></span>",
|
||||
type: "bar",
|
||||
color: "green",
|
||||
dataPoints: [
|
||||
{y: comp_sum_day_pos/totNumGraph}
|
||||
]
|
||||
};
|
||||
|
||||
var chart_canvas2 = new CanvasJS.Chart("bar_today_last_days", options_canvasJS_2);
|
||||
|
||||
chart_canvas1.render();
|
||||
chart_canvas2.render();
|
||||
|
||||
|
||||
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
},
|
||||
{
|
||||
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Negative: </strong></span><span><strong>{y}</strong></span>",
|
||||
type: "bar",
|
||||
color: "red",
|
||||
dataPoints: [
|
||||
{y: comp_sum_day_neg/totNumGraph}
|
||||
]
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var chart_canvas2 = new CanvasJS.Chart("bar_today_last_days", options_canvasJS_2);
|
||||
|
||||
chart_canvas1.render();
|
||||
chart_canvas2.render();
|
||||
|
||||
}
|
||||
|
|
|
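The substance of these hunks: the per-gauge averages were previously divided by a hardcoded 8 (a fixed assumed number of providers), and are now divided by totNumGraph, so the "today" gauges stay correct for any number of provider graphs. A minimal sketch of the same idea in JavaScript; the helper name and arguments are illustrative and not part of the patch:

    // Hypothetical helper illustrating the fix: average summed compound
    // scores over however many provider graphs exist, instead of over 8.
    function averageCompound(sum, providerCount) {
        // providerCount plays the role of totNumGraph in the template above;
        // guard against an empty provider list.
        return providerCount > 0 ? sum / providerCount : 0.0;
    }

    // e.g. {y: averageCompound(comp_sum_hour_pos, totNumGraph)}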
@ -154,7 +154,6 @@

    </script>

    <script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
    <script src="{{ url_for('static', filename='js/plugins/metisMenu/metisMenu.js') }}"></script>

</body>
|
@ -113,6 +113,7 @@

        </div>
        <!-- /.row -->
        <div class="row">
            <button id="LoadAll" class="btn btn-info" style="margin: 5px;"><span class="glyphicon glyphicon-download"> </span> Load data from all providers </button>

            <!-- Pannel TODAY -->
            <div class="row">
@ -128,139 +129,13 @@

<div class="col-lg-9" style="padding-left: 0px;">
    <!-- providers charts -->
    <div class="col-lg-6">
        <div class="sparkLineStats">
            <div id="panel-today" class="panel panel-default pannelToday1">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday1t">Graph 1</strong>
                    <strong class="sparkLineStatsToday1s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday1"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday1b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-today" class="panel panel-default pannelToday2">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday2t">Graph 2</strong>
                    <strong class="sparkLineStatsToday2s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday2"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday2b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-today" class="panel panel-default pannelToday3">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday3t">Graph 3</strong>
                    <strong class="sparkLineStatsToday3s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday3"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday3b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-today" class="panel panel-default pannelToday4">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday4t">Graph 4</strong>
                    <strong class="sparkLineStatsToday4s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday4"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday4b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="today_divl" class="sparkLineStats">

            </div>
        </div>
    <div class="col-lg-6">
        <div class="sparkLineStats">
            <div id="panel-today" class="panel panel-default pannelToday5">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday5t">Graph 5</strong>
                    <strong class="sparkLineStatsToday5s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday5"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday5b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-today" class="panel panel-default pannelToday6">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday6t">Graph 6</strong>
                    <strong class="sparkLineStatsToday6s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday6"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday6b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-today" class="panel panel-default pannelToday7">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday7t">Graph 7</strong>
                    <strong class="sparkLineStatsToday7s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday7"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday7b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-today" class="panel panel-default pannelToday8">
                <div class="panel-heading">
                    <strong class="sparkLineStatsToday8t">Graph 8</strong>
                    <strong class="sparkLineStatsToday8s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday8"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday8b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="today_divr" class="sparkLineStats">

            </div>
        </div>
    </div>
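The net effect of this hunk is that the eight hand-written Graph 1-8 panels are removed and only the empty today_divl / today_divr containers remain, so panels can be generated per provider in JavaScript instead of being fixed at eight. A rough sketch of that pattern, assuming jQuery is loaded as in this template; the generated markup and the split across columns are assumptions, the real generation lives in sentiment_trending.js:

    // Hypothetical sketch: build one panel per provider and append it
    // alternately to the left/right columns kept in the template.
    for (var g = 0; g < totNumGraph; g++) {
        var panel = '<div class="panel panel-default">' +
                    '<div class="panel-heading"><strong>Graph ' + (g + 1) + '</strong>' +
                    '<strong class="pull-right">Avg</strong></div>' +
                    '<div class="panel-body panelInside"></div>' +
                    '</div>';
        $(g % 2 == 0 ? "#today_divl" : "#today_divr").append(panel);
    }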
@ -304,139 +179,13 @@

<div class="col-lg-9" style="padding-left: 0px;">
    <!-- providers charts -->
    <div class="col-lg-6">
        <div class="sparkLineStats">
            <div id="panel-week" class="panel panel-default pannelWeek1">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek1t">Graph 1</strong>
                    <strong class="sparkLineStatsWeek1s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek1"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek1b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-week" class="panel panel-default pannelWeek2">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek2t">Graph 2</strong>
                    <strong class="sparkLineStatsWeek2s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek2"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek2b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-week" class="panel panel-default pannelWeek3">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek3t">Graph 3</strong>
                    <strong class="sparkLineStatsWeek3s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek3"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek3b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-week" class="panel panel-default pannelWeek4">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek4t">Graph 4</strong>
                    <strong class="sparkLineStatsWeek4s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek4"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek4b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="week_divl" class="sparkLineStats">

            </div>
        </div>
    <div class="col-lg-6">
        <div class="sparkLineStats">
            <div id="panel-week" class="panel panel-default pannelWeek5">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek5t">Graph 5</strong>
                    <strong class="sparkLineStatsWeek5s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek5"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek5b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-week" class="panel panel-default pannelWeek6">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek6t">Graph 6</strong>
                    <strong class="sparkLineStatsWeek6s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek6"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek6b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-week" class="panel panel-default pannelWeek7">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek7t">Graph 7</strong>
                    <strong class="sparkLineStatsWeek7s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek7"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek7b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="panel-week" class="panel panel-default pannelWeek8">
                <div class="panel-heading">
                    <strong class="sparkLineStatsWeek8t">Graph 8</strong>
                    <strong class="sparkLineStatsWeek8s pull-right">Avg</strong>
                </div>
                <div class="panel-body panelInside">
                    <table class="table">
                        <tbody>
                            <tr>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek8"></div></td>
                                <td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek8b"></div></td>
                            </tr>
                        </tbody>
                    </table>
                </div>
            </div>
            <div id="week_divr" class="sparkLineStats">

            </div>
        </div>
    </div>
@ -490,8 +239,6 @@

            </div>
        </div>

    <!-- /.row -->
    </div>
@ -502,13 +249,15 @@

<!-- import graph function -->
<script src="{{ url_for('static', filename='js/sentiment_trending.js') }}"></script>
<script>
    $("#LoadAll").hide();

    $(document).ready(function(){
        activePage = $('h1.page-header').attr('data-page');
        $("#"+activePage).addClass("active");
        $('[data-toggle="tooltip"]').tooltip();

        $("#LoadAll").click(function(){ draw_page("True"); });
        draw_page("False");
        // Reload every 30min
        setTimeout(function(){ location.reload(); }, 30*60*1000);
    });
</script>
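This added script wires up a lazy-load flow: on page load only the default providers are drawn (draw_page("False")), the "Load data from all providers" button re-draws with every provider on demand (draw_page("True")), and the page refreshes itself every 30 minutes. A sketch of what a draw_page of that shape could look like; the function body and the request parameter name are assumptions, only the endpoint and call sites appear in the patch:

    // Hypothetical outline of draw_page(), matching how the template calls it.
    function draw_page(load_all) {
        // load_all is the string "True" or "False", as passed above.
        $.getJSON("/sentiment_analysis_getplotdata/", { allProviders: load_all },
            function (data) {
                // ...build the sparkline panels and CanvasJS charts from data...
            });
    }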
@ -233,15 +233,15 @@

$.getJSON(url, function (data) {
    if (data.length != 0) {
        var html_to_add = "";
        html_to_add += "<table class=\"table table-striped\">";
        html_to_add += "<table id=\"modal-table\" class=\"table table-striped\">";
        html_to_add += "<thead>";
        html_to_add += "<tr>";
        html_to_add += "<th>Source</th>";
        html_to_add += "<th>Date</th>";
        html_to_add += "<th>Encoding</th>";
        html_to_add += "<th>Size (Kb)</th>";
        html_to_add += "<th>Mime</th>";
        html_to_add += "<th>(# lines, Max line length)</th>";
        html_to_add += "<th># lines</th>";
        html_to_add += "<th>Max length</th>";
        html_to_add += "<th>Preview</th>";
        html_to_add += "</tr>";
        html_to_add += "</thead>";
@ -253,8 +253,8 @@

        html_to_add += "<td>"+curr_data.date+"</td>";
        html_to_add += "<td>"+curr_data.encoding+"</td>";
        html_to_add += "<td>"+curr_data.size+"</td>";
        html_to_add += "<td>"+curr_data.mime+"</td>";
        html_to_add += "<td>("+curr_data.lineinfo[0]+", "+curr_data.lineinfo[1]+")</td>";
        html_to_add += "<td>"+curr_data.lineinfo[0]+"</td>";
        html_to_add += "<td>"+curr_data.lineinfo[1]+"</td>";
        html_to_add += "<td><div class=\"row\"><button class=\"btn btn-xs btn-default\" data-toggle=\"popover\" data-placement=\"left\" data-content=\""+curr_data.content+"\">Preview content</button><a target=\"_blank\" href=\"{{ url_for('showsavedpaste') }}?paste="+curr_data.path+"&num=0\"> <button type=\"button\" class=\"btn btn-xs btn-info\">Show Paste</button></a></div></td>";

        html_to_add += "</tr>";
@ -264,6 +264,7 @@

        $("#mymodalbody").html(html_to_add);
        $("[data-toggle=popover]").popover();
        $("#button_show_plot").attr("href", "{{ url_for('terms_plot_tool')}}"+"?term="+the_modal.attr('data-term') );
        $('#modal-table').DataTable();
    } else {
        $("#mymodalbody").html("No paste containing this term has been received yet.");
        $("#button_show_plot").attr("href", "{{ url_for('terms_plot_tool')}}"+"?term="+the_modal.attr('data-term') );
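The new $('#modal-table').DataTable(); call is why the generated table now carries an id: DataTables enhances an existing element, so the table must be selectable and already attached to the DOM when the call runs, which is why it comes after the .html(html_to_add) line. A minimal sketch of that order of operations, with illustrative row values; it assumes jQuery and the DataTables plugin are loaded, as on this page:

    // Build the markup, insert it into the DOM, then enhance it, in that order.
    var html = '<table id="modal-table" class="table table-striped">' +
               '<thead><tr><th>Source</th><th>Date</th></tr></thead>' +
               '<tbody><tr><td>example_source</td><td>2016-01-01</td></tr></tbody>' +
               '</table>';
    $("#mymodalbody").html(html);   // rows must exist in the DOM first
    $("#modal-table").DataTable();  // then DataTables adds sorting and paging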