Merge branch 'production' of github.com:mokaddem/AIL-framework into production
.dockerignore (new file, 2 lines)

@@ -0,0 +1,2 @@
+*.git
+*.md
.travis.yml (66 changed lines)

@@ -5,27 +5,7 @@ python:

 sudo: required

-dist: trusty
+cache: pip

-addons:
-  apt:
-    packages:
-    # General dependencies
-    - python-pip
-    - python-virtualenv
-    - python-dev
-    - g++
-    - python-tk
-    - unzip
-    - libsnappy-dev
-    # Needed for bloomfilters
-    - libssl-dev
-    - python-numpy
-    - libfreetype6-dev
-    # Leveldb
-    - libgmp-dev
-    - libev-dev
-    - cmake
-
 env:
   - AIL_HOME=$TRAVIS_BUILD_DIR AIL_BIN=$TRAVIS_BUILD_DIR/bin/ \

@@ -35,49 +15,7 @@ env:

 install:
-  - pip install -U pip
+  - ./installing_deps.sh
-  # DNS
-  - sudo apt-get install -y libadns1 libadns1-dev screen
-  # required for mathplotlib
-  - test ! -L /usr/include/ft2build.h && sudo ln -s freetype2/ft2build.h /usr/include/
-  - pip install distribute
-  # Redis
-  - test ! -d redis/ && git clone https://github.com/antirez/redis.git
-  - pushd redis
-  - git checkout 3.2
-  - make
-  - popd
-  # Redis leveldb
-  - test ! -d redis-leveldb/ && git clone https://github.com/KDr2/redis-leveldb.git
-  - pushd redis-leveldb/
-  - git submodule init
-  - git submodule update
-  - make
-  - popd
-  # Faup
-  - test ! -d faup && git clone https://github.com/stricaud/faup.git
-  - pushd faup/
-  - test ! -d build && mkdir build
-  - cd build
-  - cmake .. && make
-  - sudo make install
-  - echo '/usr/local/lib' | sudo tee -a /etc/ld.so.conf.d/faup.conf
-  - sudo ldconfig
-  - popd
-  # PyFaup
-  - pushd faup/src/lib/bindings/python/
-  - python setup.py install
-  - popd
-  # Set config
-  - cp bin/packages/config.cfg.sample bin/packages/config.cfg
-  - mkdir -p $AIL_HOME/{PASTES,Blooms,dumps}
-  - mkdir -p $AIL_HOME/LEVEL_DB_DATA/{2016,2015,2014,2013}
-  - pip install -r pip_packages_requirement.txt
-  - python -m textblob.download_corpora
-  - pushd var/www/
-  - ./update_thirdparty.sh
-  - popd

 script:
   - pushd bin
Dockerfile (new file, 9 lines)

@@ -0,0 +1,9 @@
+FROM ubuntu:14.04
+
+RUN mkdir /opt/AIL && apt-get update -y \
+    && apt-get install git python-dev build-essential \
+    libffi-dev libssl-dev libfuzzy-dev wget -y
+ADD . /opt/AIL
+WORKDIR /opt/AIL
+RUN ./installing_deps.sh && cd var/www/ && ./update_thirdparty.sh
+CMD bash docker_start.sh
@@ -7,7 +7,7 @@ AIL

 AIL framework - Framework for Analysis of Information Leaks

-AIL is a modular framework to analyse potential information leaks from unstructured data sources like pastes from Pastebin or similar services. AIL framework is flexible and can be extended to support other functionalities to mine sensitive information.
+AIL is a modular framework to analyse potential information leaks from unstructured data sources like pastes from Pastebin or similar services or unstructured data streams. AIL framework is flexible and can be extended to support other functionalities to mine sensitive information.

 ![Dashboard](./doc/screenshots/dashboard.png?raw=true "AIL framework dashboard")

@@ -38,7 +38,7 @@ Terms manager and occurence
 ![Term-Plot](./doc/screenshots/terms-plot.png?raw=true "AIL framework termPlot")

-AIL framework screencast: https://www.youtube.com/watch?v=9idfHCIMzBY
+[AIL framework screencast](https://www.youtube.com/watch?v=1_ZrZkRKmNo)

 Features
 --------

@@ -54,7 +54,7 @@ Features
 * A full-text indexer module to index unstructured information
 * Modules and web statistics
 * Global sentiment analysis for each providers based on nltk vader module
-* Terms tracking and occurence
+* Terms tracking and occurrence
 * Many more modules for extracting phone numbers, credentials and others

 Installation
@@ -24,7 +24,7 @@ if __name__ == "__main__":
     publisher.port = 6380
     publisher.channel = "Script"

-    config_section = 'Browse_warning_paste'
+    config_section = 'BrowseWarningPaste'

     p = Process(config_section)
@@ -66,7 +66,7 @@ if __name__ == "__main__":
             publisher.warning('{}Checked {} valid number(s)'.format(
                 to_print, len(creditcard_set)))
             #Send to duplicate
-            p.populate_set_out(filepath, 'Duplicate')
+            p.populate_set_out(filename, 'Duplicate')
             #send to Browse_warning_paste
             p.populate_set_out('creditcard;{}'.format(filename), 'BrowseWarningPaste')
         else:
@@ -22,8 +22,8 @@ from pubsublogger import publisher
 from packages import lib_words
 import datetime
 import calendar
+import os
-from Helper import Process
+import ConfigParser

 # Config Variables
 Refresh_rate = 60*5 #sec

@@ -96,13 +96,19 @@ if __name__ == '__main__':
     # Script is the default channel used for the modules.
     publisher.channel = 'Script'

-    config_section = 'CurveManageTopSets'
-    p = Process(config_section)
+    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
+    if not os.path.exists(configfile):
+        raise Exception('Unable to find the configuration file. \
+                        Did you set environment variables? \
+                        Or activate the virtualenv.')
+
+    cfg = ConfigParser.ConfigParser()
+    cfg.read(configfile)

     server_term = redis.StrictRedis(
-        host=p.config.get("Redis_Level_DB_TermFreq", "host"),
-        port=p.config.get("Redis_Level_DB_TermFreq", "port"),
-        db=p.config.get("Redis_Level_DB_TermFreq", "db"))
+        host=cfg.get("Redis_Level_DB_TermFreq", "host"),
+        port=cfg.getint("Redis_Level_DB_TermFreq", "port"),
+        db=cfg.getint("Redis_Level_DB_TermFreq", "db"))

     publisher.info("Script Curve_manage_top_set started")

@@ -113,11 +119,6 @@ if __name__ == '__main__':

     while True:
         # Get one message from the input queue (module only work if linked with a queue)
-        message = p.get_from_set()
-        if message is None:
-            publisher.debug("{} queue is empty, waiting".format(config_section))
-            print 'sleeping'
-            time.sleep(Refresh_rate) # sleep a long time then manage the set
-            manage_top_set()
-            continue
+        time.sleep(Refresh_rate) # sleep a long time then manage the set
+        manage_top_set()
@@ -1,182 +0,0 @@ (file removed)
-#!/usr/bin/env python2
-# -*-coding:UTF-8 -*
-
-"""
-The Duplicate module
-====================
-
-This huge module is, in short term, checking duplicates.
-
-Requirements:
--------------
-
-"""
-import redis
-import os
-import time
-import datetime
-import json
-import ssdeep
-from packages import Paste
-from pubsublogger import publisher
-
-from Helper import Process
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-
-    config_section = 'Duplicates'
-    save_dico_and_reload = 1 #min
-    time_1 = time.time()
-    flag_reload_from_disk = True
-    flag_write_to_disk = False
-
-    p = Process(config_section)
-
-    # REDIS #
-    # DB OBJECT & HASHS ( DISK )
-    # FIXME increase flexibility
-    dico_redis = {}
-    for year in xrange(2013, datetime.date.today().year+1):
-        for month in xrange(0, 16):
-            dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
-                host=p.config.get("Redis_Level_DB", "host"), port=year,
-                db=month)
-            #print("dup: "+str(year)+str(month).zfill(2)+"\n")
-
-    # FUNCTIONS #
-    publisher.info("Script duplicate started")
-
-    dicopath = os.path.join(os.environ['AIL_HOME'],
-                            p.config.get("Directories", "dicofilters"))
-
-    dico_path_set = set()
-    while True:
-        try:
-            hash_dico = {}
-            dupl = []
-
-            x = time.time()
-
-            message = p.get_from_set()
-            if message is not None:
-                path = message
-                PST = Paste.Paste(path)
-            else:
-                publisher.debug("Script Attribute is idling 10s")
-                time.sleep(10)
-                continue
-
-            PST._set_p_hash_kind("ssdeep")
-
-            # Assignate the correct redis connexion
-            r_serv1 = dico_redis[PST.p_date.year + PST.p_date.month]
-
-            # Creating the dicor name: dicoyyyymm
-            filedicopath = os.path.join(dicopath, 'dico' + PST.p_date.year +
-                                        PST.p_date.month)
-            filedicopath_today = filedicopath
-
-            # Save I/O
-            if time.time() - time_1 > save_dico_and_reload*60:
-                flag_write_to_disk = True
-
-            if os.path.exists(filedicopath):
-                if flag_reload_from_disk == True:
-                    flag_reload_from_disk = False
-                    print 'Reloading'
-                    with open(filedicopath, 'r') as fp:
-                        today_dico = json.load(fp)
-            else:
-                today_dico = {}
-                with open(filedicopath, 'w') as fp:
-                    json.dump(today_dico, fp)
-
-            # For now, just use monthly dico
-            dico_path_set.add(filedicopath)
-
-            # UNIQUE INDEX HASHS TABLE
-            yearly_index = str(datetime.date.today().year)+'00'
-            r_serv0 = dico_redis[yearly_index]
-            r_serv0.incr("current_index")
-            index = r_serv0.get("current_index")+str(PST.p_date)
-
-            # For each dico
-            opened_dico = []
-            for dico in dico_path_set:
-                # Opening dico
-                if dico == filedicopath_today:
-                    opened_dico.append([dico, today_dico])
-                else:
-                    with open(dico, 'r') as fp:
-                        opened_dico.append([dico, json.load(fp)])
-
-            #retrieve hash from paste
-            paste_hash = PST._get_p_hash()
-
-            # Go throught the Database of the dico (of the month)
-            threshold_dup = 99
-            for dico_name, dico in opened_dico:
-                for dico_key, dico_hash in dico.items():
-                    percent = ssdeep.compare(dico_hash, paste_hash)
-                    if percent > threshold_dup:
-                        db = dico_name[-6:]
-                        # Go throught the Database of the dico filter (month)
-                        r_serv_dico = dico_redis[db]
-
-                        # index of paste
-                        index_current = r_serv_dico.get(dico_hash)
-                        paste_path = r_serv_dico.get(index_current)
-                        if paste_path != None:
-                            hash_dico[dico_hash] = (paste_path, percent)
-
-                    #print 'comparing: ' + str(dico_hash[:20]) + ' and ' + str(paste_hash[:20]) + ' percentage: ' + str(percent)
-                    print ' '+ PST.p_path[44:] +', '+ paste_path[44:] + ', ' + str(percent)
-
-            # Add paste in DB to prevent its analyse twice
-            # HASHTABLES PER MONTH (because of r_serv1 changing db)
-            r_serv1.set(index, PST.p_path)
-            r_serv1.sadd("INDEX", index)
-            # Adding the hash in Redis
-            r_serv1.set(paste_hash, index)
-            r_serv1.sadd("HASHS", paste_hash)
-            ##################### Similarity found #######################
-
-            # if there is data in this dictionnary
-            if len(hash_dico) != 0:
-                for dico_hash, paste_tuple in hash_dico.items():
-                    paste_path, percent = paste_tuple
-                    dupl.append((paste_path, percent))
-
-                # Creating the object attribute and save it.
-                to_print = 'Duplicate;{};{};{};'.format(
-                    PST.p_source, PST.p_date, PST.p_name)
-                if dupl != []:
-                    PST.__setattr__("p_duplicate", dupl)
-                    PST.save_attribute_redis("p_duplicate", dupl)
-                    publisher.info('{}Detected {}'.format(to_print, len(dupl)))
-                    print '{}Detected {}'.format(to_print, len(dupl))
-
-                y = time.time()
-
-                publisher.debug('{}Processed in {} sec'.format(to_print, y-x))
-
-            # Adding the hash in the dico of the month
-            today_dico[index] = paste_hash
-
-            if flag_write_to_disk:
-                time_1 = time.time()
-                flag_write_to_disk = False
-                flag_reload_from_disk = True
-                print 'writing'
-                with open(filedicopath, 'w') as fp:
-                    json.dump(today_dico, fp)
-        except IOError:
-            to_print = 'Duplicate;{};{};{};'.format(
-                PST.p_source, PST.p_date, PST.p_name)
-            print "CRC Checksum Failed on :", PST.p_path
-            publisher.error('{}CRC Checksum Failed'.format(to_print))
@@ -16,6 +16,7 @@ import ConfigParser
 import os
 import zmq
 import time
+import datetime
 import json

@@ -107,6 +108,7 @@ class Process(object):
         self.modules = ConfigParser.ConfigParser()
         self.modules.read(modulesfile)
         self.subscriber_name = conf_section

         self.pubsub = None
         if self.modules.has_section(conf_section):
             self.pubsub = PubSub()

@@ -117,6 +119,9 @@
                 port=self.config.get('RedisPubSub', 'port'),
                 db=self.config.get('RedisPubSub', 'db'))

+        self.moduleNum = os.getpid()

     def populate_set_in(self):
         # monoproc
         src = self.modules.get(self.subscriber_name, 'subscribe')

@@ -132,7 +137,30 @@
         in_set = self.subscriber_name + 'in'
         self.r_temp.hset('queues', self.subscriber_name,
                          int(self.r_temp.scard(in_set)))
-        return self.r_temp.spop(in_set)
+        message = self.r_temp.spop(in_set)
+        timestamp = int(time.mktime(datetime.datetime.now().timetuple()))
+        dir_name = os.environ['AIL_HOME']+self.config.get('Directories', 'pastes')
+
+        if message is None:
+            return None
+
+        else:
+            try:
+                if ".gz" in message:
+                    path = message.split(".")[-2].split("/")[-1]
+                else:
+                    path = "?"
+                value = str(timestamp) + ", " + path
+                self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value)
+                self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum))
+                return message
+
+            except:
+                path = "?"
+                value = str(timestamp) + ", " + path
+                self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value)
+                self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum))
+                return message

     def populate_set_out(self, msg, channel=None):
         # multiproc
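Note on the change above: get_from_set now leaves a heartbeat in Redis every time a module pops a paste. The value "<timestamp>, <paste name>" is stored under MODULE_<queue>_<pid> and the pid is added to the set MODULE_TYPE_<queue>, in the same Redis database the queues use (the Redis_Queues section of config.cfg). The new bin/ModuleInformation.py further down in this diff consumes exactly these keys. A minimal stand-alone sketch of reading them back; this is not part of the commit, only an illustration that reuses key names and config paths shown in the diff:

    import os
    import redis
    import ConfigParser

    # Read the same Redis_Queues settings the framework uses.
    cfg = ConfigParser.ConfigParser()
    cfg.read(os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg'))
    r = redis.StrictRedis(host=cfg.get("Redis_Queues", "host"),
                          port=cfg.getint("Redis_Queues", "port"),
                          db=cfg.getint("Redis_Queues", "db"))

    # Every running module stores "timestamp, paste" under MODULE_<queue>_<pid>
    # and registers its pid in the set MODULE_TYPE_<queue>.
    for key in r.keys('MODULE_TYPE_*'):
        queue = key[len('MODULE_TYPE_'):]
        for pid in r.smembers(key):
            value = r.get('MODULE_' + queue + '_' + pid)
            if value is not None:
                timestamp, paste = value.split(', ')
                print queue, pid, timestamp, paste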
@@ -112,33 +112,35 @@ function launching_scripts {

 echo -e $GREEN"\t* Launching ZMQ scripts"$DEFAULT

+screen -S "Script" -X screen -t "ModuleInformation" bash -c './ModuleInformation.py -k 0 -c 1; read x'
+sleep 0.1
 screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "Duplicate" bash -c './Duplicate_ssdeep_v2.py; read x'
+screen -S "Script" -X screen -t "Duplicates" bash -c './Duplicates.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "Attribute" bash -c './Attribute.py; read x'
+screen -S "Script" -X screen -t "Attributes" bash -c './Attributes.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "Line" bash -c './Line.py; read x'
+screen -S "Script" -X screen -t "Lines" bash -c './Lines.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "DomainClassifier" bash -c './DomClassifier.py; read x'
+screen -S "Script" -X screen -t "DomClassifier" bash -c './DomClassifier.py; read x'
 sleep 0.1
 screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
 sleep 0.1
 screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "CreditCard" bash -c './CreditCard.py; read x'
+screen -S "Script" -X screen -t "CreditCards" bash -c './CreditCards.py; read x'
 sleep 0.1
 screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
 sleep 0.1
 screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "Url" bash -c './Url.py; read x'
+screen -S "Script" -X screen -t "Web" bash -c './Web.py; read x'
 sleep 0.1
 screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
 sleep 0.1
 screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "Curve_topsets_manager" bash -c './Curve_manage_top_sets.py; read x'
+screen -S "Script" -X screen -t "CurveManageTopSets" bash -c './CurveManageTopSets.py; read x'
 sleep 0.1
 screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
 sleep 0.1

@@ -156,9 +158,9 @@ function launching_scripts {
 sleep 0.1
 screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "Browse_warning_paste" bash -c './Browse_warning_paste.py; read x'
+screen -S "Script" -X screen -t "BrowseWarningPaste" bash -c './BrowseWarningPaste.py; read x'
 sleep 0.1
-screen -S "Script" -X screen -t "SentimentAnalyser" bash -c './SentimentAnalyser.py; read x'
+screen -S "Script" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'

 }
bin/ModuleInformation.py (new executable file, 215 lines)

@@ -0,0 +1,215 @@
+#!/usr/bin/env python2
+# -*-coding:UTF-8 -*
+
+'''
+
+This module can be use to see information of running modules.
+These information are logged in "logs/moduleInfo.log"
+
+It can also try to manage them by killing inactive one.
+However, it does not support mutliple occurence of the same module
+(It will kill the first one obtained by get)
+
+'''
+
+import time
+import datetime
+import redis
+import os
+import signal
+import argparse
+from subprocess import PIPE, Popen
+import ConfigParser
+import json
+from terminaltables import AsciiTable
+import textwrap
+
+# CONFIG VARIABLES
+threshold_stucked_module = 60*60*1 #1 hour
+log_filename = "../logs/moduleInfo.log"
+command_search_pid = "ps a -o pid,cmd | grep {}"
+command_search_name = "ps a -o pid,cmd | grep {}"
+command_restart_module = "screen -S \"Script\" -X screen -t \"{}\" bash -c \"./{}.py; read x\""
+
+
+def getPid(module):
+    p = Popen([command_search_pid.format(module+".py")], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
+    for line in p.stdout:
+        splittedLine = line.split()
+        if 'python2' in splittedLine:
+            return int(splittedLine[0])
+        else:
+            return None
+
+def clearRedisModuleInfo():
+    for k in server.keys("MODULE_*"):
+        server.delete(k)
+
+def cleanRedis():
+    for k in server.keys("MODULE_TYPE_*"):
+        moduleName = k[12:].split('_')[0]
+        for pid in server.smembers(k):
+            flag_pid_valid = False
+            proc = Popen([command_search_name.format(pid)], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
+            for line in proc.stdout:
+                splittedLine = line.split()
+                if ('python2' in splittedLine or 'python' in splittedLine) and "./"+moduleName+".py" in splittedLine:
+                    flag_pid_valid = True
+
+            if not flag_pid_valid:
+                print flag_pid_valid, 'cleaning', pid, 'in', k
+                server.srem(k, pid)
+                time.sleep(5)
+
+
+def kill_module(module):
+    print ''
+    print '-> trying to kill module:', module
+
+    pid = getPid(module)
+    if pid is not None:
+        os.kill(pid, signal.SIGUSR1)
+        time.sleep(1)
+        if getPid(module) is None:
+            print module, 'has been killed'
+            print 'restarting', module, '...'
+            p2 = Popen([command_restart_module.format(module, module)], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
+
+        else:
+            print 'killing failed, retrying...'
+            time.sleep(3)
+            os.kill(pid, signal.SIGUSR1)
+            time.sleep(1)
+            if getPid(module) is None:
+                print module, 'has been killed'
+                print 'restarting', module, '...'
+                p2 = Popen([command_restart_module.format(module, module)], stdin=PIPE, stdout=PIPE, bufsize=1, shell=True)
+            else:
+                print 'killing failed!'
+                time.sleep(7)
+
+
+if __name__ == "__main__":
+
+    parser = argparse.ArgumentParser(description='Show info concerning running modules and log suspected stucked modules. May be use to automatically kill and restart stucked one.')
+    parser.add_argument('-r', '--refresh', type=int, required=False, default=1, help='Refresh rate')
+    parser.add_argument('-k', '--autokill', type=int, required=False, default=0, help='Enable auto kill option (1 for TRUE, anything else for FALSE)')
+    parser.add_argument('-c', '--clear', type=int, required=False, default=0, help='Clear the current module information (Used to clear data from old launched modules)')
+
+    args = parser.parse_args()
+
+    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
+    if not os.path.exists(configfile):
+        raise Exception('Unable to find the configuration file. \
+                        Did you set environment variables? \
+                        Or activate the virtualenv.')
+
+    cfg = ConfigParser.ConfigParser()
+    cfg.read(configfile)
+
+    # REDIS #
+    server = redis.StrictRedis(
+        host=cfg.get("Redis_Queues", "host"),
+        port=cfg.getint("Redis_Queues", "port"),
+        db=cfg.getint("Redis_Queues", "db"))
+
+    if args.clear == 1:
+        clearRedisModuleInfo()
+
+    lastTime = datetime.datetime.now()
+
+    module_file_array = set()
+    with open('../doc/all_modules.txt', 'r') as module_file:
+        for line in module_file:
+            module_file_array.add(line[:-1])
+
+    while True:
+
+        all_queue = set()
+        printarray1 = []
+        printarray2 = []
+        printarray3 = []
+        for queue, card in server.hgetall("queues").iteritems():
+            all_queue.add(queue)
+            key = "MODULE_" + queue + "_"
+            keySet = "MODULE_TYPE_" + queue
+
+            for moduleNum in server.smembers(keySet):
+                value = server.get(key + str(moduleNum))
+                if value is not None:
+                    timestamp, path = value.split(", ")
+                    if timestamp is not None and path is not None:
+                        startTime_readable = datetime.datetime.fromtimestamp(int(timestamp))
+                        processed_time_readable = str((datetime.datetime.now() - startTime_readable)).split('.')[0]
+
+                        if int(card) > 0:
+                            if int((datetime.datetime.now() - startTime_readable).total_seconds()) > threshold_stucked_module:
+                                log = open(log_filename, 'a')
+                                log.write(json.dumps([queue, card, str(startTime_readable), str(processed_time_readable), path]) + "\n")
+                                if args.autokill == 1:
+                                    kill_module(queue)
+
+                            printarray1.append([str(queue), str(moduleNum), str(card), str(startTime_readable), str(processed_time_readable), str(path)])
+
+                        else:
+                            printarray2.append([str(queue), str(moduleNum), str(card), str(startTime_readable), str(processed_time_readable), str(path)])
+
+        for curr_queue in module_file_array:
+            if curr_queue not in all_queue:
+                printarray3.append([curr_queue, "Not running"])
+
+        printarray1.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
+        printarray2.sort(lambda x,y: cmp(x[4], y[4]), reverse=True)
+        printarray1.insert(0,["Queue", "PID", "Amount", "Paste start time", "Processing time for current paste (H:M:S)", "Paste hash"])
+        printarray2.insert(0,["Queue", "PID","Amount", "Paste start time", "Time since idle (H:M:S)", "Last paste hash"])
+        printarray3.insert(0,["Queue", "State"])
+
+        os.system('clear')
+        t1 = AsciiTable(printarray1, title="Working queues")
+        t1.column_max_width(1)
+        if not t1.ok:
+            longest_col = t1.column_widths.index(max(t1.column_widths))
+            max_length_col = t1.column_max_width(longest_col)
+            if max_length_col > 0:
+                for i, content in enumerate(t1.table_data):
+                    if len(content[longest_col]) > max_length_col:
+                        temp = ''
+                        for l in content[longest_col].splitlines():
+                            if len(l) > max_length_col:
+                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
+                            else:
+                                temp += l + '\n'
+                        content[longest_col] = temp.strip()
+                        t1.table_data[i] = content
+
+        t2 = AsciiTable(printarray2, title="Idling queues")
+        t2.column_max_width(1)
+        if not t2.ok:
+            longest_col = t2.column_widths.index(max(t2.column_widths))
+            max_length_col = t2.column_max_width(longest_col)
+            if max_length_col > 0:
+                for i, content in enumerate(t2.table_data):
+                    if len(content[longest_col]) > max_length_col:
+                        temp = ''
+                        for l in content[longest_col].splitlines():
+                            if len(l) > max_length_col:
+                                temp += '\n'.join(textwrap.wrap(l, max_length_col)) + '\n'
+                            else:
+                                temp += l + '\n'
+                        content[longest_col] = temp.strip()
+                        t2.table_data[i] = content
+
+        t3 = AsciiTable(printarray3, title="Not running queues")
+        t3.column_max_width(1)
+
+        print t1.table
+        print '\n'
+        print t2.table
+        print '\n'
+        print t3.table
+
+        if (datetime.datetime.now() - lastTime).total_seconds() > args.refresh*5:
+            lastTime = datetime.datetime.now()
+            cleanRedis()
+
+        time.sleep(args.refresh)
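The autokill decision in the loop above hinges on the heartbeat written by Process.get_from_set(): a module is considered stuck when its queue still has items (card > 0) and the timestamp of the paste it is holding is older than threshold_stucked_module. A tiny illustration of that check; the timestamp and queue length values here are made up:

    import datetime

    threshold_stucked_module = 60*60*1  # same constant as above: one hour
    timestamp = 1467000000              # made-up value, as stored in MODULE_<queue>_<pid>
    card = 3                            # made-up queue length, as stored in the 'queues' hash

    start = datetime.datetime.fromtimestamp(timestamp)
    stuck = card > 0 and (datetime.datetime.now() - start).total_seconds() > threshold_stucked_module
    print 'stuck module' if stuck else 'healthy'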
@@ -66,23 +66,7 @@ def compute_progression(server, field_name, num_day, url_parsed):
         # filter
         if (keyword_total_sum > threshold_total_sum) and (keyword_increase > threshold_increase):
-
-            if server.sismember(redis_progression_name_set, keyword): #if keyword is in the set
-                server.hset(redis_progression_name, keyword, keyword_increase) #update its value
-
-            elif (server.scard(redis_progression_name_set) < max_set_cardinality):
-                server.sadd(redis_progression_name_set, keyword)
-
-            else: #not in the set
-                #Check value for all members
-                member_set = []
-                for keyw in server.smembers(redis_progression_name_set):
-                    member_set.append((keyw, int(server.hget(redis_progression_name, keyw))))
-                print member_set
-                member_set.sort(key=lambda tup: tup[1])
-                if member_set[0][1] < keyword_increase:
-                    #remove min from set and add the new one
-                    server.srem(redis_progression_name_set, member_set[0])
-                    server.sadd(redis_progression_name_set, keyword)
+            server.zadd("z_top_progression_"+field_name, float(keyword_increase), keyword)


 if __name__ == '__main__':
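Note on the change above: the manual top-N bookkeeping (a set plus a hash, trimmed by hand) is replaced by a single Redis sorted set keyed z_top_progression_<field_name>, with the keyword's increase as score; the web interface (see the Flask_server.py changes at the end of this diff) then reads the top 10 with zrevrangebyscore. A minimal sketch of the write/read pair, not part of the commit: the local connection and sample values are invented, and the zadd(key, score, member) argument order follows the redis-py 2.x style used throughout this code base:

    import redis

    # Hypothetical local instance; in AIL the connection comes from config.cfg.
    server = redis.StrictRedis(host='localhost', port=6379, db=0)
    field_name = 'domain'                      # illustrative field name
    keyword, keyword_increase = 'example.com', 42

    # Writer side (as in compute_progression): score = increase, member = keyword.
    server.zadd("z_top_progression_" + field_name, float(keyword_increase), keyword)

    # Reader side (as in the Flask progressionCharts endpoint): top 10 by score.
    top10 = server.zrevrangebyscore("z_top_progression_" + field_name,
                                    '+inf', '-inf', withscores=True, start=0, num=10)
    print top10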
@@ -8,50 +8,52 @@ sleep 0.1

 echo -e $GREEN"\t* Launching ZMQ scripts"$DEFAULT

-screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Duplicate" bash -c './Duplicate_ssdeep_v2.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Attribute" bash -c './Attribute.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Line" bash -c './Line.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "DomainClassifier" bash -c './DomClassifier.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "CreditCard" bash -c './CreditCard.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Url" bash -c './Url.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Curve_topsets_manager" bash -c './Curve_manage_top_sets.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Keys" bash -c './Keys.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Phone" bash -c './Phone.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Release" bash -c './Release.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Cve" bash -c './Cve.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "WebStats" bash -c './WebStats.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Browse_warning_paste" bash -c './Browse_warning_paste.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "SentimentAnalyser" bash -c './SentimentAnalyser.py; read x'
+screen -S "Script" -X screen -t "ModuleInformation" bash -c './ModuleInformation.py -k 0 -c 1; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Duplicates" bash -c './Duplicates.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Attributes" bash -c './Attributes.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Lines" bash -c './Lines.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "DomClassifier" bash -c './DomClassifier.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "CreditCards" bash -c './CreditCards.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Web" bash -c './Web.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "CurveManageTopSets" bash -c './CurveManageTopSets.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Keys" bash -c './Keys.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Phone" bash -c './Phone.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Release" bash -c './Release.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Cve" bash -c './Cve.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "WebStats" bash -c './WebStats.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "Browse_warning_paste" bash -c './Browse_warning_paste.py; read x'
+sleep 0.1
+screen -S "Script" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'
@@ -264,7 +264,7 @@ class Paste(object):

     def _get_p_duplicate(self):
         self.p_duplicate = self.store.hget(self.p_path, "p_duplicate")
-        return self.p_duplicate if self.p_duplicate is not None else []
+        return self.p_duplicate if self.p_duplicate is not None else '[]'

     def save_all_attributes_redis(self, key=None):
         """
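Note on the change above: _get_p_duplicate now falls back to the string '[]' instead of an empty Python list, so the attribute read from Redis is always a JSON document. The web interface (showpaste in Flask_server.py, changed at the end of this diff) can therefore drop its hand-written parseStringToList2 parser and simply call json.loads. A small sketch of that consumer side; the stored value here is made up for illustration:

    import json

    # Hypothetical stored value: entries of [hash type, paste path, similarity percentage].
    stored = '[["ssdeep", "/path/to/other_paste.gz", 100]]'   # or '[]' when no duplicate
    for hash_type, paste_path, percent in json.loads(stored):
        print hash_type, paste_path, percent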
@@ -34,7 +34,7 @@ subscribe = Redis_Global
 publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Redis_SourceCode,Redis_Cve

 [CreditCards]
-subscribe = Redis_CreditCard
+subscribe = Redis_CreditCards
 publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste

 [Mail]

@@ -63,7 +63,7 @@ publish = Redis_BrowseWarningPaste,Redis_Duplicate
 [ModuleStats]
 subscribe = Redis_ModuleStats

-[Browse_warning_paste]
+[BrowseWarningPaste]
 subscribe = Redis_BrowseWarningPaste

 #[send_to_queue]

@@ -88,9 +88,6 @@ publish = Redis_BrowseWarningPaste,Redis_Duplicate
 subscribe = Redis_Global
 publish = Redis_Duplicate,Redis_BrowseWarningPaste

-[SourceCode]
-subscribe = Redis_SourceCode
-
 [Keys]
 subscribe = Redis_Global
 publish = Redis_Duplicate,Redis_BrowseWarningPaste
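The subscribe/publish entries above are what wire each module to its Redis queues, and the new doc/generate_graph_data.py added just below walks this same file to emit a Graphviz digraph of the module data flow. For illustration only (not part of the commit), the same information can be read with ConfigParser:

    import os
    import ConfigParser

    cfg = ConfigParser.ConfigParser()
    cfg.read(os.path.join(os.environ['AIL_BIN'], 'packages/modules.cfg'))

    # Print each module section with the queues it subscribes to and publishes on.
    for section in cfg.sections():
        sub = cfg.get(section, 'subscribe') if cfg.has_option(section, 'subscribe') else ''
        pub = cfg.get(section, 'publish') if cfg.has_option(section, 'publish') else ''
        print section, '<-', sub, '->', pub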
doc/generate_graph_data.py (new executable file, 65 lines)

@@ -0,0 +1,65 @@
+#!/usr/bin/env python2
+# -*-coding:UTF-8 -*
+
+content = ""
+modules = {}
+all_modules = []
+curr_module = ""
+streamingPub = {}
+streamingSub = {}
+
+with open('../bin/packages/modules.cfg', 'r') as f:
+    for line in f:
+        if line[0] != '#':
+            if line[0] == '[':
+                curr_name = line.replace('[','').replace(']','').replace('\n', '').replace(' ', '')
+                all_modules.append(curr_name)
+                modules[curr_name] = {'sub': [], 'pub': []}
+                curr_module = curr_name
+            elif curr_module != "": # searching for sub or pub
+                if line.startswith("subscribe"):
+                    curr_subscribers = [w for w in line.replace('\n', '').replace(' ', '').split('=')[1].split(',')]
+                    modules[curr_module]['sub'] = curr_subscribers
+                    for sub in curr_subscribers:
+                        streamingSub[sub] = curr_module
+
+                elif line.startswith("publish"):
+                    curr_publishers = [w for w in line.replace('\n', '').replace(' ', '').split('=')[1].split(',')]
+                    modules[curr_module]['pub'] = curr_publishers
+                    for pub in curr_publishers:
+                        streamingPub[pub] = curr_module
+                else:
+                    continue
+
+output_set_graph = set()
+with open('all_modules.txt', 'w') as f2:
+    for e in all_modules:
+        f2.write(e+"\n")
+
+for module in modules.keys():
+    for stream_in in modules[module]['sub']:
+        if stream_in not in streamingPub.keys():
+            output_set_graph.add("\"" + stream_in + "\" [color=darkorange1] ;\n")
+            output_set_graph.add("\"" + stream_in + "\"" + "->" + module + ";\n")
+        else:
+            output_set_graph.add("\"" + streamingPub[stream_in] + "\"" + "->" + module + ";\n")
+
+    for stream_out in modules[module]['pub']:
+        if stream_out not in streamingSub.keys():
+            output_set_graph.add("\"" + stream_out + "\" [color=darkorange1] ;\n")
+            output_set_graph.add("\"" + stream_out + "\"" + "->" + module + ";\n")
+        else:
+            output_set_graph.add("\"" + module + "\"" + "->" + streamingSub[stream_out] + ";\n")
+
+output_text_graph = ""
+output_text_graph += "digraph unix {\n"\
+    "graph [pad=\"0.5\"];\n"\
+    "size=\"25,25\";\n"\
+    "node [color=lightblue2, style=filled];\n"
+
+for elem in output_set_graph:
+    output_text_graph += elem
+
+output_text_graph += "}"
+print output_text_graph
doc/generate_modules_data_flow_graph.sh (new executable file, 3 lines)

@@ -0,0 +1,3 @@
+#!/bin/bash
+
+python generate_graph_data.py | dot -T png -o module-data-flow.png
BIN doc/module-data-flow.png (new binary file, 152 KiB)
BIN several documentation screenshots updated (binary image changes; sizes before/after roughly: 56 KiB removed, 31 KiB to 30 KiB, 86 KiB to 111 KiB, 54 KiB to 56 KiB, 57 KiB to 66 KiB)
docker_start.sh (new executable file, 75 lines)

@@ -0,0 +1,75 @@
+source ./AILENV/bin/activate
+cd bin
+
+export PATH=$AIL_HOME:$PATH
+export PATH=$AIL_REDIS:$PATH
+export PATH=$AIL_LEVELDB:$PATH
+export AILENV=/opt/AIL
+
+conf_dir="${AIL_HOME}/configs/"
+
+screen -dmS "Redis"
+screen -S "Redis" -X screen -t "6379" bash -c 'redis-server '$conf_dir'6379.conf ; read x'
+screen -S "Redis" -X screen -t "6380" bash -c 'redis-server '$conf_dir'6380.conf ; read x'
+screen -S "Redis" -X screen -t "6381" bash -c 'redis-server '$conf_dir'6381.conf ; read x'
+
+# For Words and curves
+sleep 0.1
+screen -S "Redis" -X screen -t "6382" bash -c 'redis-server '$conf_dir'6382.conf ; read x'
+
+#Want to launch more level_db?
+lvdbhost='127.0.0.1'
+lvdbdir="${AIL_HOME}/LEVEL_DB_DATA/"
+db1_y='2013'
+db2_y='2014'
+db3_y='2016'
+db4_y='3016'
+nb_db=13
+
+screen -dmS "LevelDB"
+#Add lines here with appropriates options.
+screen -S "LevelDB" -X screen -t "2013" bash -c 'redis-leveldb -H '$lvdbhost' -D '$lvdbdir'2013/ -P '$db1_y' -M '$nb_db'; read x'
+screen -S "LevelDB" -X screen -t "2014" bash -c 'redis-leveldb -H '$lvdbhost' -D '$lvdbdir'2014/ -P '$db2_y' -M '$nb_db'; read x'
+screen -S "LevelDB" -X screen -t "2016" bash -c 'redis-leveldb -H '$lvdbhost' -D '$lvdbdir'2016/ -P '$db3_y' -M '$nb_db'; read x'
+
+# For Curve
+screen -S "LevelDB" -X screen -t "3016" bash -c 'redis-leveldb -H '$lvdbhost' -D '$lvdbdir'3016/ -P '$db4_y' -M '$nb_db'; read x'
+
+screen -dmS "Logging"
+screen -S "Logging" -X screen -t "LogQueue" bash -c 'log_subscriber -p 6380 -c Queuing -l ../logs/; read x'
+screen -S "Logging" -X screen -t "LogScript" bash -c 'log_subscriber -p 6380 -c Script -l ../logs/; read x'
+
+screen -dmS "Queue"
+screen -S "Queue" -X screen -t "Queues" bash -c './launch_queues.py; read x'
+
+screen -dmS "Script"
+screen -S "Script" -X screen -t "ModuleInformation" bash -c './ModuleInformation.py -k 0 -c 1; read x'
+screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
+screen -S "Script" -X screen -t "Duplicates" bash -c './Duplicates.py; read x'
+screen -S "Script" -X screen -t "Attributes" bash -c './Attributes.py; read x'
+screen -S "Script" -X screen -t "Lines" bash -c './Lines.py; read x'
+screen -S "Script" -X screen -t "DomClassifier" bash -c './DomClassifier.py; read x'
+screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
+screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
+screen -S "Script" -X screen -t "CreditCards" bash -c './CreditCards.py; read x'
+screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
+screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
+screen -S "Script" -X screen -t "Web" bash -c './Web.py; read x'
+screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
+screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
+screen -S "Script" -X screen -t "CurveManageTopSets" bash -c './CurveManageTopSets.py; read x'
+screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
+screen -S "Script" -X screen -t "Keys" bash -c './Keys.py; read x'
+screen -S "Script" -X screen -t "Phone" bash -c './Phone.py; read x'
+screen -S "Script" -X screen -t "Release" bash -c './Release.py; read x'
+screen -S "Script" -X screen -t "Cve" bash -c './Cve.py; read x'
+screen -S "Script" -X screen -t "WebStats" bash -c './WebStats.py; read x'
+screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
+screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
+screen -S "Script" -X screen -t "BrowseWarningPaste" bash -c './BrowseWarningPaste.py; read x'
+screen -S "Script" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'
+
+cd $AILENV
+cd var/www/
+python Flask_server.py
@@ -6,20 +6,24 @@ set -x
 sudo apt-get update

 sudo apt-get install python-pip python-virtualenv python-dev libfreetype6-dev \
-    screen g++ python-tk unzip libsnappy-dev cmake
+    screen g++ python-tk unzip libsnappy-dev cmake -y

 #Needed for bloom filters
-sudo apt-get install libssl-dev libfreetype6-dev python-numpy
+sudo apt-get install libssl-dev libfreetype6-dev python-numpy -y

 # DNS deps
-sudo apt-get install libadns1 libadns1-dev
+sudo apt-get install libadns1 libadns1-dev -y

 #Needed for redis-lvlDB
-sudo apt-get install libev-dev libgmp-dev
+sudo apt-get install libev-dev libgmp-dev -y

+#Need for generate-data-flow graph
+sudo apt-get install graphviz -y
+
 #needed for mathplotlib
-test ! -L /usr/include/ft2build.h && sudo ln -s freetype2/ft2build.h /usr/include/
 sudo easy_install -U distribute
+# ssdeep
+sudo apt-get install libfuzzy-dev

 # REDIS #
 test ! -d redis/ && git clone https://github.com/antirez/redis.git

@@ -29,7 +33,7 @@ make
 popd

 # Faup
-test ! -d faup && git clone https://github.com/stricaud/faup.git
+test ! -d faup/ && git clone https://github.com/stricaud/faup.git
 pushd faup/
 test ! -d build && mkdir build
 cd build

@@ -43,6 +47,10 @@ popd
 test ! -d tlsh && git clone git://github.com/trendmicro/tlsh.git
 pushd tlsh/
 ./make.sh
+pushd build/release/
+sudo make install
+sudo ldconfig
+popd
 popd

 # REDIS LEVEL DB #

@@ -57,22 +65,30 @@ if [ ! -f bin/packages/config.cfg ]; then
     cp bin/packages/config.cfg.sample bin/packages/config.cfg
 fi

-virtualenv AILENV
-
-echo export AIL_HOME=$(pwd) >> ./AILENV/bin/activate
-echo export AIL_BIN=$(pwd)/bin/ >> ./AILENV/bin/activate
-echo export AIL_FLASK=$(pwd)/var/www/ >> ./AILENV/bin/activate
-echo export AIL_REDIS=$(pwd)/redis/src/ >> ./AILENV/bin/activate
-echo export AIL_LEVELDB=$(pwd)/redis-leveldb/ >> ./AILENV/bin/activate
-
-. ./AILENV/bin/activate
+pushd var/www/
+./update_thirdparty.sh
+popd
+
+if [ -z "$VIRTUAL_ENV" ]; then
+
+    virtualenv AILENV
+
+    echo export AIL_HOME=$(pwd) >> ./AILENV/bin/activate
+    echo export AIL_BIN=$(pwd)/bin/ >> ./AILENV/bin/activate
+    echo export AIL_FLASK=$(pwd)/var/www/ >> ./AILENV/bin/activate
+    echo export AIL_REDIS=$(pwd)/redis/src/ >> ./AILENV/bin/activate
+    echo export AIL_LEVELDB=$(pwd)/redis-leveldb/ >> ./AILENV/bin/activate
+
+    . ./AILENV/bin/activate
+
+fi

 mkdir -p $AIL_HOME/{PASTES,Blooms,dumps}
 mkdir -p $AIL_HOME/LEVEL_DB_DATA/2016
 mkdir -p $AIL_HOME/LEVEL_DB_DATA/3016

 pip install -U pip
-pip install -r pip_packages_requirement.txt
+pip install -U -r pip_packages_requirement.txt

 # Pyfaup
 pushd faup/src/lib/bindings/python/

@@ -81,6 +97,7 @@ popd

 # Py tlsh
 pushd tlsh/py_ext
+git checkout a67c69b0cdfd168c62c159d41b8a3612ee2b0df1 # temporary, latest commit breaks the python module
 python setup.py build
 python setup.py install
@@ -10,6 +10,7 @@ textblob
 numpy
 matplotlib
 networkx
+terminaltables

 #Tokeniser
 nltk

@@ -39,7 +40,7 @@ pycountry
 PySocks

 #ASN lookup requirements
-http://adns-python.googlecode.com/files/adns-python-1.2.1.tar.gz
+https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/adns-python/adns-python-1.2.1.tar.gz
 https://github.com/trolldbois/python-cymru-services/archive/master.zip

 https://github.com/saffsd/langid.py/archive/master.zip
@ -81,48 +81,31 @@ def event_stream():
|
||||||
|
|
||||||
def get_queues(r):
|
def get_queues(r):
|
||||||
# We may want to put the llen in a pipeline to do only one query.
|
# We may want to put the llen in a pipeline to do only one query.
|
||||||
return [(queue, int(card)) for queue, card in
|
newData = []
|
||||||
r.hgetall("queues").iteritems()]
|
for queue, card in r.hgetall("queues").iteritems():
|
||||||
|
key = "MODULE_" + queue + "_"
|
||||||
|
keySet = "MODULE_TYPE_" + queue
|
||||||
|
|
||||||
|
for moduleNum in r.smembers(keySet):
|
||||||
|
|
||||||
|
value = r.get(key + str(moduleNum))
|
||||||
|
if value is not None:
|
||||||
|
timestamp, path = value.split(", ")
|
||||||
|
if timestamp is not None:
|
||||||
|
startTime_readable = datetime.datetime.fromtimestamp(int(timestamp))
|
||||||
|
processed_time_readable = str((datetime.datetime.now() - startTime_readable)).split('.')[0]
|
||||||
|
seconds = int((datetime.datetime.now() - startTime_readable).total_seconds())
|
||||||
|
newData.append( (queue, card, seconds, moduleNum) )
|
||||||
|
else:
|
||||||
|
newData.append( (queue, cards, 0, moduleNum) )
|
||||||
|
|
||||||
|
return newData
|
||||||
|
|
||||||
|
|
||||||
def list_len(s):
|
def list_len(s):
|
||||||
return len(s)
|
return len(s)
|
||||||
app.jinja_env.filters['list_len'] = list_len
|
app.jinja_env.filters['list_len'] = list_len
|
||||||
|
|
||||||
def parseStringToList(the_string):
|
|
||||||
strList = ""
|
|
||||||
elemList = []
|
|
||||||
for c in the_string:
|
|
||||||
if c != ']':
|
|
||||||
if c != '[' and c !=' ' and c != '"':
|
|
||||||
strList += c
|
|
||||||
else:
|
|
||||||
the_list = strList.split(',')
|
|
||||||
if len(the_list) == 3:
|
|
||||||
elemList = elemList + the_list
|
|
||||||
elif len(the_list) == 2:
|
|
||||||
elemList.append(the_list)
|
|
||||||
elif len(the_list) > 1:
|
|
||||||
elemList.append(the_list[1:])
|
|
||||||
strList = ""
|
|
||||||
return elemList
|
|
||||||
|
|
||||||
def parseStringToList2(the_string):
|
|
||||||
if the_string == []:
|
|
||||||
return []
|
|
||||||
else:
|
|
||||||
res = []
|
|
||||||
tab_str = the_string.split('], [')
|
|
||||||
tab_str[0] = tab_str[0][1:]+']'
|
|
||||||
tab_str[len(tab_str)-1] = '['+tab_str[len(tab_str)-1][:-1]
|
|
||||||
res.append(parseStringToList(tab_str[0]))
|
|
||||||
for i in range(1, len(tab_str)-2):
|
|
||||||
tab_str[i] = '['+tab_str[i]+']'
|
|
||||||
res.append(parseStringToList(tab_str[i]))
|
|
||||||
if len(tab_str) > 1:
|
|
||||||
res.append(parseStringToList(tab_str[len(tab_str)-1]))
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def showpaste(content_range):
|
def showpaste(content_range):
|
||||||
requested_path = request.args.get('paste', '')
|
requested_path = request.args.get('paste', '')
|
||||||
|
@ -136,7 +119,7 @@ def showpaste(content_range):
|
||||||
p_mime = paste.p_mime
|
p_mime = paste.p_mime
|
||||||
p_lineinfo = paste.get_lines_info()
|
p_lineinfo = paste.get_lines_info()
|
||||||
p_content = paste.get_p_content().decode('utf-8', 'ignore')
|
p_content = paste.get_p_content().decode('utf-8', 'ignore')
|
||||||
p_duplicate_full_list = parseStringToList2(paste._get_p_duplicate())
|
p_duplicate_full_list = json.loads(paste._get_p_duplicate())
|
||||||
p_duplicate_list = []
|
p_duplicate_list = []
|
||||||
p_simil_list = []
|
p_simil_list = []
|
||||||
p_hashtype_list = []
|
p_hashtype_list = []
|
||||||
|
@ -160,7 +143,7 @@ def showpaste(content_range):
|
||||||
hash_types = []
|
hash_types = []
|
||||||
comp_vals = []
|
comp_vals = []
|
||||||
for i in indices:
|
for i in indices:
|
||||||
hash_types.append(p_duplicate_full_list[i][0])
|
hash_types.append(p_duplicate_full_list[i][0].encode('utf8'))
|
||||||
comp_vals.append(p_duplicate_full_list[i][2])
|
comp_vals.append(p_duplicate_full_list[i][2])
|
||||||
dup_list_removed.append(i)
|
dup_list_removed.append(i)
|
||||||
|
|
||||||
@@ -267,19 +250,9 @@ def progressionCharts():
         return jsonify(bar_values)

     else:
-        redis_progression_name = 'top_progression_'+trending_name
-        redis_progression_name_set = 'top_progression_'+trending_name+'_set'
-        # Iterate over element in top_x_set and retreive their value
-        member_set = []
-        for keyw in r_serv_charts.smembers(redis_progression_name_set):
-            keyw_value = r_serv_charts.hget(redis_progression_name, keyw)
-            keyw_value = keyw_value if keyw_value is not None else 0
-            member_set.append((keyw, int(keyw_value)))
-        member_set.sort(key=lambda tup: tup[1], reverse=True)
-        if len(member_set) == 0:
-            member_set.append(("No relevant data", int(100)))
-        return jsonify(member_set)
+        redis_progression_name = "z_top_progression_" + trending_name
+        keyw_value = r_serv_charts.zrevrangebyscore(redis_progression_name, '+inf', '-inf', withscores=True, start=0, num=10)
+        return jsonify(keyw_value)


 @app.route("/_moduleCharts", methods=['GET'])
 def modulesCharts():
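The rewritten branch above replaces the old hash-plus-set bookkeeping with a single sorted set named "z_top_progression_<trending_name>". A hedged sketch of the matching redis-py usage is given below; the member names, scores and connection settings are invented for illustration, and the zincrby() argument order shown is the redis-py 2.x one (newer client versions swap the amount and member arguments).

# --- illustrative sketch, not part of the diff ---
import redis

r_serv_charts = redis.StrictRedis(host='localhost', port=6379, db=3)  # assumed settings
redis_progression_name = "z_top_progression_" + "wordtrending"        # assumed trending_name

# Writer side: one ZINCRBY per observation keeps the set ordered by score.
r_serv_charts.zincrby(redis_progression_name, "bitcoin", 1)   # redis-py 2.x argument order
r_serv_charts.zincrby(redis_progression_name, "password", 3)

# Reader side: the top-10 query used by progressionCharts() above.
print(r_serv_charts.zrevrangebyscore(redis_progression_name, '+inf', '-inf',
                                     withscores=True, start=0, num=10))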
@@ -458,7 +431,7 @@ def sentiment_analysis_trending():
     return render_template("sentiment_analysis_trending.html")


-@app.route("/sentiment_analysis_getplotdata/")
+@app.route("/sentiment_analysis_getplotdata/", methods=['GET'])
 def sentiment_analysis_getplotdata():
     # Get the top providers based on number of pastes
     oneHour = 60*60

@@ -467,19 +440,27 @@ def sentiment_analysis_getplotdata():
     dateStart = dateStart.replace(minute=0, second=0, microsecond=0)
     dateStart_timestamp = calendar.timegm(dateStart.timetuple())

-    to_return = {}
-    range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(0)[0], '+inf', '-inf', start=0, num=8)
-    # if empty, get yesterday top providers
-    print 'providers_set_'+ get_date_range(1)[1]
-    range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(1)[1], '+inf', '-inf', start=0, num=8) if range_providers == [] else range_providers
-    # if still empty, takes from all providers
-    if range_providers == []:
-        print 'today provider empty'
-        range_providers = r_serv_charts.smembers('all_provider_set')
-
-    for cur_provider in range_providers:
-        print cur_provider
-        cur_provider_name = cur_provider + '_'
+    getAllProviders = request.args.get('getProviders')
+    provider = request.args.get('provider')
+    allProvider = request.args.get('all')
+    if getAllProviders == 'True':
+        if allProvider == "True":
+            range_providers = r_serv_charts.smembers('all_provider_set')
+            return jsonify(list(range_providers))
+        else:
+            range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(0)[0], '+inf', '-inf', start=0, num=8)
+            # if empty, get yesterday top providers
+            range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(1)[1], '+inf', '-inf', start=0, num=8) if range_providers == [] else range_providers
+            # if still empty, takes from all providers
+            if range_providers == []:
+                print 'today provider empty'
+                range_providers = r_serv_charts.smembers('all_provider_set')
+            return jsonify(range_providers)
+
+    elif provider is not None:
+        to_return = {}
+        cur_provider_name = provider + '_'
         list_date = {}
         for cur_timestamp in range(int(dateStart_timestamp), int(dateStart_timestamp)-sevenDays-oneHour, -oneHour):
             cur_set_name = cur_provider_name + str(cur_timestamp)

@@ -489,9 +470,10 @@ def sentiment_analysis_getplotdata():
             cur_value = r_serv_sentiment.get(cur_id)
             list_value.append(cur_value)
             list_date[cur_timestamp] = list_value
-        to_return[cur_provider] = list_date
+        to_return[provider] = list_date

         return jsonify(to_return)
+    return "Bad request"

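With the change above, /sentiment_analysis_getplotdata/ is queried in two steps (provider list first, then one request per provider), which is what the reworked sentiment_trending.js further down does. A hedged client-side sketch follows; the host/port and the use of the requests library are assumptions made purely for illustration.

# --- illustrative sketch, not part of the diff ---
import requests

BASE = "http://localhost:7000"  # assumed AIL Flask host/port

# Step 1: fetch provider names (top providers, or every provider when all=True).
providers = requests.get(BASE + "/sentiment_analysis_getplotdata/",
                         params={"getProviders": "True", "all": "False"}).json()

# Step 2: one request per provider; the reply maps the provider name to
# {hour_timestamp: [sentiment values]} over the last week.
for provider in providers:
    plotdata = requests.get(BASE + "/sentiment_analysis_getplotdata/",
                            params={"provider": provider}).json()
    print(provider, len(plotdata[provider]), "hourly buckets")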
@@ -695,7 +677,6 @@ def terms_plot_tool_data():
     else:
         value_range = []
         for timestamp in range(range_start, range_end+oneDay, oneDay):
-            print timestamp, term
             value = r_serv_term.hget(timestamp, term)
             curr_value_range = int(value) if value is not None else 0
             value_range.append([timestamp, curr_value_range])
@@ -207,7 +207,7 @@ function create_queue_table() {
     table.appendChild(tableHead);
     table.appendChild(tableBody);
     var heading = new Array();
-    heading[0] = "Queue Name"
+    heading[0] = "Queue Name.PID"
     heading[1] = "Amount"
     var tr = document.createElement('TR');
     tableHead.appendChild(tr);

@@ -221,22 +221,31 @@ function create_queue_table() {

     for(i = 0; i < (glob_tabvar.row1).length;i++){
         var tr = document.createElement('TR')
-        for(j = 0; j < (glob_tabvar.row1[i]).length; j++){
+        for(j = 0; j < 2; j++){
             var td = document.createElement('TD')
-            td.appendChild(document.createTextNode(glob_tabvar.row1[i][j]));
+            var moduleNum = j == 0 ? "." + glob_tabvar.row1[i][3] : "";
+            td.appendChild(document.createTextNode(glob_tabvar.row1[i][j] + moduleNum));
             tr.appendChild(td)
         }
+        // Used to decide the color of the row
+        // We have glob_tabvar.row1[][j] with:
+        // - j=0: ModuleName
+        // - j=1: queueLength
+        // - j=2: LastProcessedPasteTime
+        // - j=3: Number of the module belonging in the same category
+        if (parseInt(glob_tabvar.row1[i][2]) > 60*2 && parseInt(glob_tabvar.row1[i][1]) > 2)
+            tr.className += " danger";
+        else if (parseInt(glob_tabvar.row1[i][2]) > 60*1)
+            tr.className += " warning";
+        else
+            tr.className += " success";
         tableBody.appendChild(tr);
     }
     Tablediv.appendChild(table);
 }

-$(document).ready(function () {
-    if (typeof glob_tabvar == "undefined")
-        location.reload();
-    if (typeof glob_tabvar.row1 == "undefined")
-        location.reload();
-
+function load_queues() {
     var data = [];
     var data2 = [];
     var tmp_tab = [];

@@ -246,13 +255,17 @@ $(document).ready(function () {
     var x = new Date();

     for (i = 0; i < glob_tabvar.row1.length; i++){
-        if (glob_tabvar.row1[i][0] == 'Categ' || glob_tabvar.row1[i][0] == 'Curve'){
-            tmp_tab2.push(0);
-            curves_labels2.push(glob_tabvar.row1[i][0]);
+        if (glob_tabvar.row1[i][0].split(".")[0] == 'Categ' || glob_tabvar.row1[i][0].split(".")[0] == 'Curve'){
+            if (curves_labels2.indexOf(glob_tabvar.row1[i][0].split(".")[0]) == -1) {
+                tmp_tab2.push(0);
+                curves_labels2.push(glob_tabvar.row1[i][0].split(".")[0]);
+            }
         }
         else {
-            tmp_tab.push(0);
-            curves_labels.push(glob_tabvar.row1[i][0]);
+            if (curves_labels.indexOf(glob_tabvar.row1[i][0].split(".")[0]) == -1) {
+                tmp_tab.push(0);
+                curves_labels.push(glob_tabvar.row1[i][0].split(".")[0]);
+            }
         }
     }
     tmp_tab.unshift(x);
@@ -311,19 +324,29 @@ $(document).ready(function () {
     update_values();

     if($('#button-toggle-queues').prop('checked')){
+        $("#queue-color-legend").show();
         create_queue_table();
     }
     else{
         $("#queueing").html('');
+        $("#queue-color-legend").hide();
     }


+    queues_pushed = []
     for (i = 0; i < (glob_tabvar.row1).length; i++){
-        if (glob_tabvar.row1[i][0] == 'Categ' || glob_tabvar.row1[i][0] == 'Curve'){
-            tmp_values2.push(glob_tabvar.row1[i][1]);
+        if (glob_tabvar.row1[i][0].split(".")[0] == 'Categ' || glob_tabvar.row1[i][0].split(".")[0] == 'Curve'){
+            if (queues_pushed.indexOf(glob_tabvar.row1[i][0].split(".")[0]) == -1) {
+                queues_pushed.push(glob_tabvar.row1[i][0].split(".")[0]);
+                tmp_values2.push(parseInt(glob_tabvar.row1[i][1]));
+            }
         }
         else {
-            tmp_values.push(glob_tabvar.row1[i][1]);
+            if (queues_pushed.indexOf(glob_tabvar.row1[i][0].split(".")[0]) == -1) {
+                queues_pushed.push(glob_tabvar.row1[i][0].split(".")[0]);
+                tmp_values.push(parseInt(glob_tabvar.row1[i][1]));
+            }
         }
     }
     tmp_values.unshift(x);
@@ -362,7 +385,19 @@ $(document).ready(function () {
         // something went wrong, hide the canvas container
         document.getElementById('myCanvasContainer').style.display = 'none';
     }
+}
+
+function manage_undefined() {
+    if (typeof glob_tabvar == "undefined")
+        setTimeout(function() { if (typeof glob_tabvar == "undefined") { manage_undefined(); } else { load_queues(); } }, 1000);
+    else if (typeof glob_tabvar.row1 == "undefined")
+        setTimeout(function() { if (typeof glob_tabvar.row1 == "undefined") { manage_undefined(); } else { load_queues(); } }, 1000);
+    else
+        load_queues();
+}
+
+$(document).ready(function () {
+    manage_undefined();
 });

@@ -1,4 +1,42 @@

+/* Functions and config */
+function add_new_graph_today(id) {
+    return "<div id=\"panel-today\" class=\"panel panel-default pannelToday"+id+"\">" +
+        "<div class=\"panel-heading\">" +
+            "<strong class=\"sparkLineStatsToday"+id+"t\">Graph "+id+"</strong>" +
+            "<strong class=\"sparkLineStatsToday"+id+"s pull-right\">Avg</strong>" +
+        "</div>" +
+        "<div class=\"panel-body panelInside\">" +
+            "<table class=\"table\">" +
+                "<tbody>" +
+                    "<tr>" +
+                        "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsToday"+id+"\"></div></td> " +
+                        "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsToday"+id+"b\"></div></td> " +
+                    "</tr>" +
+                "</tbody>" +
+            "</table>" +
+        "</div>" +
+        "</div>";
+};
+function add_new_graph_week(id) {
+    return "<div id=\"panel-week\" class=\"panel panel-default pannelWeek"+id+"\">" +
+        "<div class=\"panel-heading\">" +
+            "<strong class=\"sparkLineStatsWeek"+id+"t\">Graph "+id+"</strong>" +
+            "<strong class=\"sparkLineStatsWeek"+id+"s pull-right\">Avg</strong>" +
+        "</div>" +
+        "<div class=\"panel-body panelInside\">" +
+            "<table class=\"table\">" +
+                "<tbody>" +
+                    "<tr>" +
+                        "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsWeek"+id+"\"></div></td> " +
+                        "<td style=\"border-top: 0px solid #ddd;\"><div class=\"sparkLineStatsWeek"+id+"b\"></div></td> " +
+                    "</tr>" +
+                "</tbody>" +
+            "</table>" +
+        "</div>" +
+        "</div>";
+}
+
function generate_offset_to_time(num){
|
function generate_offset_to_time(num){
|
||||||
var to_ret = {};
|
var to_ret = {};
|
||||||
for(i=0; i<=num; i++) {
|
for(i=0; i<=num; i++) {
|
||||||
|
@ -45,29 +83,74 @@
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
$.getJSON("/sentiment_analysis_getplotdata/",
|
/* Plot and queries */
|
||||||
function(data) {
|
|
||||||
var all_data = [];
|
|
||||||
var plot_data = [];
|
|
||||||
var graph_avg = [];
|
|
||||||
var array_provider = Object.keys(data);
|
|
||||||
var dates_providers = Object.keys(data[array_provider[0]]);
|
|
||||||
var dateStart = parseInt(dates_providers[0]);
|
|
||||||
var oneHour = 60*60;
|
|
||||||
var oneWeek = oneHour*24*7;
|
|
||||||
|
|
||||||
var all_graph_day_sum = 0.0;
|
var all_graph_day_sum = 0.0;
|
||||||
var all_graph_hour_sum = 0.0;
|
var all_graph_hour_sum = 0.0;
|
||||||
var all_graph_hour_maxVal = 0.0;
|
var all_graph_hour_sum_minus = 0.0;
|
||||||
var all_day_avg = 0.0;
|
var all_graph_hour_maxVal = 0.0;
|
||||||
var all_day_avg_maxVal = 0.0;
|
var all_day_avg = 0.0;
|
||||||
|
var all_day_avg_maxVal = 0.0;
|
||||||
|
var graph_avg = [];
|
||||||
|
var all_data = [];
|
||||||
|
var provider_already_loaded = [];
|
||||||
|
var totNumGraph = 0;
|
||||||
|
|
||||||
|
// Query all providers name then launch the query and plot process for each of them.
|
||||||
|
// When everything is terminated, plot the widgets (Gauge, canvasJS, table)
|
||||||
|
// input: all - set to 'True' if you take all providers
|
||||||
|
function draw_page(all) {
|
||||||
|
$.getJSON("/sentiment_analysis_getplotdata/?getProviders=True&all="+all,
|
||||||
|
function(data) {
|
||||||
|
var promises = [];
|
||||||
|
|
||||||
|
var the_length = provider_already_loaded.length == 0 ? 0 : provider_already_loaded.length;
|
||||||
|
for(i=0; i<data.length; i++) {
|
||||||
|
if(provider_already_loaded.indexOf(data[i]) != -1) {
|
||||||
|
continue;
|
||||||
|
} else {
|
||||||
|
totNumGraph++;
|
||||||
|
if(i % 2 == 0) {
|
||||||
|
$("#today_divl").append(add_new_graph_today(i+the_length+1));
|
||||||
|
$("#week_divl").append(add_new_graph_week(i+the_length+1));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
$("#today_divr").append(add_new_graph_today(i+the_length+1));
|
||||||
|
$("#week_divr").append(add_new_graph_week(i+the_length+1));
|
||||||
|
}
|
||||||
|
provider_already_loaded.push(data[i])
|
||||||
|
promises.push(query_and_plot(data[i], i+the_length));
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$.when.apply($, promises).done( function (arg) {
|
||||||
|
draw_widgets();
|
||||||
|
$("#LoadAll").show('fast');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Query data and plot it for a given provider
|
||||||
|
// input - provider: The povider name to be plotted
|
||||||
|
// input - graphNum: The number of the graph (Use to plot on correct div)
|
||||||
|
function query_and_plot(provider, graphNum) {
|
||||||
|
var query_plot = $.getJSON("/sentiment_analysis_getplotdata/?provider="+provider,
|
||||||
|
function(data) {
|
||||||
|
var plot_data = [];
|
||||||
|
var array_provider = Object.keys(data);
|
||||||
|
var dates_providers = Object.keys(data[array_provider[0]]);
|
||||||
|
var dateStart = parseInt(dates_providers[0]);
|
||||||
|
var oneHour = 60*60;
|
||||||
|
var oneWeek = oneHour*24*7;
|
||||||
|
|
||||||
for (graphNum=0; graphNum<8; graphNum++) {
|
|
||||||
var max_value = 0.0;
|
var max_value = 0.0;
|
||||||
var max_value_day = 0.0;
|
var max_value_day = 0.0;
|
||||||
var graph_data = [];
|
var graph_data = [];
|
||||||
var spark_data = [];
|
var spark_data = [];
|
||||||
var curr_provider = array_provider[graphNum];
|
var curr_provider = array_provider[0];
|
||||||
var curr_sum = 0.0;
|
var curr_sum = 0.0;
|
||||||
var curr_sum_elem = 0.0;
|
var curr_sum_elem = 0.0;
|
||||||
var day_sum = 0.0;
|
var day_sum = 0.0;
|
||||||
|
@ -88,7 +171,6 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
||||||
var neu = 0;
|
var neu = 0;
|
||||||
|
|
||||||
for(i=0; i<data_array.length; i++){
|
for(i=0; i<data_array.length; i++){
|
||||||
//console.log(data_array[i].replace(/\'/g, '\"'));
|
|
||||||
var curr_data = jQuery.parseJSON(data_array[i].replace(/\'/g, '\"'));
|
var curr_data = jQuery.parseJSON(data_array[i].replace(/\'/g, '\"'));
|
||||||
compPosAvg += curr_data['compoundPos'];
|
compPosAvg += curr_data['compoundPos'];
|
||||||
compNegAvg += curr_data['compoundNeg'];
|
compNegAvg += curr_data['compoundNeg'];
|
||||||
|
@ -109,8 +191,7 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
||||||
max_value = Math.abs(pos-neg) > max_value ? Math.abs(pos-neg) : max_value;
|
max_value = Math.abs(pos-neg) > max_value ? Math.abs(pos-neg) : max_value;
|
||||||
|
|
||||||
if(curr_date >= dateStart+oneWeek-23*oneHour){
|
if(curr_date >= dateStart+oneWeek-23*oneHour){
|
||||||
max_value_day = Math.abs(pos-neg) > max_value_day ? Math.abs(pos-neg) : max_value_day;
|
max_value_day = Math.abs(pos-neg) > max_value_day ? Math.abs(pos-neg) : max_value_day; day_sum += (pos-neg);
|
||||||
day_sum += (pos-neg);
|
|
||||||
day_sum_elem++;
|
day_sum_elem++;
|
||||||
}
|
}
|
||||||
if(curr_date > dateStart+oneWeek-2*oneHour && curr_date <=dateStart+oneWeek-oneHour){
|
if(curr_date > dateStart+oneWeek-2*oneHour && curr_date <=dateStart+oneWeek-oneHour){
|
||||||
|
@ -121,13 +202,12 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
||||||
}
|
}
|
||||||
all_graph_day_sum += day_sum;
|
all_graph_day_sum += day_sum;
|
||||||
all_graph_hour_sum += hour_sum;
|
all_graph_hour_sum += hour_sum;
|
||||||
|
all_graph_hour_sum_minus += hour_sum > 0 ? 0 : 1;
|
||||||
all_graph_hour_maxVal = Math.abs(hour_sum) > all_graph_hour_maxVal ? Math.abs(hour_sum) : all_graph_hour_maxVal;
|
all_graph_hour_maxVal = Math.abs(hour_sum) > all_graph_hour_maxVal ? Math.abs(hour_sum) : all_graph_hour_maxVal;
|
||||||
|
|
||||||
var curr_avg = curr_sum / (curr_sum_elem);
|
var curr_avg = curr_sum / (curr_sum_elem);
|
||||||
if(isNaN(curr_avg))
|
if(isNaN(curr_avg))
|
||||||
curr_avg = 0.0
|
curr_avg = 0.0
|
||||||
//var curr_avg = curr_sum / (oneWeek/oneHour);
|
|
||||||
//var curr_avg = curr_sum / (spark_data.length);
|
|
||||||
graph_avg.push([curr_provider, curr_avg]);
|
graph_avg.push([curr_provider, curr_avg]);
|
||||||
plot_data.push(spark_data);
|
plot_data.push(spark_data);
|
||||||
all_data.push(graph_data);
|
all_data.push(graph_data);
|
||||||
|
@ -141,7 +221,7 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
||||||
var num = graphNum + 1;
|
var num = graphNum + 1;
|
||||||
var placeholder = '.sparkLineStatsWeek' + num;
|
var placeholder = '.sparkLineStatsWeek' + num;
|
||||||
sparklineOptions.barWidth = 2;
|
sparklineOptions.barWidth = 2;
|
||||||
$(placeholder).sparkline(plot_data[graphNum], sparklineOptions);
|
$(placeholder).sparkline(plot_data[0], sparklineOptions);
|
||||||
$(placeholder+'t').text(curr_provider);
|
$(placeholder+'t').text(curr_provider);
|
||||||
var curr_avg_text = isNaN(curr_avg) ? "No data" : curr_avg.toFixed(5);
|
var curr_avg_text = isNaN(curr_avg) ? "No data" : curr_avg.toFixed(5);
|
||||||
$(placeholder+'s').text(curr_avg_text);
|
$(placeholder+'s').text(curr_avg_text);
|
||||||
|
@ -169,8 +249,8 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
||||||
|
|
||||||
|
|
||||||
// print today
|
// print today
|
||||||
var data_length = plot_data[graphNum].length;
|
var data_length = plot_data[0].length;
|
||||||
var data_today = plot_data[graphNum].slice(data_length-24, data_length);
|
var data_today = plot_data[0].slice(data_length-24, data_length);
|
||||||
|
|
||||||
placeholder = '.sparkLineStatsToday' + num;
|
placeholder = '.sparkLineStatsToday' + num;
|
||||||
sparklineOptions.barWidth = 14;
|
sparklineOptions.barWidth = 14;
|
||||||
|
@ -199,155 +279,124 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
||||||
$(avgName).addClass("panel-warning")
|
$(avgName).addClass("panel-warning")
|
||||||
}
|
}
|
||||||
|
|
||||||
}//for loop
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/* ---------------- Gauge ---------------- */
|
|
||||||
var gaugeOptions = {
|
|
||||||
animateEasing: true,
|
|
||||||
|
|
||||||
elementWidth: 200,
|
|
||||||
elementHeight: 125,
|
|
||||||
|
|
||||||
arcFillStart: 10,
|
|
||||||
arcFillEnd: 12,
|
|
||||||
arcFillTotal: 20,
|
|
||||||
incTot: 1.0,
|
|
||||||
|
|
||||||
arcBgColorLight: 200,
|
|
||||||
arcBgColorSat: 0,
|
|
||||||
arcStrokeFg: 20,
|
|
||||||
arcStrokeBg: 30,
|
|
||||||
|
|
||||||
colorArcFg: '#FF3300',
|
|
||||||
animateSpeed: 1,
|
|
||||||
|
|
||||||
};
|
|
||||||
// Clone object
|
|
||||||
var gaugeOptions2 = jQuery.extend(true, {}, gaugeOptions);
|
|
||||||
var gaugeOptions3 = jQuery.extend(true, {}, gaugeOptions);
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
gaugeOptions.appendTo = '#gauge_today_last_hour';
|
|
||||||
gaugeOptions.dialLabel = 'Last hour';
|
|
||||||
gaugeOptions.elementId = 'gauge1';
|
|
||||||
var piePercent = (all_graph_hour_sum / 8) / all_graph_hour_maxVal;
|
|
||||||
gaugeOptions.inc = piePercent;
|
|
||||||
var gauge_today_last_hour = new FlexGauge(gaugeOptions);
|
|
||||||
|
|
||||||
gaugeOptions2.appendTo = '#gauge_today_last_days';
|
|
||||||
gaugeOptions2.dialLabel = 'Today';
|
|
||||||
gaugeOptions2.elementId = 'gauge2';
|
|
||||||
//piePercent = (all_graph_day_sum / (8*24)) / max_value;
|
|
||||||
piePercent = (all_day_avg / 8) / all_day_avg_maxVal;
|
|
||||||
gaugeOptions2.inc = piePercent;
|
|
||||||
var gauge_today_last_days = new FlexGauge(gaugeOptions2);
|
|
||||||
|
|
||||||
gaugeOptions3.appendTo = '#gauge_week';
|
|
||||||
gaugeOptions3.dialLabel = 'Week';
|
|
||||||
gaugeOptions3.elementId = 'gauge3';
|
|
||||||
|
|
||||||
var graph_avg_sum = 0.0;
|
|
||||||
var temp_max_val = 0.0;
|
|
||||||
for (i=0; i<graph_avg.length; i++){
|
|
||||||
graph_avg_sum += graph_avg[i][1];
|
|
||||||
temp_max_val = Math.abs(graph_avg[i][1]) > temp_max_val ? Math.abs(graph_avg[i][1]) : temp_max_val;
|
|
||||||
}
|
}
|
||||||
|
);
|
||||||
piePercent = (graph_avg_sum / graph_avg.length) / temp_max_val;
|
return query_plot
|
||||||
gaugeOptions3.inc = piePercent;
|
}
|
||||||
var gauge_today_last_days = new FlexGauge(gaugeOptions3);
|
|
||||||
|
|
||||||
|
|
||||||
/* --------- Sort providers -------- */
|
|
||||||
|
|
||||||
graph_avg.sort(function(a, b){return b[1]-a[1]});
|
function draw_widgets() {
|
||||||
|
|
||||||
for (i=1; i<6; i++){
|
/* ---------------- Gauge ---------------- */
|
||||||
$('.worst'+i).text(graph_avg[7-(i-1)][0]);
|
var gaugeOptions = {
|
||||||
$('.best'+i).text(graph_avg[i-1][0]);
|
animateEasing: true,
|
||||||
}
|
|
||||||
|
|
||||||
/* ----------- CanvasJS ------------ */
|
elementWidth: 200,
|
||||||
|
elementHeight: 125,
|
||||||
|
|
||||||
var comp_sum_day_pos = 0.0;
|
arcFillStart: 10,
|
||||||
var comp_sum_day_neg = 0.0;
|
arcFillEnd: 12,
|
||||||
var comp_sum_hour_pos = 0.0;
|
arcFillTotal: 20,
|
||||||
var comp_sum_hour_neg = 0.0;
|
incTot: 1.0,
|
||||||
for(graphNum=0; graphNum<8; graphNum++){
|
|
||||||
curr_graphData = all_data[graphNum];
|
|
||||||
var gauge_data = curr_graphData.slice(curr_graphData.length-24, curr_graphData.length);
|
|
||||||
for (i=1; i< gauge_data.length; i++){
|
|
||||||
comp_sum_day_pos += gauge_data[i].compoundPos;
|
|
||||||
comp_sum_day_neg += gauge_data[i].compoundNeg;
|
|
||||||
|
|
||||||
if(i == 23){
|
arcBgColorLight: 200,
|
||||||
comp_sum_hour_pos += gauge_data[i].compoundPos;
|
arcBgColorSat: 0,
|
||||||
comp_sum_hour_neg += gauge_data[i].compoundNeg;
|
arcStrokeFg: 20,
|
||||||
}
|
arcStrokeBg: 30,
|
||||||
|
|
||||||
|
colorArcFg: '#FF3300',
|
||||||
|
animateSpeed: 1,
|
||||||
|
|
||||||
|
};
|
||||||
|
// Clone object
|
||||||
|
var gaugeOptions2 = jQuery.extend(true, {}, gaugeOptions);
|
||||||
|
var gaugeOptions3 = jQuery.extend(true, {}, gaugeOptions);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
gaugeOptions.appendTo = '#gauge_today_last_hour';
|
||||||
|
gaugeOptions.dialLabel = 'Last hour';
|
||||||
|
gaugeOptions.elementId = 'gauge1';
|
||||||
|
var piePercent = (all_graph_hour_sum / (totNumGraph - all_graph_hour_sum_minus)) / all_graph_hour_maxVal;
|
||||||
|
gaugeOptions.inc = piePercent;
|
||||||
|
var gauge_today_last_hour = new FlexGauge(gaugeOptions);
|
||||||
|
|
||||||
|
gaugeOptions2.appendTo = '#gauge_today_last_days';
|
||||||
|
gaugeOptions2.dialLabel = 'Today';
|
||||||
|
gaugeOptions2.elementId = 'gauge2';
|
||||||
|
piePercent = (all_day_avg / totNumGraph) / all_day_avg_maxVal;
|
||||||
|
gaugeOptions2.inc = piePercent;
|
||||||
|
var gauge_today_last_days = new FlexGauge(gaugeOptions2);
|
||||||
|
|
||||||
|
gaugeOptions3.appendTo = '#gauge_week';
|
||||||
|
gaugeOptions3.dialLabel = 'Week';
|
||||||
|
gaugeOptions3.elementId = 'gauge3';
|
||||||
|
|
||||||
|
var graph_avg_sum = 0.0;
|
||||||
|
var temp_max_val = 0.0;
|
||||||
|
for (i=0; i<graph_avg.length; i++){
|
||||||
|
graph_avg_sum += graph_avg[i][1];
|
||||||
|
temp_max_val = Math.abs(graph_avg[i][1]) > temp_max_val ? Math.abs(graph_avg[i][1]) : temp_max_val;
|
||||||
|
}
|
||||||
|
|
||||||
|
piePercent = (graph_avg_sum / graph_avg.length) / temp_max_val;
|
||||||
|
gaugeOptions3.inc = piePercent;
|
||||||
|
var gauge_today_last_days = new FlexGauge(gaugeOptions3);
|
||||||
|
|
||||||
|
|
||||||
|
/* --------- Sort providers -------- */
|
||||||
|
|
||||||
|
graph_avg.sort(function(a, b){return b[1]-a[1]});
|
||||||
|
|
||||||
|
for (i=1; i<6; i++){
|
||||||
|
$('.worst'+i).text(graph_avg[7-(i-1)][0]);
|
||||||
|
$('.best'+i).text(graph_avg[i-1][0]);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ----------- CanvasJS ------------ */
|
||||||
|
|
||||||
|
var comp_sum_day_pos = 0.0;
|
||||||
|
var comp_sum_day_neg = 0.0;
|
||||||
|
var comp_sum_hour_pos = 0.0;
|
||||||
|
var comp_sum_hour_neg = 0.0;
|
||||||
|
for(graphNum=0; graphNum<totNumGraph; graphNum++){
|
||||||
|
curr_graphData = all_data[graphNum];
|
||||||
|
var gauge_data = curr_graphData.slice(curr_graphData.length-24, curr_graphData.length);
|
||||||
|
for (i=1; i< gauge_data.length; i++){
|
||||||
|
comp_sum_day_pos += gauge_data[i].compoundPos;
|
||||||
|
comp_sum_day_neg += gauge_data[i].compoundNeg;
|
||||||
|
|
||||||
|
if(i == 23){
|
||||||
|
comp_sum_hour_pos += gauge_data[i].compoundPos;
|
||||||
|
comp_sum_hour_neg += gauge_data[i].compoundNeg;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var options_canvasJS_1 = {
|
}
|
||||||
|
|
||||||
animationEnabled: true,
|
var options_canvasJS_1 = {
|
||||||
axisY: {
|
|
||||||
tickThickness: 0,
|
|
||||||
lineThickness: 0,
|
|
||||||
valueFormatString: " ",
|
|
||||||
gridThickness: 0
|
|
||||||
},
|
|
||||||
axisX: {
|
|
||||||
tickThickness: 0,
|
|
||||||
lineThickness: 0,
|
|
||||||
labelFontSize: 0.1,
|
|
||||||
},
|
|
||||||
data: [
|
|
||||||
{
|
|
||||||
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Positive: </strong></span><span><strong>{y}</strong></span>",
|
|
||||||
type: "bar",
|
|
||||||
color: "green",
|
|
||||||
dataPoints: [
|
|
||||||
{y: comp_sum_hour_pos/8}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Negative: </strong></span><span><strong>{y}</strong></span>",
|
|
||||||
type: "bar",
|
|
||||||
color: "red",
|
|
||||||
dataPoints: [
|
|
||||||
{y: comp_sum_hour_neg/8}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
};
|
|
||||||
|
|
||||||
var chart_canvas1 = new CanvasJS.Chart("bar_today_last_hour", options_canvasJS_1);
|
animationEnabled: true,
|
||||||
|
axisY: {
|
||||||
var options_canvasJS_2 = {
|
tickThickness: 0,
|
||||||
|
lineThickness: 0,
|
||||||
animationEnabled: true,
|
valueFormatString: " ",
|
||||||
axisY: {
|
gridThickness: 0
|
||||||
tickThickness: 0,
|
},
|
||||||
lineThickness: 0,
|
axisX: {
|
||||||
valueFormatString: " ",
|
tickThickness: 0,
|
||||||
gridThickness: 0
|
lineThickness: 0,
|
||||||
},
|
labelFontSize: 0.1,
|
||||||
axisX: {
|
},
|
||||||
tickThickness: 0,
|
data: [
|
||||||
lineThickness: 0,
|
|
||||||
labelFontSize: 0.1,
|
|
||||||
},
|
|
||||||
data: [
|
|
||||||
{
|
{
|
||||||
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Positive: </strong></span><span><strong>{y}</strong></span>",
|
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Positive: </strong></span><span><strong>{y}</strong></span>",
|
||||||
type: "bar",
|
type: "bar",
|
||||||
color: "green",
|
color: "green",
|
||||||
dataPoints: [
|
dataPoints: [
|
||||||
{y: comp_sum_day_pos/8}
|
{y: comp_sum_hour_pos/totNumGraph}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -355,32 +404,51 @@ $.getJSON("/sentiment_analysis_getplotdata/",
|
||||||
type: "bar",
|
type: "bar",
|
||||||
color: "red",
|
color: "red",
|
||||||
dataPoints: [
|
dataPoints: [
|
||||||
{y: comp_sum_day_neg/8}
|
{y: comp_sum_hour_neg/totNumGraph}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
var chart_canvas1 = new CanvasJS.Chart("bar_today_last_hour", options_canvasJS_1);
|
||||||
|
|
||||||
|
var options_canvasJS_2 = {
|
||||||
|
|
||||||
|
animationEnabled: true,
|
||||||
|
axisY: {
|
||||||
|
tickThickness: 0,
|
||||||
|
lineThickness: 0,
|
||||||
|
valueFormatString: " ",
|
||||||
|
gridThickness: 0
|
||||||
|
},
|
||||||
|
axisX: {
|
||||||
|
tickThickness: 0,
|
||||||
|
lineThickness: 0,
|
||||||
|
labelFontSize: 0.1,
|
||||||
|
},
|
||||||
|
data: [
|
||||||
|
{
|
||||||
|
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Positive: </strong></span><span><strong>{y}</strong></span>",
|
||||||
|
type: "bar",
|
||||||
|
color: "green",
|
||||||
|
dataPoints: [
|
||||||
|
{y: comp_sum_day_pos/totNumGraph}
|
||||||
]
|
]
|
||||||
};
|
},
|
||||||
|
{
|
||||||
var chart_canvas2 = new CanvasJS.Chart("bar_today_last_days", options_canvasJS_2);
|
toolTipContent: "<span style='\"'color: {color};'\"'><strong>Negative: </strong></span><span><strong>{y}</strong></span>",
|
||||||
|
type: "bar",
|
||||||
chart_canvas1.render();
|
color: "red",
|
||||||
chart_canvas2.render();
|
dataPoints: [
|
||||||
|
{y: comp_sum_day_neg/totNumGraph}
|
||||||
|
]
|
||||||
|
}
|
||||||
}
|
]
|
||||||
);
|
};
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
var chart_canvas2 = new CanvasJS.Chart("bar_today_last_days", options_canvasJS_2);
|
||||||
|
|
||||||
|
chart_canvas1.render();
|
||||||
|
chart_canvas2.render();
|
||||||
|
|
||||||
|
}
|
||||||
@@ -66,7 +66,18 @@
             </label>
             <strong style="top: 3px; position: relative;">Display queues</strong>
             <div>
-            <div class="table-responsive", id="queueing" style="margin-top:10px;"></div>
+            <div>
+                <table id="queue-color-legend" class="table">
+                    <thead>
+                    </thead>
+                    <tbody>
+                        <tr><td class="legendColorBox" style="vertical-align: ; "><div style="border:1px solid #ccc;padding:1px"><div style="width:100%;height:0;border:5px solid #d0e9c6;overflow:hidden"></div></div></td><td> Working queues</td></tr>
+                        <tr><td class="legendColorBox" style="vertical-align: ;"><div style="border:1px solid #ccc;padding:1px"><div style="width:100%;height:0;border:5px solid #faf2cc;overflow:hidden"></div></div></td><td> Idling queues</td></tr>
+                        <tr><td class="legendColorBox" style="vertical-align: ;"><div style="border:1px solid #ccc;padding:1px"><div style="width:100%;height:0;border:5px solid #ebcccc;overflow:hidden"></div></div></td><td> Stuck queues</td></tr>
+                    </tbody>
+                </table>
+            </div>
+            <div class="table-responsive", id="queueing" style="margin-top:10px; font-size: small;"></div>
             <a href="{{ url_for('index') }}"><img src="{{ url_for('static', filename='image/AIL.png') }}" /></a>
         </div>
         <!-- /.navbar-static-side -->

@@ -154,7 +165,6 @@
     </script>

     <script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
-    <script src="{{ url_for('static', filename='js/plugins/metisMenu/metisMenu.js') }}"></script>

 </body>
|
@ -113,6 +113,7 @@
|
||||||
</div>
|
</div>
|
||||||
<!-- /.row -->
|
<!-- /.row -->
|
||||||
<div class="row">
|
<div class="row">
|
||||||
|
<button id="LoadAll" class="btn btn-info" style="margin: 5px;"><span class="glyphicon glyphicon-download"> </span> Load data from all providers </button>
|
||||||
|
|
||||||
<!-- Pannel TODAY -->
|
<!-- Pannel TODAY -->
|
||||||
<div class="row">
|
<div class="row">
|
||||||
|
@ -128,139 +129,13 @@
|
||||||
<div class="col-lg-9" style="padding-left: 0px;">
|
<div class="col-lg-9" style="padding-left: 0px;">
|
||||||
<!-- providers charts -->
|
<!-- providers charts -->
|
||||||
<div class="col-lg-6">
|
<div class="col-lg-6">
|
||||||
<div class="sparkLineStats">
|
<div id="today_divl" class="sparkLineStats">
|
||||||
<div id="panel-today" class="panel panel-default pannelToday1">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday1t">Graph 1</strong>
|
|
||||||
<strong class="sparkLineStatsToday1s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday1"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday1b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-today" class="panel panel-default pannelToday2">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday2t">Graph 2</strong>
|
|
||||||
<strong class="sparkLineStatsToday2s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday2"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday2b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-today" class="panel panel-default pannelToday3">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday3t">Graph 3</strong>
|
|
||||||
<strong class="sparkLineStatsToday3s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday3"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday3b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-today" class="panel panel-default pannelToday4">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday4t">Graph 4</strong>
|
|
||||||
<strong class="sparkLineStatsToday4s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday4"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday4b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="col-lg-6">
|
<div class="col-lg-6">
|
||||||
<div class="sparkLineStats">
|
<div id="today_divr" class="sparkLineStats">
|
||||||
<div id="panel-today" class="panel panel-default pannelToday5">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday5t">Graph 5</strong>
|
|
||||||
<strong class="sparkLineStatsToday5s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday5"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday5b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-today" class="panel panel-default pannelToday6">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday6t">Graph 6</strong>
|
|
||||||
<strong class="sparkLineStatsToday6s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday6"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday6b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-today" class="panel panel-default pannelToday7">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday7t">Graph 7</strong>
|
|
||||||
<strong class="sparkLineStatsToday7s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday7"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday7b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-today" class="panel panel-default pannelToday8">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsToday8t">Graph 8</strong>
|
|
||||||
<strong class="sparkLineStatsToday8s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday8"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsToday8b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
@ -304,139 +179,13 @@
|
||||||
<div class="col-lg-9" style="padding-left: 0px;">
|
<div class="col-lg-9" style="padding-left: 0px;">
|
||||||
<!-- providers charts -->
|
<!-- providers charts -->
|
||||||
<div class="col-lg-6">
|
<div class="col-lg-6">
|
||||||
<div class="sparkLineStats">
|
<div id="week_divl" class="sparkLineStats">
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek1">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek1t">Graph 1</strong>
|
|
||||||
<strong class="sparkLineStatsWeek1s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek1"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek1b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek2">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek2t">Graph 2</strong>
|
|
||||||
<strong class="sparkLineStatsWeek2s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek2"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek2b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek3">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek3t">Graph 3</strong>
|
|
||||||
<strong class="sparkLineStatsWeek3s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek3"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek3b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek4">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek4t">Graph 4</strong>
|
|
||||||
<strong class="sparkLineStatsWeek4s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek4"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek4b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="col-lg-6">
|
<div class="col-lg-6">
|
||||||
<div class="sparkLineStats">
|
<div id="week_divr" class="sparkLineStats">
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek5">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek5t">Graph 5</strong>
|
|
||||||
<strong class="sparkLineStatsWeek5s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek5"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek5b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek6">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek6t">Graph 6</strong>
|
|
||||||
<strong class="sparkLineStatsWeek6s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek6"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek6b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek7">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek7t">Graph 7</strong>
|
|
||||||
<strong class="sparkLineStatsWeek7s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek7"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek7b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div id="panel-week" class="panel panel-default pannelWeek8">
|
|
||||||
<div class="panel-heading">
|
|
||||||
<strong class="sparkLineStatsWeek8t">Graph 8</strong>
|
|
||||||
<strong class="sparkLineStatsWeek8s pull-right">Avg</strong>
|
|
||||||
</div>
|
|
||||||
<div class="panel-body panelInside">
|
|
||||||
<table class="table">
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek8"></div></td>
|
|
||||||
<td style="border-top: 0px solid #ddd;"><div class="sparkLineStatsWeek8b"></div></td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -491,8 +240,6 @@
             </div>


     <!-- /.row -->
 </div>
 <!-- /#page-wrapper -->

@@ -502,13 +249,15 @@
     <!-- import graph function -->
     <script src="{{ url_for('static', filename='js/sentiment_trending.js') }}"></script>
     <script>
+    $("#LoadAll").hide();
+
     $(document).ready(function(){
         activePage = $('h1.page-header').attr('data-page');
         $("#"+activePage).addClass("active");
         $('[data-toggle="tooltip"]').tooltip();

+        $("#LoadAll").click(function(){ draw_page("True"); });
+        draw_page("False");
         // Reload every 30min
         setTimeout(function(){ location.reload(); }, 30*60*1000);
     });

@@ -12,7 +12,6 @@
     <link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
     <link href="{{ url_for('static', filename='css/sb-admin-2.css') }}" rel="stylesheet">
     <link href="{{ url_for('static', filename='css/dataTables.bootstrap.css') }}" rel="stylesheet" type="text/css" />
-    <link href="{{ url_for('static', filename='css/jquery-ui.min.css') }}" rel="stylesheet" type="text/css" />
     <script language="javascript" src="{{ url_for('static', filename='js/jquery.js')}}"></script>
     <script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
     <script src="{{ url_for('static', filename='js/jquery.dataTables.min.js') }}"></script>

@@ -233,15 +232,15 @@
     $.getJSON(url, function (data) {
         if (data.length != 0) {
             var html_to_add = "";
-            html_to_add += "<table class=\"table table-striped\">";
+            html_to_add += "<table id=\"modal-table\" class=\"table table-striped\">";
             html_to_add += "<thead>";
             html_to_add += "<tr>";
             html_to_add += "<th>Source</th>";
             html_to_add += "<th>Date</th>";
             html_to_add += "<th>Encoding</th>";
             html_to_add += "<th>Size (Kb)</th>";
-            html_to_add += "<th>Mime</th>";
-            html_to_add += "<th>(# lines, Max line length)</th>";
+            html_to_add += "<th># lines</th>";
+            html_to_add += "<th>Max length</th>";
             html_to_add += "<th>Preview</th>";
             html_to_add += "</tr>";
             html_to_add += "</thead>";

@@ -253,8 +252,8 @@
             html_to_add += "<td>"+curr_data.date+"</td>";
             html_to_add += "<td>"+curr_data.encoding+"</td>";
             html_to_add += "<td>"+curr_data.size+"</td>";
-            html_to_add += "<td>"+curr_data.mime+"</td>";
-            html_to_add += "<td>("+curr_data.lineinfo[0]+", "+curr_data.lineinfo[1]+")</td>";
+            html_to_add += "<td>"+curr_data.lineinfo[0]+"</td>";
+            html_to_add += "<td>"+curr_data.lineinfo[1]+"</td>";
             html_to_add += "<td><div class=\"row\"><button class=\"btn btn-xs btn-default\" data-toggle=\"popover\" data-placement=\"left\" data-content=\""+curr_data.content+"\">Preview content</button><a target=\"_blank\" href=\"{{ url_for('showsavedpaste') }}?paste="+curr_data.path+"&num=0\"> <button type=\"button\" class=\"btn btn-xs btn-info\">Show Paste</button></a></div></td>";

             html_to_add += "</tr>";

@@ -264,6 +263,7 @@
             $("#mymodalbody").html(html_to_add);
             $("[data-toggle=popover]").popover();
             $("#button_show_plot").attr("href", "{{ url_for('terms_plot_tool')}}"+"?term="+the_modal.attr('data-term') );
+            $('#modal-table').DataTable();
         } else {
             $("#mymodalbody").html("No paste containing this term has been received yet.");
             $("#button_show_plot").attr("href", "{{ url_for('terms_plot_tool')}}"+"?term="+the_modal.attr('data-term') );

@@ -12,7 +12,6 @@
     <link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
     <link href="{{ url_for('static', filename='css/sb-admin-2.css') }}" rel="stylesheet">
     <link href="{{ url_for('static', filename='css/dataTables.bootstrap.css') }}" rel="stylesheet" type="text/css" />
-    <link href="{{ url_for('static', filename='css/jquery-ui.min.css') }}" rel="stylesheet" type="text/css" />
     <script language="javascript" src="{{ url_for('static', filename='js/jquery.js')}}"></script>
     <script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
     <script src="{{ url_for('static', filename='js/jquery.dataTables.min.js') }}"></script>

@@ -4,24 +4,28 @@ set -e

 wget http://dygraphs.com/dygraph-combined.js -O ./static/js/dygraph-combined.js

-SBADMIN_VERSION='1.0.4'
+SBADMIN_VERSION='3.3.7'

 rm -rf temp
 mkdir temp

 wget https://github.com/BlackrockDigital/startbootstrap-sb-admin/archive/v${SBADMIN_VERSION}.zip -O temp/${SBADMIN_VERSION}.zip
+wget https://github.com/BlackrockDigital/startbootstrap-sb-admin-2/archive/v${SBADMIN_VERSION}.zip -O temp/${SBADMIN_VERSION}-2.zip
 unzip temp/${SBADMIN_VERSION}.zip -d temp/
-mv temp/startbootstrap-sb-admin-${SBADMIN_VERSION} temp/sb-admin-2
+unzip temp/${SBADMIN_VERSION}-2.zip -d temp/
+mv temp/startbootstrap-sb-admin-${SBADMIN_VERSION} temp/sb-admin
+mv temp/startbootstrap-sb-admin-2-${SBADMIN_VERSION} temp/sb-admin-2

 rm -rf ./static/js/plugins
-mv temp/sb-admin-2/js/* ./static/js/
+mv temp/sb-admin/js/* ./static/js/

 rm -rf ./static/fonts/ ./static/font-awesome/
-mv temp/sb-admin-2/fonts/ ./static/
-mv temp/sb-admin-2/font-awesome/ ./static/
+mv temp/sb-admin/fonts/ ./static/
+mv temp/sb-admin/font-awesome/ ./static/

 rm -rf ./static/css/plugins/
-mv temp/sb-admin-2/css/* ./static/css/
+mv temp/sb-admin/css/* ./static/css/
+mv temp/sb-admin-2/dist/css/* ./static/css/

 rm -rf temp

@@ -39,12 +43,17 @@ wget https://raw.githubusercontent.com/flot/flot/master/jquery.flot.pie.js -O ./
 wget https://raw.githubusercontent.com/flot/flot/master/jquery.flot.time.js -O ./static/js/jquery.flot.time.js
 wget https://raw.githubusercontent.com/flot/flot/master/jquery.flot.stack.js -O ./static/js/jquery.flot.stack.js

-#Ressources for sparkline and canvasJS
+#Ressources for sparkline and canvasJS and slider
 wget http://omnipotent.net/jquery.sparkline/2.1.2/jquery.sparkline.min.js -O ./static/js/jquery.sparkline.min.js
+mkdir temp
 wget http://canvasjs.com/fdm/chart/ -O temp/canvasjs.zip
 unzip temp/canvasjs.zip -d temp/
-mkdir temp
 mv temp/jquery.canvasjs.min.js ./static/js/jquery.canvasjs.min.js

+wget https://jqueryui.com/resources/download/jquery-ui-1.12.0.zip -O temp/jquery-ui.zip
+unzip temp/jquery-ui.zip -d temp/
+mv temp/jquery-ui-1.12.0/jquery-ui.min.js ./static/js/jquery-ui.min.js
+mv temp/jquery-ui-1.12.0/jquery-ui.min.css ./static/css/jquery-ui.min.css
 rm -rf temp

 mkdir -p ./static/image