Merge pull request #65 from mokaddem/production

New modules and upgraded front-end
Raphaël Vinot 2016-08-11 09:54:51 +02:00 committed by GitHub
commit cb49dbb3a3
39 changed files with 2223 additions and 199 deletions

3
.gitignore vendored
View file

@ -17,6 +17,9 @@ logs/
var/www/static/
!var/www/static/css/dygraph_gallery.css
!var/www/static/js/indexjavascript.js
!var/www/static/js/moduleTrending.js
!var/www/static/js/plot-graph.js
!var/www/static/js/trendingchart.js
# Local config
bin/packages/config.cfg

57
bin/Browse_warning_paste.py Executable file
View file

@ -0,0 +1,57 @@
#!/usr/bin/env python2
# -*-coding:UTF-8 -*
"""
The Browse_warning_paste module
====================
This module saves flagged pastes (logged as 'warning') in Redis for further use,
such as browsing by category
Its input comes from other modules, namely:
Credential, CreditCard, SQLinjection, CVE, Keys, Mail and Phone
"""
import redis
import time
from datetime import datetime, timedelta
from packages import Paste
from pubsublogger import publisher
from Helper import Process
if __name__ == "__main__":
publisher.port = 6380
publisher.channel = "Script"
config_section = 'Browse_warning_paste'
p = Process(config_section)
server = redis.StrictRedis(
host=p.config.get("Redis_Level_DB", "host"),
port=p.config.get("Redis_Level_DB", "port"),
db=p.config.get("Redis_Level_DB", "db"))
# FUNCTIONS #
publisher.info("Script duplicate started")
while True:
message = p.get_from_set()
if message is not None:
module_name, p_path = message.split(';')
#PST = Paste.Paste(p_path)
else:
publisher.debug("Script Attribute is idling 10s")
time.sleep(10)
continue
# Add in redis
# Format in set: WARNING_moduleName -> p_path
key = "WARNING_" + module_name
print key + ' -> ' + p_path
server.sadd(key, p_path)
publisher.info('Saved in warning paste {}'.format(p_path))
#print 'Saved in warning paste {}'.format(p_path)

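To make the storage format above concrete: Browse_warning_paste keeps one Redis set per reporting module, named WARNING_<module>. Below is a minimal sketch of a consumer that lists those sets; it assumes a reachable Redis instance on the [Redis_Level_DB] defaults from config.cfg (localhost:2016, db 0), and the module names mirror the producers elsewhere in this commit.

import redis

# Assumed connection parameters: the [Redis_Level_DB] defaults from config.cfg.
r = redis.StrictRedis(host='localhost', port=2016, db=0)

def list_flagged_pastes(module_name):
    # One set per reporting module, as written by Browse_warning_paste.py above.
    return sorted(r.smembers('WARNING_' + module_name))

for module in ('credential', 'creditcard', 'sqlinjection', 'cve', 'keys', 'mail', 'phone'):
    print('{}: {} flagged paste(s)'.format(module, len(list_flagged_pastes(module))))
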
View file

@ -1,10 +1,12 @@
#!/usr/bin/env python2
# -*-coding:UTF-8 -*
import time
import sys
from packages import Paste
from pubsublogger import publisher
from Helper import Process
import re
from pyfaup.faup import Faup
if __name__ == "__main__":
publisher.port = 6380
@ -13,15 +15,17 @@ if __name__ == "__main__":
p = Process(config_section)
publisher.info("Find credentials")
faup = Faup()
critical = 8
regex_web = "((?:https?:\/\/)[-_0-9a-zA-Z]+\.[0-9a-zA-Z]+)"
regex_cred = "[a-zA-Z0-9._-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}:[a-zA-Z0-9\_\-]+"
regex_site_for_stats = "@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}:"
while True:
message = p.get_from_set()
if message is None:
publisher.debug("Script Credential is Idling 10s")
print('Sleeping')
time.sleep(10)
continue
@ -37,11 +41,12 @@ if __name__ == "__main__":
if len(creds) == 0:
continue
sites = set(re.findall(regex_web, content))
sites = re.findall(regex_web, content) #Used to count occurrences
sites_set = set(re.findall(regex_web, content))
message = 'Checked {} credentials found.'.format(len(creds))
if sites:
message += ' Related websites: {}'.format(', '.join(sites))
if sites_set:
message += ' Related websites: {}'.format(', '.join(sites_set))
to_print = 'Credential;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, message)
@ -51,8 +56,33 @@ if __name__ == "__main__":
print("========> Found more than 10 credentials in this file : {}".format(filepath))
publisher.warning(to_print)
#Send to duplicate
p.populate_set_out(filepath)
if sites:
print("=======> Probably on : {}".format(', '.join(sites)))
p.populate_set_out(filepath, 'Duplicate')
#Send to BrowseWarningPaste
p.populate_set_out('credential;{}'.format(filepath), 'BrowseWarningPaste')
#Put in form, count occurrences, then send to moduleStats
creds_sites = {}
site_occurence = re.findall(regex_site_for_stats, content)
for site in site_occurence:
site_domain = site[1:-1]
if site_domain in creds_sites.keys():
creds_sites[site_domain] += 1
else:
creds_sites[site_domain] = 1
for url in sites:
faup.decode(url)
domain = faup.get()['domain']
if domain in creds_sites.keys():
creds_sites[domain] += 1
else:
creds_sites[domain] = 1
for site, num in creds_sites.iteritems(): # Send for each different site to moduleStats
print 'credential;{};{};{}'.format(num, site, paste.p_date)
p.populate_set_out('credential;{};{};{}'.format(num, site, paste.p_date), 'ModuleStats')
if sites_set:
print("=======> Probably on : {}".format(', '.join(sites_set)))
else:
publisher.info(to_print)

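The block above counts how often each site appears next to a credential and emits one 'credential;<count>;<site>;<date>' message per site to ModuleStats. Here is a standalone sketch of that counting step using the regexes from the diff; pyfaup is replaced by a naive split so the snippet runs without the Faup bindings, and the sample content and date are invented.

import re
from collections import Counter

# Regexes copied from the diff above.
regex_web = r"((?:https?:\/\/)[-_0-9a-zA-Z]+\.[0-9a-zA-Z]+)"
regex_site_for_stats = r"@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}:"

# Invented sample content.
content = "user@example.com:hunter2\nhttp://example.com/login\nadmin@example.com:letmein"

creds_sites = Counter()
for site in re.findall(regex_site_for_stats, content):
    creds_sites[site[1:-1]] += 1                 # strip the leading '@' and trailing ':'
for url in set(re.findall(regex_web, content)):
    creds_sites[url.split('//', 1)[1]] += 1      # crude stand-in for faup.get()['domain']

for site, num in creds_sites.items():
    print('credential;{};{};{}'.format(num, site, '20160811'))   # message sent to ModuleStats
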
View file

@ -66,7 +66,9 @@ if __name__ == "__main__":
publisher.warning('{}Checked {} valid number(s)'.format(
to_print, len(creditcard_set)))
#Send to duplicate
p.populate_set_out(filename)
p.populate_set_out(filepath, 'Redis_Duplicate')
#send to Browse_warning_paste
p.populate_set_out('creditcard;{}'.format(filename), 'BrowseWarningPaste')
else:
publisher.info('{}CreditCard related'.format(to_print))
else:

View file

@ -40,9 +40,9 @@ if __name__ == "__main__":
# REDIS #
r_serv1 = redis.StrictRedis(
host=p.config.get("Redis_Level_DB", "host"),
port=p.config.get("Redis_Level_DB", "port"),
db=p.config.get("Redis_Level_DB", "db"))
host=p.config.get("Redis_Level_DB_Curve", "host"),
port=p.config.get("Redis_Level_DB_Curve", "port"),
db=p.config.get("Redis_Level_DB_Curve", "db"))
# FUNCTIONS #
publisher.info("Script Curve started")

View file

@ -25,6 +25,11 @@ def search_cve(message):
print('{} contains CVEs'.format(paste.p_name))
publisher.warning('{} contains CVEs'.format(paste.p_name))
#send to Browse_warning_paste
p.populate_set_out('cve;{}'.format(filepath), 'BrowseWarningPaste')
#Send to duplicate
p.populate_set_out(filepath, 'Duplicate')
if __name__ == '__main__':
# If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh)
# Port of the redis instance used by pubsublogger
@ -53,5 +58,3 @@ if __name__ == '__main__':
# Do something with the message from the queue
search_cve(message)
# (Optional) Send that thing to the next queue
#p.populate_set_out(something_has_been_done)

View file

@ -7,10 +7,12 @@ The Duplicate module
In short, this module checks for duplicate pastes.
Its input comes from other modules, namely:
Credential, CreditCard, Keys, Mails and Phone
Credential, CreditCard, Keys, Mails, SQLinjectionDetection, CVE and Phone
This version differs from v1 by using only Redis instead of JSON files stored on disk
Performs comparisons with ssdeep and tlsh
Requirements:
-------------
@ -22,6 +24,7 @@ import time
from datetime import datetime, timedelta
import json
import ssdeep
import tlsh
from packages import Paste
from pubsublogger import publisher
@ -36,8 +39,12 @@ if __name__ == "__main__":
p = Process(config_section)
maximum_month_range = int(p.config.get("Modules_Duplicates", "maximum_month_range"))
threshold_duplicate = int(p.config.get("Modules_Duplicates", "threshold_duplicate"))
min_paste_size = float(p.config.get("Modules_Duplicates", "min_paste_size"))
threshold_duplicate_ssdeep = int(p.config.get("Modules_Duplicates", "threshold_duplicate_ssdeep"))
threshold_duplicate_tlsh = int(p.config.get("Modules_Duplicates", "threshold_duplicate_tlsh"))
threshold_set = {}
threshold_set['ssdeep'] = threshold_duplicate_ssdeep
threshold_set['tlsh'] = threshold_duplicate_tlsh
min_paste_size = float(p.config.get("Modules_Duplicates", "min_paste_size"))
# REDIS #
dico_redis = {}
@ -47,7 +54,7 @@ if __name__ == "__main__":
dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
host=p.config.get("Redis_Level_DB", "host"), port=year,
db=month)
#print("dup: "+str(year)+str(month).zfill(2)+"\n")
#print("dup: "+str(year)+str(month).zfill(2)+"\n")
# FUNCTIONS #
publisher.info("Script duplicate started")
@ -70,10 +77,11 @@ if __name__ == "__main__":
continue
# the paste is too small
if (PST._get_p_size() < min_paste_size):
if (PST._get_p_size() < min_paste_size):
continue
PST._set_p_hash_kind("ssdeep")
PST._set_p_hash_kind("tlsh")
# Assign the correct redis connection
r_serv1 = dico_redis[PST.p_date.year + PST.p_date.month]
@ -86,7 +94,7 @@ if __name__ == "__main__":
curr_date_range = date_today - timedelta(days = diff_month*30.4166666)
to_append = str(curr_date_range.year)+str(curr_date_range.month).zfill(2)
dico_range_list.append(to_append)
# Use all dico in range
dico_range_list = dico_range_list[0:maximum_month_range]
@ -95,43 +103,51 @@ if __name__ == "__main__":
r_serv0 = dico_redis[yearly_index]
r_serv0.incr("current_index")
index = r_serv0.get("current_index")+str(PST.p_date)
# Open selected dico range
# Open selected dico range
opened_dico = []
for dico_name in dico_range_list:
opened_dico.append([dico_name, dico_redis[dico_name]])
# retrieve hash from paste
paste_hash = PST._get_p_hash()
paste_hashes = PST._get_p_hash()
# Go through the Database of the dico (of the month)
for curr_dico_name, curr_dico_redis in opened_dico:
for dico_hash in curr_dico_redis.smembers('HASHS'):
try:
percent = ssdeep.compare(dico_hash, paste_hash)
if percent > threshold_duplicate:
# Go throught the Database of the dico filter (month)
r_serv_dico = dico_redis[curr_dico_name]
# index of paste
index_current = r_serv_dico.get(dico_hash)
paste_path = r_serv_dico.get(index_current)
if paste_path != None:
hash_dico[dico_hash] = (paste_path, percent)
for hash_type, paste_hash in paste_hashes.iteritems():
for dico_hash in curr_dico_redis.smembers('HASHS_'+hash_type):
try:
if hash_type == 'ssdeep':
percent = 100-ssdeep.compare(dico_hash, paste_hash)
else:
percent = tlsh.diffxlen(dico_hash, paste_hash)
#print 'comparing: ' + str(PST.p_path[44:]) + ' and ' + str(paste_path[44:]) + ' percentage: ' + str(percent)
except:
# ssdeep hash not comparable
print 'ssdeep hash not comparable, cleaning bad hash: '+dico_hash
curr_dico_redis.srem('HASHS', dico_hash)
threshold_duplicate = threshold_set[hash_type]
if percent < threshold_duplicate:
percent = 100 - percent if hash_type == 'ssdeep' else percent #recover the correct percent value for ssdeep
# Go through the Database of the dico filter (month)
r_serv_dico = dico_redis[curr_dico_name]
# index of paste
index_current = r_serv_dico.get(dico_hash)
paste_path = r_serv_dico.get(index_current)
if paste_path != None:
hash_dico[dico_hash] = (hash_type, paste_path, percent)
print '['+hash_type+'] '+'comparing: ' + str(PST.p_path[44:]) + ' and ' + str(paste_path[44:]) + ' percentage: ' + str(percent)
except Exception,e:
print str(e)
#print 'hash not comparable, bad hash: '+dico_hash+' , current_hash: '+paste_hash
# Add paste in DB after checking to prevent its analysis twice
# hash_i -> index_i AND index_i -> PST.PATH
# hash_type_i -> index_i AND index_i -> PST.PATH
r_serv1.set(index, PST.p_path)
r_serv1.sadd("INDEX", index)
# Adding the hash in Redis
r_serv1.set(paste_hash, index)
r_serv1.sadd("HASHS", paste_hash)
# Adding hashes in Redis
for hash_type, paste_hash in paste_hashes.iteritems():
r_serv1.set(paste_hash, index)
r_serv1.sadd("HASHS_"+hash_type, paste_hash)
##################### Similarity found #######################
# if there is data in this dictionary
@ -153,7 +169,7 @@ if __name__ == "__main__":
publisher.debug('{}Processed in {} sec'.format(to_print, y-x))
#print '{}Processed in {} sec'.format(to_print, y-x)
except IOError:
to_print = 'Duplicate;{};{};{};'.format(
PST.p_source, PST.p_date, PST.p_name)

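The comparison logic above mixes two metrics with opposite directions: ssdeep.compare() returns a 0-100 similarity (higher means closer) while tlsh.diffxlen() returns a distance (lower means closer). The module therefore converts the ssdeep score into a pseudo-distance (100 - score) so a single 'value < threshold' test covers both, then converts it back for display. A small sketch of that normalisation, using the default thresholds from config.cfg further down (50 for ssdeep, 100 for tlsh); the raw values passed in are invented examples.

# Default thresholds from config.cfg below.
threshold_set = {'ssdeep': 50, 'tlsh': 100}

def is_duplicate(hash_type, raw_value):
    # raw_value is the output of ssdeep.compare() or tlsh.diffxlen().
    distance = 100 - raw_value if hash_type == 'ssdeep' else raw_value
    if distance < threshold_set[hash_type]:
        # recover the human-readable value (similarity % for ssdeep)
        display = 100 - distance if hash_type == 'ssdeep' else distance
        return True, display
    return False, None

print(is_duplicate('ssdeep', 72))   # (True, 72): 72% similar, pseudo-distance 28 < 50
print(is_duplicate('tlsh', 35))     # (True, 35): distance 35 < 100
print(is_duplicate('ssdeep', 20))   # (False, None): pseudo-distance 80 >= 50
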
View file

@ -52,7 +52,7 @@ if __name__ == '__main__':
else:
# TODO Store the name of the empty paste inside a Redis-list.
print "Empty Paste: not processed"
publisher.debug("Empty Paste: {0} not processed".format(paste))
publisher.debug("Empty Paste: {0} not processed".format(message))
continue
else:
print "Empty Queues: Waiting..."

View file

@ -17,7 +17,9 @@ def search_gpg(message):
if '-----BEGIN PGP MESSAGE-----' in content:
publisher.warning('{} has a PGP enc message'.format(paste.p_name))
#Send to duplicate
p.populate_set_out(message)
p.populate_set_out(message, 'Duplicate')
#send to Browse_warning_paste
p.populate_set_out('keys;{}'.format(message), 'BrowseWarningPaste')
if __name__ == '__main__':
@ -49,4 +51,3 @@ if __name__ == '__main__':
search_gpg(message)
# (Optional) Send that thing to the next queue
#p.populate_set_out(something_has_been_done)

View file

@ -67,6 +67,7 @@ function launching_lvldb {
db1_y='2013'
db2_y='2014'
db3_y='2016'
db4_y='3016'
nb_db=13
screen -dmS "LevelDB"
@ -78,6 +79,10 @@ function launching_lvldb {
screen -S "LevelDB" -X screen -t "2014" bash -c 'redis-leveldb -H '$lvdbhost' -D '$lvdbdir'2014/ -P '$db2_y' -M '$nb_db'; read x'
sleep 0.1
screen -S "LevelDB" -X screen -t "2016" bash -c 'redis-leveldb -H '$lvdbhost' -D '$lvdbdir'2016/ -P '$db3_y' -M '$nb_db'; read x'
# For Curve
sleep 0.1
screen -S "LevelDB" -X screen -t "3016" bash -c 'redis-leveldb -H '$lvdbhost' -D '$lvdbdir'3016/ -P '$db4_y' -M '$nb_db'; read x'
}
function launching_logs {
@ -140,6 +145,12 @@ function launching_scripts {
screen -S "Script" -X screen -t "Cve" bash -c './Cve.py; read x'
sleep 0.1
screen -S "Script" -X screen -t "WebStats" bash -c './WebStats.py; read x'
sleep 0.1
screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
sleep 0.1
screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
sleep 0.1
screen -S "Script" -X screen -t "Browse_warning_paste" bash -c './Browse_warning_paste.py; read x'
}
#If no params, display the help

View file

@ -61,9 +61,16 @@ if __name__ == "__main__":
if MX_values[0] > is_critical:
publisher.warning(to_print)
#Send to duplicate
p.populate_set_out(filename)
p.populate_set_out(filename, 'Duplicate')
else:
publisher.info(to_print)
#Send to ModuleStats
for mail in MX_values[1]:
print 'mail;{};{};{}'.format(1, mail, PST.p_date)
p.populate_set_out('mail;{};{};{}'.format(1, mail, PST.p_date), 'ModuleStats')
p.populate_set_out('mail;{}'.format(filename), 'BrowseWarningPaste')
prec_filename = filename
else:

186
bin/ModuleStats.py Executable file
View file

@ -0,0 +1,186 @@
#!/usr/bin/env python2
# -*-coding:UTF-8 -*
"""
The ModuleStats module: computes keyword statistics per module and paste-provider statistics, stored in Redis for the trending pages
"""
import time
import datetime
import redis
import os
from packages import lib_words
from packages.Date import Date
from pubsublogger import publisher
from Helper import Process
from packages import Paste
# Config Var
max_set_cardinality = 7
def get_date_range(num_day):
curr_date = datetime.date.today()
date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2))
date_list = []
for i in range(0, num_day+1):
date_list.append(date.substract_day(i))
return date_list
def compute_most_posted(server, message):
module, num, keyword, paste_date = message.split(';')
redis_progression_name_set = 'top_'+ module +'_set'
# Add/Update in Redis
prev_score = server.hget(paste_date, module+'-'+keyword)
if prev_score is not None:
ok = server.hset(paste_date, module+'-'+keyword, int(prev_score) + int(num))
else:
ok = server.hset(paste_date, module+'-'+keyword, int(num))
# Compute Most Posted
date = get_date_range(0)[0]
# check if this keyword is eligible for the top set
keyword_total_sum = 0
curr_value = server.hget(date, module+'-'+keyword)
keyword_total_sum += int(curr_value) if curr_value is not None else 0
if keyword in server.smembers(redis_progression_name_set): # if it is already in the set
return
if (server.scard(redis_progression_name_set) < max_set_cardinality):
server.sadd(redis_progression_name_set, keyword)
else: #set at full capacity
#Check value for all members
member_set = []
for keyw in server.smembers(redis_progression_name_set):
keyw_value = server.hget(paste_date, module+'-'+keyw)
if keyw_value is not None:
member_set.append((keyw, int(keyw_value)))
else: #No data for this set for today
member_set.append((keyw, int(0)))
member_set.sort(key=lambda tup: tup[1])
if len(member_set) > 0:
if member_set[0][1] < keyword_total_sum:
#remove min from set and add the new one
print module + ': adding ' +keyword+ '(' +str(keyword_total_sum)+') in set and removing '+member_set[0][0]+'('+str(member_set[0][1])+')'
server.srem(redis_progression_name_set, member_set[0][0])
server.sadd(redis_progression_name_set, keyword)
def compute_provider_info(server, path):
redis_avg_size_name_set = 'top_size_set'
redis_providers_name_set = 'providers_set'
paste = Paste.Paste(path)
paste_size = paste._get_p_size()
paste_provider = paste.p_source
paste_date = paste._get_p_date()
new_avg = paste_size
# Add/Update in Redis
prev_num_paste = server.hget(paste_provider+'_num', paste_date)
if prev_num_paste is not None:
ok = server.hset(paste_provider+'_num', paste_date, int(prev_num_paste)+1)
prev_sum_size = server.hget(paste_provider+'_size', paste_date)
if prev_sum_size is not None:
ok = server.hset(paste_provider+'_size', paste_date, float(prev_sum_size)+paste_size)
new_avg = (float(prev_sum_size)+paste_size) / (int(prev_num_paste)+1)
else:
ok = server.hset(paste_provider+'_size', paste_date, paste_size)
else:
ok = server.hset(paste_provider+'_num', paste_date, 1)
prev_num_paste = 0
#
# Compute Most Posted
#
# Size
if paste_provider not in server.smembers(redis_avg_size_name_set): # if it is not already in the set
if (server.scard(redis_avg_size_name_set) < max_set_cardinality):
server.sadd(redis_avg_size_name_set, paste_provider)
else: #set full capacity
#Check value for all members
member_set = []
for provider in server.smembers(redis_avg_size_name_set):
curr_avg = 0.0
curr_size = server.hget(provider+'_size', paste_date)
curr_num = server.hget(provider+'_num', paste_date)
if (curr_size is not None) and (curr_num is not None):
curr_avg = float(curr_size) / float(curr_num)
member_set.append((provider, curr_avg))
member_set.sort(key=lambda tup: tup[1])
if member_set[0][1] < new_avg:
#remove min from set and add the new one
print 'Size - adding ' +paste_provider+ '(' +str(new_avg)+') in set and removing '+member_set[0][0]+'('+str(member_set[0][1])+')'
server.srem(redis_avg_size_name_set, member_set[0][0])
server.sadd(redis_avg_size_name_set, paste_provider)
# Num
if paste_provider not in server.smembers(redis_providers_name_set): # if it is not already in the set
if (server.scard(redis_providers_name_set) < max_set_cardinality):
server.sadd(redis_providers_name_set, paste_provider)
else: #set full capacity
#Check value for all members
member_set = []
for provider in server.smembers(redis_providers_name_set):
curr_num = 0
curr_num = server.hget(provider+'_num', paste_date)
if curr_num is not None:
member_set.append((provider, int(curr_num)))
member_set.sort(key=lambda tup: tup[1])
if len(member_set) > 0:
if member_set[0][1] < int(prev_num_paste)+1:
#remove min from set and add the new one
print 'Num - adding ' +paste_provider+ '(' +str(int(prev_num_paste)+1)+') in set and removing '+member_set[0][0]+'('+str(member_set[0][1])+')'
server.srem(redis_providers_name_set, member_set[0][0])
server.sadd(redis_providers_name_set, paste_provider)
if __name__ == '__main__':
# If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh)
# Port of the redis instance used by pubsublogger
publisher.port = 6380
# Script is the default channel used for the modules.
publisher.channel = 'Script'
# Section name in bin/packages/modules.cfg
config_section = 'ModuleStats'
# Setup the I/O queues
p = Process(config_section)
# Sent to the logging a description of the module
publisher.info("Makes statistics about valid URL")
# REDIS #
r_serv_trend = redis.StrictRedis(
host=p.config.get("Redis_Level_DB_Trending", "host"),
port=p.config.get("Redis_Level_DB_Trending", "port"),
db=p.config.get("Redis_Level_DB_Trending", "db"))
# Endless loop getting messages from the input queue
while True:
# Get one message from the input queue
message = p.get_from_set()
if message is None:
publisher.debug("{} queue is empty, waiting".format(config_section))
print 'sleeping'
time.sleep(20)
continue
else:
# Do something with the message from the queue
if len(message.split(';')) > 1:
compute_most_posted(r_serv_trend, message)
else:
compute_provider_info(r_serv_trend, message)

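compute_most_posted() above maintains a bounded "top keywords" Redis set: once the set reaches max_set_cardinality (7 in the module), the member with the lowest daily total is evicted whenever a new keyword beats it. Below is a self-contained sketch of that eviction rule, with a plain dict and set standing in for the Redis hash and set; the cardinality is lowered to 2 here only so the eviction actually triggers, and the keyword values are invented.

max_set_cardinality = 2   # 7 in the module; lowered here so the eviction triggers
counts = {}               # keyword -> today's total (the Redis hash in the module)
top_set = set()           # the bounded Redis set in the module

def record(keyword, num):
    counts[keyword] = counts.get(keyword, 0) + num
    if keyword in top_set:
        return
    if len(top_set) < max_set_cardinality:
        top_set.add(keyword)
        return
    # Set is full: evict the member with the lowest total if the new keyword beats it.
    weakest = min(top_set, key=lambda k: counts.get(k, 0))
    if counts.get(weakest, 0) < counts[keyword]:
        top_set.discard(weakest)
        top_set.add(keyword)

record('gmail.com', 5)
record('yahoo.com', 2)
record('live.com', 1)     # total 1 does not beat the current minimum (yahoo.com, 2)
record('live.com', 4)     # total 5 now evicts yahoo.com
print(sorted(top_set))    # ['gmail.com', 'live.com']
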
View file

@ -23,8 +23,10 @@ def search_phone(message):
if len(results) > 4:
print results
publisher.warning('{} contains PID (phone numbers)'.format(paste.p_name))
#send to Browse_warning_paste
p.populate_set_out('phone;{}'.format(message), 'BrowseWarningPaste')
#Send to duplicate
p.populate_set_out(message)
p.populate_set_out(message, 'Duplicate')
if __name__ == '__main__':
# If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh)

151
bin/SQLInjectionDetection.py Executable file
View file

@ -0,0 +1,151 @@
#!/usr/bin/env python2
# -*-coding:UTF-8 -*
"""
SQL injection detection module
"""
import time
import string
import urllib2
import re
from pubsublogger import publisher
from Helper import Process
from packages import Paste
from pyfaup.faup import Faup
# Config Var
regex_injection = []
word_injection = []
word_injection_suspect = []
# Classic atomic injection
regex_injection1 = "([[AND |OR ]+[\'|\"]?[0-9a-zA-Z]+[\'|\"]?=[\'|\"]?[0-9a-zA-Z]+[\'|\"]?])"
regex_injection.append(regex_injection1)
# Time-based attack
regex_injection2 = ["SLEEP\([0-9]+", "BENCHMARK\([0-9]+", "WAIT FOR DELAY ", "WAITFOR DELAY"]
regex_injection2 = re.compile('|'.join(regex_injection2))
regex_injection.append(regex_injection2)
# Interesting keywords
word_injection1 = [" IF ", " ELSE ", " CASE ", " WHEN ", " END ", " UNION ", "SELECT ", " FROM ", " ORDER BY ", " WHERE ", " DELETE ", " DROP ", " UPDATE ", " EXEC "]
word_injection.append(word_injection1)
# Database special keywords
word_injection2 = ["@@version", "POW(", "BITAND(", "SQUARE("]
word_injection.append(word_injection2)
# Html keywords
word_injection3 = ["<script>"]
word_injection.append(word_injection3)
# Suspect characters
word_injection_suspect1 = ["\'", "\"", ";", "<", ">"]
word_injection_suspect += word_injection_suspect1
# Comment
word_injection_suspect2 = ["--", "#", "/*"]
word_injection_suspect += word_injection_suspect2
def analyse(url, path):
faup.decode(url)
url_parsed = faup.get()
resource_path = url_parsed['resource_path']
query_string = url_parsed['query_string']
result_path = 0
result_query = 0
if resource_path is not None:
result_path = is_sql_injection(resource_path)
if query_string is not None:
result_query = is_sql_injection(query_string)
if (result_path > 0) or (result_query > 0):
paste = Paste.Paste(path)
if (result_path > 1) or (result_query > 1):
print "Detected SQL in URL: "
print urllib2.unquote(url)
to_print = 'SQLInjection;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, "Detected SQL in URL")
publisher.warning(to_print)
#Send to duplicate
p.populate_set_out(path, 'Duplicate')
#send to Browse_warning_paste
p.populate_set_out('sqlinjection;{}'.format(path), 'BrowseWarningPaste')
else:
print "Potential SQL injection:"
print urllib2.unquote(url)
to_print = 'SQLInjection;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, "Potential SQL injection")
publisher.info(to_print)
# Try to detect if the url passed might be an SQL injection by applying the regexes
# defined above to it.
def is_sql_injection(url_parsed):
line = urllib2.unquote(url_parsed)
line = string.upper(line)
result = []
result_suspect = []
for regex in regex_injection:
temp_res = re.findall(regex, line)
if len(temp_res)>0:
result.append(temp_res)
for word_list in word_injection:
for word in word_list:
temp_res = string.find(line, string.upper(word))
if temp_res!=-1:
result.append(line[temp_res:temp_res+len(word)])
for word in word_injection_suspect:
temp_res = string.find(line, string.upper(word))
if temp_res!=-1:
result_suspect.append(line[temp_res:temp_res+len(word)])
if len(result)>0:
print result
return 2
elif len(result_suspect)>0:
print result_suspect
return 1
else:
return 0
if __name__ == '__main__':
# If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh)
# Port of the redis instance used by pubsublogger
publisher.port = 6380
# Script is the default channel used for the modules.
publisher.channel = 'Script'
# Section name in bin/packages/modules.cfg
config_section = 'SQLInjectionDetection'
# Setup the I/O queues
p = Process(config_section)
# Sent to the logging a description of the module
publisher.info("Try to detect SQL injection")
faup = Faup()
# Endless loop getting messages from the input queue
while True:
# Get one message from the input queue
message = p.get_from_set()
if message is None:
publisher.debug("{} queue is empty, waiting".format(config_section))
time.sleep(10)
continue
else:
# Do something with the message from the queue
url, date, path = message.split()
analyse(url, path)

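is_sql_injection() above returns a three-level score: 2 when a regex or keyword from the injection lists matches (reported as "Detected SQL in URL"), 1 when only suspect characters or comment markers are present ("Potential SQL injection"), and 0 otherwise. A minimal standalone sketch of that contract follows, with trimmed-down illustration lists and invented query strings.

import re
try:
    from urllib import unquote        # Python 2
except ImportError:
    from urllib.parse import unquote  # Python 3

WORDS = [" UNION ", "SELECT ", " DROP ", "SLEEP("]    # trimmed-down illustration lists
SUSPECT = ["'", '"', ";", "--", "#", "/*"]

def score(query_string):
    line = unquote(query_string).upper()
    if any(w in line for w in WORDS) or re.search(r"SLEEP\([0-9]+", line):
        return 2   # detected
    if any(c in line for c in SUSPECT):
        return 1   # suspect characters only
    return 0       # clean

print(score("id=1%20UNION%20SELECT%20password%20FROM%20users"))  # 2
print(score("name=O'Brien"))                                     # 1
print(score("page=3&lang=en"))                                   # 0
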
View file

@ -3,6 +3,7 @@
import redis
import pprint
import time
import os
import dns.exception
from packages import Paste
from packages import lib_refine
@ -39,6 +40,10 @@ if __name__ == "__main__":
port=p.config.getint("Redis_Cache", "port"),
db=p.config.getint("Redis_Cache", "db"))
# Protocol file path
protocolsfile_path = os.path.join(os.environ['AIL_HOME'],
p.config.get("Directories", "protocolsfile"))
# Country to log as critical
cc_critical = p.config.get("Url", "cc_critical")
@ -52,7 +57,14 @@ if __name__ == "__main__":
prec_filename = None
faup = Faup()
url_regex = "(http|https|ftp)\://([a-zA-Z0-9\.\-]+(\:[a-zA-Z0-9\.&%\$\-]+)*@)*((25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|([a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.(com|edu|gov|int|mil|net|org|biz|arpa|info|name|pro|aero|coop|museum|[a-zA-Z]{2}))(\:[0-9]+)*(/($|[a-zA-Z0-9\.\,\?\'\\\+&%\$#\=~_\-]+))*"
# Get all URI schemes from protocolsfile (used for Curve)
uri_scheme = ""
with open(protocolsfile_path, 'r') as scheme_file:
for scheme in scheme_file:
uri_scheme += scheme[:-1]+"|"
uri_scheme = uri_scheme[:-1]
url_regex = "("+uri_scheme+")\://([a-zA-Z0-9\.\-]+(\:[a-zA-Z0-9\.&%\$\-]+)*@)*((25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|([a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.(com|edu|gov|int|mil|net|org|biz|arpa|info|name|pro|aero|coop|museum|[a-zA-Z]{2}))(\:[0-9]+)*(/($|[a-zA-Z0-9\.\,\?\'\\\+&%\$#\=~_\-]+))*"
while True:
if message is not None:
@ -66,7 +78,7 @@ if __name__ == "__main__":
matching_url = re.search(url_regex, PST.get_p_content())
url = matching_url.group(0)
to_send = "{} {}".format(url, PST._get_p_date())
to_send = "{} {} {}".format(url, PST._get_p_date(), filename)
p.populate_set_out(to_send, 'Url')
faup.decode(url)
@ -103,10 +115,11 @@ if __name__ == "__main__":
print hostl, asn, cc, \
pycountry.countries.get(alpha2=cc).name
if cc == cc_critical:
publisher.warning(
'Url;{};{};{};Detected {} {}'.format(
to_print = 'Url;{};{};{};Detected {} {}'.format(
PST.p_source, PST.p_date, PST.p_name,
hostl, cc))
hostl, cc)
#publisher.warning(to_print)
print to_print
else:
print hostl, asn, cc

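The Web change above builds the URL regex dynamically: it reads one URI scheme per line from the file referenced by the "protocolsfile" entry in [Directories] and joins the schemes with '|' to form the alternation at the head of url_regex. A small sketch of that construction, with a hard-coded list standing in for the file contents.

# Hard-coded stand-in for the contents of the protocols file (one scheme per line).
protocols = ['http', 'https', 'ftp', 'vnc']

uri_scheme = "|".join(p.strip() for p in protocols if p.strip())
url_regex = "(" + uri_scheme + ")\\://([a-zA-Z0-9\\.\\-]+)"   # head of the full regex above

print(url_regex)   # (http|https|ftp|vnc)\://([a-zA-Z0-9\.\-]+)
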
View file

@ -15,57 +15,78 @@ from Helper import Process
from pyfaup.faup import Faup
# Config Var
threshold_need_to_look = 50
range_to_look = 10
threshold_to_plot = 1 # 500%
to_plot = set()
clean_frequency = 10 # minutes
threshold_total_sum = 200 # Above this value, a keyword is eligible for a progression
threshold_increase = 1.0 # The minimum sum of day-over-day occurrence ratios over num_day_to_look days
max_set_cardinality = 10 # The cardinality of the progression set
num_day_to_look = 5 # the progression detection starts num_day_to_look days in the past
def analyse(server, field_name):
def analyse(server, field_name, date, url_parsed):
field = url_parsed[field_name]
if field is not None:
prev_score = server.hget(field, date)
if prev_score is not None:
server.hset(field, date, int(prev_score) + 1)
else:
server.hset(field, date, 1)
if field_name == "domain": #save domain in a set for the monthly plot
domain_set_name = "domain_set_" + date[0:6]
server.sadd(domain_set_name, field)
print "added in " + domain_set_name +": "+ field
def get_date_range(num_day):
curr_date = datetime.date.today()
date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2))
date_list = []
def analyse_and_progression(server, field_name):
field = url_parsed[field_name]
if field is not None:
prev_score = server.hget(field, date)
if prev_score is not None:
print field + ' prev_score:' + prev_score
server.hset(field, date, int(prev_score) + 1)
if int(prev_score) + 1 > threshold_need_to_look: # threshold for false possitive
if(check_for_progression(server, field, date)):
to_plot.add(field)
else:
server.hset(field, date, 1)
for i in range(0, num_day+1):
date_list.append(date.substract_day(i))
return date_list
def compute_progression(server, field_name, num_day, url_parsed):
redis_progression_name = 'top_progression_'+field_name
redis_progression_name_set = 'top_progression_'+field_name+'_set'
def check_for_progression(server, field, date):
previous_data = set()
tot_sum = 0
for i in range(0, range_to_look):
curr_value = server.hget(field, Date(date).substract_day(i))
if curr_value is None: # no further data
break
else:
curr_value = int(curr_value)
previous_data.add(curr_value)
tot_sum += curr_value
if i == 0:
today_val = curr_value
keyword = url_parsed[field_name]
if keyword is not None:
date_range = get_date_range(num_day)
print 'totsum=' + str(tot_sum)
print 'div=' + str(tot_sum / today_val)
if tot_sum / today_val >= threshold_to_plot:
return True
else:
return False
# check if this keyword is eligible for progression
keyword_total_sum = 0
value_list = []
for date in date_range: # get value up to date_range
curr_value = server.hget(keyword, date)
value_list.append(int(curr_value if curr_value is not None else 0))
keyword_total_sum += int(curr_value) if curr_value is not None else 0
oldest_value = value_list[-1] if value_list[-1] != 0 else 1 #Avoid zero division
# The progression is based on the ratio: value[i] / value[i-1]
keyword_increase = 0
value_list_reversed = value_list[:]
value_list_reversed.reverse()
for i in range(1, len(value_list_reversed)):
divisor = value_list_reversed[i-1] if value_list_reversed[i-1] != 0 else 1
keyword_increase += value_list_reversed[i] / divisor
# filter
if (keyword_total_sum > threshold_total_sum) and (keyword_increase > threshold_increase):
if server.sismember(redis_progression_name_set, keyword): #if keyword is in the set
server.hset(redis_progression_name, keyword, keyword_increase) #update its value
elif (server.scard(redis_progression_name_set) < max_set_cardinality):
server.sadd(redis_progression_name_set, keyword)
else: #set at full capacity
#Check value for all members
member_set = []
for keyw in server.smembers(redis_progression_name_set):
member_set.append((keyw, int(server.hget(redis_progression_name, keyw))))
member_set.sort(key=lambda tup: tup[1])
if member_set[0][1] < keyword_increase:
#remove min from set and add the new one
server.srem(redis_progression_name_set, member_set[0][0])
server.sadd(redis_progression_name_set, keyword)
if __name__ == '__main__':
@ -89,18 +110,18 @@ if __name__ == '__main__':
host=p.config.get("Redis_Level_DB", "host"),
port=p.config.get("Redis_Level_DB", "port"),
db=p.config.get("Redis_Level_DB", "db"))
r_serv2 = redis.StrictRedis(
host=p.config.get("Redis_Level_DB_Domain", "host"),
port=p.config.get("Redis_Level_DB_Domain", "port"),
db=p.config.get("Redis_Level_DB_Domain", "db"))
r_serv_trend = redis.StrictRedis(
host=p.config.get("Redis_Level_DB_Trending", "host"),
port=p.config.get("Redis_Level_DB_Trending", "port"),
db=p.config.get("Redis_Level_DB_Trending", "db"))
# FILE CURVE SECTION #
csv_path_proto = os.path.join(os.environ['AIL_HOME'],
p.config.get("Directories", "protocolstrending_csv"))
protocolsfile_path = os.path.join(os.environ['AIL_HOME'],
p.config.get("Directories", "protocolsfile"))
p.config.get("Directories", "protocolsfile"))
csv_path_tld = os.path.join(os.environ['AIL_HOME'],
p.config.get("Directories", "tldstrending_csv"))
tldsfile_path = os.path.join(os.environ['AIL_HOME'],
@ -119,35 +140,41 @@ if __name__ == '__main__':
if message is None:
if generate_new_graph:
generate_new_graph = False
print 'Building graph'
today = datetime.date.today()
year = today.year
month = today.month
lib_words.create_curve_with_word_file(r_serv1, csv_path_proto,
print 'Building protocol graph'
lib_words.create_curve_with_word_file(r_serv_trend, csv_path_proto,
protocolsfile_path, year,
month)
lib_words.create_curve_with_word_file(r_serv1, csv_path_tld,
print 'Building tld graph'
lib_words.create_curve_with_word_file(r_serv_trend, csv_path_tld,
tldsfile_path, year,
month)
lib_words.create_curve_with_list(r_serv2, csv_path_domain,
to_plot, year, month)
print 'Building domain graph'
lib_words.create_curve_from_redis_set(r_serv_trend, csv_path_domain,
"domain", year,
month)
print 'end building'
publisher.debug("{} queue is empty, waiting".format(config_section))
print 'sleeping'
time.sleep(5)
time.sleep(5*60)
continue
else:
generate_new_graph = True
# Do something with the message from the queue
url, date = message.split()
url, date, path = message.split()
faup.decode(url)
url_parsed = faup.get()
analyse(r_serv1, 'scheme') # Scheme analysis
analyse(r_serv1, 'tld') # Tld analysis
analyse_and_progression(r_serv2, 'domain') # Domain analysis
analyse(r_serv_trend, 'scheme', date, url_parsed) #Scheme analysis
analyse(r_serv_trend, 'tld', date, url_parsed) #Tld analysis
analyse(r_serv_trend, 'domain', date, url_parsed) #Domain analysis
compute_progression(r_serv_trend, 'scheme', num_day_to_look, url_parsed)
compute_progression(r_serv_trend, 'tld', num_day_to_look, url_parsed)
compute_progression(r_serv_trend, 'domain', num_day_to_look, url_parsed)

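compute_progression() above scores a keyword by reading its daily counts oldest-to-newest and summing the day-over-day ratios value[i] / value[i-1] (zero divisors replaced by 1); a keyword enters the progression set only if its total exceeds threshold_total_sum (200) and the ratio sum exceeds threshold_increase (1.0). A worked sketch with invented daily counts:

threshold_total_sum = 200   # module default
threshold_increase = 1.0    # module default

def progression(daily_counts_oldest_first):
    total = sum(daily_counts_oldest_first)
    increase = 0.0
    for prev, curr in zip(daily_counts_oldest_first, daily_counts_oldest_first[1:]):
        increase += float(curr) / (prev if prev != 0 else 1)
    eligible = total > threshold_total_sum and increase > threshold_increase
    return total, increase, eligible

print(progression([10, 20, 40, 80, 160]))  # (310, 8.0, True): doubling every day
print(progression([300, 0, 0, 0, 0]))      # (300, 0.0, False): volume but no growth
print(progression([1, 2, 4, 8, 16]))       # (31, 8.0, False): growing, but below the volume cut
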
View file

@ -2,6 +2,7 @@ import hashlib
import crcmod
import mmh3
import ssdeep
import tlsh
class Hash(object):
@ -36,4 +37,7 @@ class Hash(object):
elif self.name == "ssdeep":
hash = ssdeep.hash(string)
elif self.name == "tlsh":
hash = tlsh.hash(string)
return hash

View file

@ -86,8 +86,8 @@ class Paste(object):
self.p_source = var[-5]
self.p_encoding = None
self.p_hash_kind = None
self.p_hash = None
self.p_hash_kind = {}
self.p_hash = {}
self.p_langage = None
self.p_nb_lines = None
self.p_max_length_line = None
@ -159,13 +159,13 @@ class Paste(object):
.. seealso:: Hash.py Object to get the available hashs.
"""
self.p_hash_kind = Hash(hashkind)
self.p_hash_kind[hashkind] = (Hash(hashkind))
def _get_p_hash(self):
"""
Setting the hashes of the paste, used as a kind of unique identifier
:return: hash string (md5, sha1....)
:return: a dictionary of hash strings (md5, sha1, ...)
:Example: PST._get_p_hash()
@ -174,7 +174,8 @@ class Paste(object):
.. seealso:: _set_p_hash_kind("md5")
"""
self.p_hash = self.p_hash_kind.Calculate(self.get_p_content())
for hash_name, the_hash in self.p_hash_kind.iteritems():
self.p_hash[hash_name] = the_hash.Calculate(self.get_p_content())
return self.p_hash
def _get_p_language(self):
@ -202,42 +203,6 @@ class Paste(object):
def _get_p_size(self):
return self.p_size
def _get_hash_lines(self, min=1, start=1, jump=10):
"""
Returning all the lines of the paste hashed.
:param min: -- (int) Minimum line length to be hashed.
:param start: -- (int) Number the line where to start.
:param jump: -- (int) Granularity of the hashing 0 or 1 means no jumps
(Maximum Granularity)
:return: a set([]) of hash.
.. warning:: Using a set here mean that this function will only return uniq hash.
If the paste is composed with 1000 time the same line, this function will return
just once the line.
This choice was made to avoid a certain redundancy and useless hash checking.
:Example: PST._get_hash_lines(1, 1, 0)
.. note:: You need first to "declare which kind of hash you want to use
before using this function
.. seealso:: _set_p_hash_kind("md5")
"""
S = set([])
f = self.get_p_content_as_file()
for num, line in enumerate(f, start):
if len(line) >= min:
if jump > 1:
if (num % jump) == 1:
S.add(self.p_hash_kind.Calculate(line))
else:
S.add(self.p_hash_kind.Calculate(line))
return S
def is_duplicate(self, obj, min=1, percent=50, start=1, jump=10):
"""
Returning the percent of similarity with another paste.
@ -329,7 +294,10 @@ class Paste(object):
self.store.hset(self.p_path, attr_name, json.dumps(value))
def _get_from_redis(self, r_serv):
return r_serv.hgetall(self.p_hash)
ans = {}
for hash_name, the_hash in self.p_hash:
ans[hash_name] = r_serv.hgetall(the_hash)
return ans
def _get_top_words(self, sort=False):
"""

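The Paste changes above turn p_hash_kind and p_hash into dictionaries keyed by hash name, so _set_p_hash_kind() can be called once per algorithm and _get_p_hash() returns every digest at once. A minimal sketch of that contract follows; hashlib stands in for the ssdeep/tlsh bindings so the snippet runs without them, and MiniPaste is a made-up stand-in for the real Paste class.

import hashlib

class MiniPaste(object):
    def __init__(self, content):
        self.content = content
        self.p_hash_kind = {}
        self.p_hash = {}

    def _set_p_hash_kind(self, hashkind):
        # Register one hasher per hash name, mirroring the dict introduced above.
        self.p_hash_kind[hashkind] = getattr(hashlib, hashkind)

    def _get_p_hash(self):
        # Compute every registered hash and return them keyed by name.
        for name, hasher in self.p_hash_kind.items():
            self.p_hash[name] = hasher(self.content.encode('utf-8')).hexdigest()
        return self.p_hash

p = MiniPaste("some paste content")
p._set_p_hash_kind("md5")
p._set_p_hash_kind("sha1")
print(p._get_p_hash())   # {'md5': '...', 'sha1': '...'}
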
View file

@ -1,5 +1,7 @@
[Directories]
bloomfilters = Blooms
#Duplicate_ssdeep
dicofilters = Dicos
pastes = PASTES
wordtrending_csv = var/www/static/csv/wordstrendingdata
@ -15,22 +17,23 @@ domainstrending_csv = var/www/static/csv/domainstrendingdata
##### Flask #####
[Flask]
#Number of minutes displayed for the number of processed pastes.
minute_processed_paste = 10
#Maximum number of characters to display in the tooltip
max_preview_char = 250
max_preview_char = 250
#Maximum number of character to display in the modal
max_preview_modal = 800
max_preview_modal = 800
#Default number of headers to display in trending graphs
default_display = 10
#Number of minutes displayed for the number of processed pastes.
minute_processed_paste = 10
#### Modules ####
[Modules_Duplicates]
#Number of month to look back
maximum_month_range = 3
#The value where two pastes are considerate duplicate.
threshold_duplicate = 50
#The value below which two pastes are considered duplicates for ssdeep.
threshold_duplicate_ssdeep = 50
#The value below which two pastes are considered duplicates for tlsh.
threshold_duplicate_tlsh = 100
#Minimum size of the paste considered
min_paste_size = 0.3
@ -57,15 +60,20 @@ port = 6379
db = 1
##### LevelDB #####
[Redis_Level_DB_Curve]
host = localhost
port = 3016
db = 0
[Redis_Level_DB]
host = localhost
port = 2016
db = 0
[Redis_Level_DB_Domain]
[Redis_Level_DB_Trending]
host = localhost
port = 2016
db = 3
db = 0
[Redis_Level_DB_Hashs]
host = localhost

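For reference, a hedged sketch of how a standalone script could consume the new [Redis_Level_DB_Trending] section the same way the modules do through the Process helper; it assumes it is run from the repository root so the relative config path resolves.

try:
    import ConfigParser as configparser   # Python 2
except ImportError:
    import configparser                   # Python 3
import redis

cfg = configparser.ConfigParser()
cfg.read('bin/packages/config.cfg')   # assumed relative path from the repository root

r_serv_trend = redis.StrictRedis(
    host=cfg.get("Redis_Level_DB_Trending", "host"),
    port=cfg.getint("Redis_Level_DB_Trending", "port"),
    db=cfg.getint("Redis_Level_DB_Trending", "db"))
print(r_serv_trend)
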
View file

@ -88,7 +88,7 @@ def create_curve_with_word_file(r_serv, csvfilename, feederfilename, year, month
with open(feederfilename, 'rb') as f:
# words of the files
words = sorted([word.strip() for word in f if word.strip()[0:2]!='//' ])
words = sorted([word.strip() for word in f if word.strip()[0:2]!='//' and word.strip()!='' ])
headers = ['Date'] + words
with open(csvfilename+'.csv', 'wb') as f:
@ -112,7 +112,7 @@ def create_curve_with_word_file(r_serv, csvfilename, feederfilename, year, month
row.append(value)
writer.writerow(row)
def create_curve_with_list(server, csvfilename, to_plot, year, month):
def create_curve_from_redis_set(server, csvfilename, set_to_plot, year, month):
"""Create a csv file used with dygraph.
:param r_serv: -- connection to the redis database
@ -122,15 +122,17 @@ def create_curve_with_list(server, csvfilename, to_plot, year, month):
:param month: -- (integer) The month to process
This function creates a .csv file using data in Redis.
It's checking if the words contained in to_plot and
It checks whether the words contained in set_to_plot and
their respective daily values exist.
"""
first_day = date(year, month, 01)
last_day = date(year, month, calendar.monthrange(year, month)[1])
words = sorted(to_plot)
redis_set_name = set_to_plot + "_set_" + str(year) + str(month).zfill(2)
words = list(server.smembers(redis_set_name))
headers = ['Date'] + words
with open(csvfilename+'.csv', 'wb') as f:
writer = csv.writer(f)

View file

@ -1,6 +1,6 @@
[Global]
subscribe = ZMQ_Global
publish = Redis_Global
publish = Redis_Global,Redis_ModuleStats
[Duplicates]
subscribe = Redis_Duplicate
@ -30,12 +30,12 @@ subscribe = Redis_Global
publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Redis_SourceCode,Redis_Cve
[CreditCards]
subscribe = Redis_CreditCards
publish = Redis_Duplicate
subscribe = Redis_CreditCard
publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste
[Mail]
subscribe = Redis_Mail
publish = Redis_Duplicate
publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste
[Onion]
subscribe = Redis_Onion
@ -52,23 +52,38 @@ publish = Redis_Url,ZMQ_Url
[WebStats]
subscribe = Redis_Url
[SQLInjectionDetection]
subscribe = Redis_Url
publish = Redis_BrowseWarningPaste,Redis_Duplicate
[ModuleStats]
subscribe = Redis_ModuleStats
[Browse_warning_paste]
subscribe = Redis_BrowseWarningPaste
#[send_to_queue]
#subscribe = Redis_Cve
#publish = Redis_BrowseWarningPaste
[Release]
subscribe = Redis_Global
[Credential]
subscribe = Redis_Credential
publish = Redis_Duplicate
publish = Redis_Duplicate,Redis_ModuleStats,Redis_BrowseWarningPaste
[Cve]
subscribe = Redis_Cve
publish = Redis_BrowseWarningPaste,Redis_Duplicate
[Phone]
subscribe = Redis_Global
publish = Redis_Duplicate
publish = Redis_Duplicate,Redis_BrowseWarningPaste
[SourceCode]
subscribe = Redis_SourceCode
[Keys]
subscribe = Redis_Global
publish = Redis_Duplicate
publish = Redis_Duplicate,Redis_BrowseWarningPaste

View file

@ -96,5 +96,5 @@ rtmfp
ipps
pkcs11
acct
example
example
vnc

View file

@ -39,6 +39,12 @@ echo '/usr/local/lib' | sudo tee -a /etc/ld.so.conf.d/faup.conf
sudo ldconfig
popd
# tlsh
test ! -d tlsh && git clone git://github.com/trendmicro/tlsh.git
pushd tlsh/
./make
popd
# REDIS LEVEL DB #
test ! -d redis-leveldb/ && git clone https://github.com/KDr2/redis-leveldb.git
pushd redis-leveldb/
@ -72,6 +78,10 @@ pushd faup/src/lib/bindings/python/
python setup.py install
popd
# Py tlsh
pushd tlsh/py_ext
python setup.py build
python setup.py install
# Download the necessary NLTK corpora
HOME=$(pwd) python -m textblob.download_corpora

View file

@ -4,14 +4,18 @@
import redis
import ConfigParser
import json
import datetime
from flask import Flask, render_template, jsonify, request
import flask
import os
import sys
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
import Paste
from Date import Date
# CONFIG #
tlsh_to_percent = 1000.0 #Used to display the estimated percentage instead of a raw value
configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
if not os.path.exists(configfile):
raise Exception('Unable to find the configuration file. \
@ -35,6 +39,15 @@ r_serv_log = redis.StrictRedis(
port=cfg.getint("Redis_Log", "port"),
db=cfg.getint("Redis_Log", "db"))
r_serv_charts = redis.StrictRedis(
host=cfg.get("Redis_Level_DB_Trending", "host"),
port=cfg.getint("Redis_Level_DB_Trending", "port"),
db=cfg.getint("Redis_Level_DB_Trending", "db"))
r_serv_db = redis.StrictRedis(
host=cfg.get("Redis_Level_DB", "host"),
port=cfg.getint("Redis_Level_DB", "port"),
db=cfg.getint("Redis_Level_DB", "db"))
app = Flask(__name__, static_url_path='/static/')
@ -67,13 +80,32 @@ def parseStringToList(the_string):
strList += c
else:
the_list = strList.split(',')
if len(the_list) == 2:
if len(the_list) == 3:
elemList = elemList + the_list
elif len(the_list) == 2:
elemList.append(the_list)
elif len(the_list) > 1:
elemList.append(the_list[1:])
strList = ""
return elemList
def parseStringToList2(the_string):
if the_string == []:
return []
else:
res = []
tab_str = the_string.split('], [')
tab_str[0] = tab_str[0][1:]+']'
tab_str[len(tab_str)-1] = '['+tab_str[len(tab_str)-1][:-1]
res.append(parseStringToList(tab_str[0]))
for i in range(1, len(tab_str)-2):
tab_str[i] = '['+tab_str[i]+']'
res.append(parseStringToList(tab_str[i]))
if len(tab_str) > 1:
res.append(parseStringToList(tab_str[len(tab_str)-1]))
return res
def showpaste(content_range):
requested_path = request.args.get('paste', '')
paste = Paste.Paste(requested_path)
@ -86,20 +118,98 @@ def showpaste(content_range):
p_mime = paste.p_mime
p_lineinfo = paste.get_lines_info()
p_content = paste.get_p_content().decode('utf-8', 'ignore')
p_duplicate_full_list = parseStringToList(paste._get_p_duplicate())
p_duplicate_full_list = parseStringToList2(paste._get_p_duplicate())
p_duplicate_list = []
p_simil_list = []
p_hashtype_list = []
for dup_list in p_duplicate_full_list:
path, simil_percent = dup_list
if dup_list[0] == "tlsh":
dup_list[2] = int(((tlsh_to_percent - float(dup_list[2])) / tlsh_to_percent)*100)
else:
dup_list[2] = int(dup_list[2])
p_duplicate_full_list.sort(lambda x,y: cmp(x[2], y[2]), reverse=True)
# Combine multiple duplicate paste names and format for display
new_dup_list = []
dup_list_removed = []
for dup_list_index in range(0, len(p_duplicate_full_list)):
if dup_list_index in dup_list_removed:
continue
indices = [i for i, x in enumerate(p_duplicate_full_list) if x[1] == p_duplicate_full_list[dup_list_index][1]]
hash_types = []
comp_vals = []
for i in indices:
hash_types.append(p_duplicate_full_list[i][0])
comp_vals.append(p_duplicate_full_list[i][2])
dup_list_removed.append(i)
hash_types = str(hash_types).replace("[","").replace("]","") if len(hash_types)==1 else str(hash_types)
comp_vals = str(comp_vals).replace("[","").replace("]","") if len(comp_vals)==1 else str(comp_vals)
new_dup_list.append([hash_types.replace("'", ""), p_duplicate_full_list[dup_list_index][1], comp_vals])
# Create the list to pass to the webpage
for dup_list in new_dup_list:
hash_type, path, simil_percent = dup_list
p_duplicate_list.append(path)
p_simil_list.append(simil_percent)
p_hashtype_list.append(hash_type)
if content_range != 0:
p_content = p_content[0:content_range]
return render_template("show_saved_paste.html", date=p_date, source=p_source, encoding=p_encoding, language=p_language, size=p_size, mime=p_mime, lineinfo=p_lineinfo, content=p_content, initsize=len(p_content), duplicate_list = p_duplicate_list, simil_list = p_simil_list)
return render_template("show_saved_paste.html", date=p_date, source=p_source, encoding=p_encoding, language=p_language, size=p_size, mime=p_mime, lineinfo=p_lineinfo, content=p_content, initsize=len(p_content), duplicate_list = p_duplicate_list, simil_list = p_simil_list, hashtype_list = p_hashtype_list)
def getPastebyType(server, module_name):
all_path = []
for path in server.smembers('WARNING_'+module_name):
all_path.append(path)
return all_path
def get_date_range(num_day):
curr_date = datetime.date.today()
date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2))
date_list = []
for i in range(0, num_day+1):
date_list.append(date.substract_day(i))
return date_list
# Iterate over the elements of the given module and return today's data, or the most recent data available
# return format: [('passed_days', num_of_passed_days), ('elem_name1', elem_value1), ('elem_name2', elem_value2)]
def get_top_relevant_data(server, module_name):
redis_progression_name_set = 'top_'+ module_name +'_set'
days = 0
for date in get_date_range(15):
member_set = []
for keyw in server.smembers(redis_progression_name_set):
redis_progression_name = module_name+'-'+keyw
keyw_value = server.hget(date ,redis_progression_name)
keyw_value = keyw_value if keyw_value is not None else 0
member_set.append((keyw, int(keyw_value)))
member_set.sort(key=lambda tup: tup[1], reverse=True)
if member_set[0][1] == 0: #No data for this date
days += 1
continue
else:
member_set.insert(0, ("passed_days", days))
return member_set
# ========= CACHE CONTROL ========
@app.after_request
def add_header(response):
"""
Add headers to both force latest IE rendering engine or Chrome Frame,
and also to cache the rendered page for 10 minutes.
"""
response.headers['X-UA-Compatible'] = 'IE=Edge,chrome=1'
response.headers['Cache-Control'] = 'public, max-age=0'
return response
# ============ ROUTES ============
@app.route("/_logs")
def logs():
@ -110,6 +220,120 @@ def logs():
def stuff():
return jsonify(row1=get_queues(r_serv))
@app.route("/_progressionCharts", methods=['GET'])
def progressionCharts():
attribute_name = request.args.get('attributeName')
trending_name = request.args.get('trendingName')
bar_requested = True if request.args.get('bar') == "true" else False
if (bar_requested):
num_day = int(request.args.get('days'))
bar_values = []
date_range = get_date_range(num_day)
# Retrieve all data from the last num_day days
for date in date_range:
curr_value = r_serv_charts.hget(attribute_name, date)
bar_values.append([date[0:4]+'/'+date[4:6]+'/'+date[6:8], int(curr_value if curr_value is not None else 0)])
bar_values.insert(0, attribute_name)
return jsonify(bar_values)
else:
redis_progression_name = 'top_progression_'+trending_name
redis_progression_name_set = 'top_progression_'+trending_name+'_set'
# Iterate over elements in top_x_set and retrieve their values
member_set = []
for keyw in r_serv_charts.smembers(redis_progression_name_set):
keyw_value = r_serv_charts.hget(redis_progression_name, keyw)
keyw_value = keyw_value if keyw_value is not None else 0
member_set.append((keyw, int(keyw_value)))
member_set.sort(key=lambda tup: tup[1], reverse=True)
if len(member_set) == 0:
member_set.append(("No relevant data", int(100)))
return jsonify(member_set)
@app.route("/_moduleCharts", methods=['GET'])
def modulesCharts():
keyword_name = request.args.get('keywordName')
module_name = request.args.get('moduleName')
bar_requested = True if request.args.get('bar') == "true" else False
if (bar_requested):
num_day = int(request.args.get('days'))
bar_values = []
date_range = get_date_range(num_day)
# Retrieve all data from the last num_day days
for date in date_range:
curr_value = r_serv_charts.hget(date, module_name+'-'+keyword_name)
bar_values.append([date[0:4]+'/'+date[4:6]+'/'+date[6:8], int(curr_value if curr_value is not None else 0)])
bar_values.insert(0, keyword_name)
return jsonify(bar_values)
else:
member_set = get_top_relevant_data(r_serv_charts, module_name)
if len(member_set) == 0:
member_set.append(("No relevant data", int(100)))
return jsonify(member_set)
@app.route("/_providersChart", methods=['GET'])
def providersChart():
keyword_name = request.args.get('keywordName')
module_name = request.args.get('moduleName')
bar_requested = True if request.args.get('bar') == "true" else False
if (bar_requested):
num_day = int(request.args.get('days'))
bar_values = []
date_range = get_date_range(num_day)
# Retrieve all data from the last num_day days
for date in date_range:
curr_value_size = r_serv_charts.hget(keyword_name+'_'+'size', date)
curr_value_num = r_serv_charts.hget(keyword_name+'_'+'num', date)
if module_name == "size":
curr_value_num = curr_value_num if curr_value_num is not None else 0
curr_value_num = curr_value_num if int(curr_value_num) != 0 else 10000000000
curr_value = float(curr_value_size if curr_value_size is not None else 0.0) / float(curr_value_num)
else:
curr_value = float(curr_value_num if curr_value_num is not None else 0.0)
bar_values.append([date[0:4]+'/'+date[4:6]+'/'+date[6:8], curr_value])
bar_values.insert(0, keyword_name)
return jsonify(bar_values)
else:
redis_provider_name_set = 'top_size_set' if module_name == "size" else 'providers_set'
# Iterate over elements in top_x_set and retrieve their values
member_set = []
for keyw in r_serv_charts.smembers(redis_provider_name_set):
redis_provider_name_size = keyw+'_'+'size'
redis_provider_name_num = keyw+'_'+'num'
keyw_value_size = r_serv_charts.hget(redis_provider_name_size, get_date_range(0)[0])
keyw_value_size = keyw_value_size if keyw_value_size is not None else 0.0
keyw_value_num = r_serv_charts.hget(redis_provider_name_num, get_date_range(0)[0])
if keyw_value_num is not None:
keyw_value_num = int(keyw_value_num)
else:
if module_name == "size":
keyw_value_num = 10000000000
else:
keyw_value_num = 0
if module_name == "size":
member_set.append((keyw, float(keyw_value_size)/float(keyw_value_num)))
else:
member_set.append((keyw, float(keyw_value_num)))
member_set.sort(key=lambda tup: tup[1], reverse=True)
if len(member_set) == 0:
member_set.append(("No relevant data", float(100)))
return jsonify(member_set)
@app.route("/search", methods=['POST'])
def search():
@ -173,6 +397,38 @@ def trending():
default_display = cfg.get("Flask", "default_display")
return render_template("Trending.html", default_display = default_display)
@app.route("/browseImportantPaste/", methods=['GET'])
def browseImportantPaste():
module_name = request.args.get('moduleName')
return render_template("browse_important_paste.html")
@app.route("/importantPasteByModule/", methods=['GET'])
def importantPasteByModule():
module_name = request.args.get('moduleName')
all_content = []
paste_date = []
paste_linenum = []
all_path = []
for path in getPastebyType(r_serv_db, module_name):
all_path.append(path)
paste = Paste.Paste(path)
content = paste.get_p_content().decode('utf8', 'ignore')
content_range = max_preview_char if len(content)>max_preview_char else len(content)-1
all_content.append(content[0:content_range])
curr_date = str(paste._get_p_date())
curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:]
paste_date.append(curr_date)
paste_linenum.append(paste.get_lines_info()[0])
return render_template("important_paste_by_module.html", all_path=all_path, content=all_content, paste_date=paste_date, paste_linenum=paste_linenum, char_to_display=max_preview_modal)
@app.route("/moduletrending/")
def moduletrending():
return render_template("Moduletrending.html")
@app.route("/showsavedpaste/") #completely shows the paste in a new tab
def showsavedpaste():

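showpaste() above displays tlsh results next to ssdeep similarities by mapping the tlsh distance onto an estimated percentage with tlsh_to_percent = 1000.0. A worked sketch of that conversion; the distances are invented examples.

tlsh_to_percent = 1000.0   # same constant as above

def tlsh_display_percent(distance):
    # Lower tlsh distance means closer pastes, so map it onto a 0-100 "similarity".
    return int(((tlsh_to_percent - float(distance)) / tlsh_to_percent) * 100)

print(tlsh_display_percent(0))    # 100: identical
print(tlsh_display_percent(35))   # 96
print(tlsh_display_percent(100))  # 90: at the tlsh duplicate threshold from config.cfg
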
View file

@ -1,3 +1,14 @@
var time_since_last_pastes_num;
//If we have not received info from Global, set paste_num_tabvar to 0
function checkIfReceivedData(){
if ((new Date().getTime() - time_since_last_pastes_num) > 45*1000)
window.paste_num_tabvar = 0;
setTimeout(checkIfReceivedData, 45*1000);
}
setTimeout(checkIfReceivedData, 45*1000);
function initfunc( csvay, scroot) {
window.csv = csvay;
window.scroot = scroot;
@ -102,9 +113,11 @@ function create_log_table(obj_json) {
var chansplit = obj_json.channel.split('.');
var parsedmess = obj_json.data.split(';');
if (parsedmess[0] == "Global"){
var paste_processed = parsedmess[4].split(" ")[2];
window.paste_num_tabvar = paste_processed;
time_since_last_pastes_num = new Date().getTime();
return;
}
@ -219,6 +232,11 @@ function create_queue_table() {
}
$(document).ready(function () {
if (typeof glob_tabvar == "undefined")
location.reload();
if (typeof glob_tabvar.row1 == "undefined")
location.reload();
var data = [];
var data2 = [];
var tmp_tab = [];

View file

@ -0,0 +1,317 @@
/* Already defined variable (Before the input)
*
* var chart_1_num_day = 5;
* var chart_2_num_day = 15;
*
*/
/* VARIABLES */
var pie_threshold = 0.05
var options = {
series: { pie: { show: true,
radius: 3/5,
combine: {
color: '#999',
threshold: pie_threshold
},
label: {
show: true,
radius: 1,
formatter: labelFormatter,
background: {
opacity: 0.5,
color: '#000'
}
}
}
},
grid: { hoverable: true, clickable: true },
legend: { show: false },
};
/* Linked graph - remember the data */
var plot_data_old = []
var plot_old = []
/* FUNCTIONS */
function labelFormatter(label, series) {
return "<div style='font-size:8pt; text-align:center; padding:2px; color:white;'>"
+ label + "<br/>" + Math.round(series.percent) + "%</div>";
}
/* Plot, and bind chart listener */
function plot_top_graph(module_name, init){
/**** Pie Chart ****/
// moduleCharts is used to decide the url to request data from
var moduleCharts = "size" == module_name ? "providersChart" : ("num" == module_name ? "providersChart" : "moduleCharts");
var tot_sum = 0; // used to detect elements placed in the pie's 'Other' part
var data_other = []; // used to detect elements placed in the pie's 'Other' part
var createPie = $.getJSON($SCRIPT_ROOT+"/_"+moduleCharts+"?moduleName="+module_name+"&num_day="+chart_1_num_day,
function(data) {
var temp_data_pie = [];
for(i=0; i<data.length; i++){
if (i==0 && data[0][0] == "passed_days"){ // If there is no data today, take it from the past
if (data[0][1] > 0 && data[0][1] < 7){ // If data is [1:6] day(s) old, put the panel in yellow
$("#day-"+module_name).text(data[0][1] + " Day(s) ago ");
$("#panel-"+module_name).removeClass("panel-green");
$("#panel-"+module_name).addClass("panel-yellow");
} else if (data[0][1] > 6) { // data old of more than 7 days, put the panel in red
$("#day-"+module_name).text(data[0][1] + " Day(s) ago ");
$("#panel-"+module_name).removeClass("panel-green");
$("#panel-"+module_name).addClass("panel-red");
}
} else {
temp_data_pie.push({label: data[i][0], data: data[i][1]});
tot_sum += data[i][1];
}
}
for(i=0; i<temp_data_pie.length; i++){ // Detect elements below a certain threshold
if (parseInt(temp_data_pie[i].data) / tot_sum < pie_threshold)
data_other.push(temp_data_pie[i].label);
}
$.plot($("#flot-pie-chart-"+module_name), temp_data_pie, options);
if (init){ //prevent multiple binding due to the refresh function
$("#flot-pie-chart-"+module_name).bind("plotclick", function (event, pos, item) {
if (item == null)
return;
var clicked_label = item.series.label;
if (module_name == "size"){ // if Provider pie chart clicked, draw the two bar charts
update_bar_chart(moduleCharts, module_name, "#flot-bar-chart-"+module_name, clicked_label,
item.series.color, "%m/%d", false);
update_bar_chart(moduleCharts, "num", "#flot-bar-chart-"+"num", clicked_label,
item.series.color, "%m/%d", true);
}
else if (module_name == "num"){
update_bar_chart(moduleCharts, module_name, "#flot-bar-chart-"+module_name, clicked_label,
item.series.color, "%m/%d", false);
update_bar_chart(moduleCharts, "size", "#flot-bar-chart-"+"size", clicked_label,
item.series.color, "%m/%d", true);
} else {
update_bar_chart(moduleCharts, module_name, "#flot-bar-chart-"+module_name, clicked_label,
item.series.color, "%m/%d", true);
}
});
}
});
/**** Bar Chart ****/
function update_bar_chart(chartUrl, module_name, chartID, involved_item, serie_color, timeformat, can_bind){
var num_day = chart_1_num_day;
var barOptions = {
series: {
bars: { show: true, barWidth: 82800000 },
lines: { show: false, fill: true }
},
xaxis: {
mode: "time",
timeformat: timeformat,
tickSize: [1, 'day'],
minTickSize: [1, "day"]
},
grid: { hoverable: true },
legend: { show: true,
noColumns: 0,
position: "nw"
},
};
var plot; // will contain the current plotting object
/* If part 'Other' has been clicked */
if (involved_item == "Other"){
var all_other_temp_data = []; // the data_bar of all series
var temp_data_bar; // the data_bar associated with one series
var promises = []; // Used to plot once everything has been received
var involved_item
for(i=0; i<data_other.length; i++){ // Get data for elements summed up in the part 'Other'
involved_item = data_other[i];
var request = $.getJSON($SCRIPT_ROOT+"/_"+chartUrl+"?keywordName="+involved_item+"&moduleName="+module_name+"&bar=true"+"&days="+num_day,
function(data) {
temp_data_bar = []
for(i=1; i<data.length; i++){
var curr_date = data[i][0].split('/');
var offset = (data_other.length/2 - data_other.indexOf(data[0]))*10000000
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime() + offset, data[i][1].toFixed(2)]);
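// The offset above only spreads the bars of the different 'Other' series around each
// day tick so they do not overlap. Illustrative values: with 4 labels in data_other,
// the series at index 0 is shifted by (4/2 - 0) * 10000000 ms (about +5.6 h) and the
// series at index 3 by (4/2 - 3) * 10000000 ms (about -2.8 h).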
}
// Insert temp_data_bar in order so that color and alignment correspond for the provider graphs
all_other_temp_data.splice(data_other.indexOf(data[0]), 0, [ data[0], temp_data_bar, data_other.indexOf(data[0])]);
}
)
promises.push(request);
}
/* When everything has been received, start the plotting process */
$.when.apply($, promises).done( function (arg) {
var dataBar = []
for(i=0; i<all_other_temp_data.length; i++) //format data for the plot
dataBar.push({bars: { barWidth: 8280000, order: all_other_temp_data[i][2] }, label: all_other_temp_data[i][0], data: all_other_temp_data[i][1]});
plot = $.plot($(chartID), dataBar, {
series: {
stack: false,
lines: { show: false, fill: true, steps: false },
bars: { show: true},
},
xaxis: {
mode: "time",
timeformat: timeformat,
tickSize: [1, 'day'],
minTickSize: [1, "day"]
},
yaxis: {
//transform: function (v) { return v < 1 ? v : Math.log(v); }
},
grid: { hoverable: true },
legend: { show: true,
noColumns: 1,
position: "nw"
},
tooltip: true,
tooltipOpts: { content: "x: %x, y: %y" },
colors: ["#72a555", "#ab62c0", "#c57c3c", "#638ccc", "#ca5670"]
})
/* remember the data for the two provider graphs */
if (chartUrl == "providersChart"){
if (plot_data_old.length>1){ // avoid adding plot_data for a previously clicked pie part
plot_data_old = [];
plot_old = [];
}
plot_data_old.push(plot.getData());
plot_old.push(plot);
}
if (can_bind){ // avoid binding two listeners for the provider graphs
binder(module_name);
if (module_name == "size") // bind the linked provider graph
binder("num");
else if (module_name == "num")
binder("size");
}
});
} else { // Normal pie's part clicked
$.getJSON($SCRIPT_ROOT+"/_"+chartUrl+"?keywordName="+involved_item+"&moduleName="+module_name+"&bar=true"+"&days="+num_day,
function(data) {
var temp_data_bar = []
for(i=1; i<data.length; i++){
var curr_date = data[i][0].split('/');
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime(), data[i][1].toFixed(2)]);
}
var barData = {
label: involved_item,
data: temp_data_bar,
color: serie_color
};
plot = $.plot($(chartID), [barData], barOptions);
/* remember the data for the two graphs */
if ((module_name == "size") || (module_name == "num")) { // Add only for the provider graph
if (plot_data_old.length>1){ // avoid adding plot_data for a previously clicked pie part
plot_data_old = [];
plot_old = [];
}
plot_data_old.push(plot.getData());
plot_old.push(plot);
}
if (can_bind){ // avoid binding two listeners for the provider graphs
$("#flot-bar-chart-"+module_name).unbind( "plothover.customHandler" );
binder(module_name);
if (module_name == "size"){ // bind the linked provider graph
$("#flot-bar-chart-"+"num").unbind( "plothover.customHandler" );
binder("num");
}
else if (module_name == "num"){
$("#flot-bar-chart-"+"size").unbind( "plothover.customHandler" );
binder("size");
}
}
});
}
}; // end update_bar_chart
} // end plot_top_graph
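// Usage note: the module statistics page added in this PR calls plot_top_graph() for
// "credential", "mail", "size" and "num" from its refreshPlot() helper, first with
// init=true and then every 10 seconds with init=false.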
/* Bind a listener to the graph to display the value under the cursor in the appropriate div */
function binder(module_name){
$("#flot-bar-chart-"+module_name).bind("plothover.customHandler", function (event, pos, item) {
if (item) { // a correct item is hovered
var x = item.datapoint[0]
var y = item.datapoint[1]
var date = new Date(parseInt(x));
var formated_date = (date.getMonth()+1)+'/'+date.getDate(); // getMonth() is 0-based
var color = item.series.color;
var color_opac = "rgba" + color.slice(3, color.length-1)+",0.15)";
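// Illustration (assuming flot reports the series color as an "rgb(r, g, b)" string):
// "rgb(114, 165, 85)" becomes "rgba(114, 165, 85,0.15)", i.e. the same color at 15% opacity.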
// display the hovered value in the chart div
$("#tooltip_graph-"+module_name).html(item.series.label + " of " + formated_date + " = <b>" + y+"</b>")
.css({padding: "2px", width: 'auto', 'background': color_opac , 'border': "3px solid "+color})
.fadeIn(200);
/* If provider bar chart hovered, highlight and display associated value */
if (module_name == "size" || module_name == "num"){
var new_module_name = module_name == "size" ? "num" : "size";
/* Used to get the corresponding associated value for the provider charts */
var plot_obj = plot_data_old[0]; // contains the series
for(serie=0; serie<plot_obj.length; serie++){ //for all series
var data_other = plot_obj[serie].data;
for(i=0; i<data_other.length; i++){ //for all datapoints
if (data_other[i][0] == date.getTime()){
if(y == data_other[i][1]){ // get the correct data and plot object
var other_graph_plot = plot_old[1];
var curr_data_other = plot_data_old[1][serie].data[i][1];
} else {
var other_graph_plot = plot_old[0];
var curr_data_other = data_other[i][1];
}
var datapoint = i;
var the_serie = serie;
}
}
}
$("#tooltip_graph-"+new_module_name).html(item.series.label + " of " + formated_date + " = <b>" + curr_data_other+"</b>")
.css({padding: "2px", width: 'auto', 'background': color_opac, 'border': "3px solid "+color})
.fadeIn(200);
// clean up other highlighted bar
for(i=0; i<data_other.length; i++)
for(s=0; s<plot_obj.length; s++)
other_graph_plot.unhighlight(s, i);
other_graph_plot.highlight(the_serie, datapoint);
}
} else {
// No correct item hovered, clean up the highlighted one
for(i=0; i<plot_old.length; i++)
for(j=0; j<plot_data_old[0][0].data.length; j++)
plot_old[i].unhighlight(0, j);
}
});
}

View file

@@ -112,19 +112,21 @@ function Graph(id_pannel, path, header_size){
// display the top headers
function setVis(max_display){
headings = this.graph.getLabels();
var headings = this.graph.getLabels();
headings.splice(0,1);
var sorted_list = new Array();
today = new Date().getDate()-1; // Take the top from yesterday so that we can see the current evolution
var today = new Date().getDate();
//today = new Date().getDate()-1; // Could take the top from yesterday so that we can see the current evolution
for( i=0; i<headings.length; i++){
the_heading = headings[i];
//console.log('heading='+the_heading+' tab['+(today-1)+']['+(parseInt(i)+1)+']='+g.getValue(today-1, parseInt(i)+1));
var the_heading = headings[i];
//console.log('heading='+the_heading+' tab['+(today)+']['+(parseInt(i)+1)+']='+this.graph.getValue(today-1, parseInt(i)+1));
sorted_list.push({dom: the_heading, val: this.graph.getValue(today-1, parseInt(i)+1), index: parseInt(i)});
}
sorted_list.sort(function(a,b) {
return b.val - a.val;
});
var display_list = sorted_list.slice(0, max_display);
for( i=0; i<display_list.length; i++){
this.graph.setVisibility(display_list[i].index, true);

View file

@@ -0,0 +1,211 @@
/* Variables already defined before this script is loaded (set by the including template):
*
* var chart_1_num_day = 5;
* var chart_2_num_day = 15;
*
*/
function plot_top_graph(trendingName, init){
/**** Flot Pie Chart ****/
var tot_sum = 0; // used to detect elements placed in 'Other' pie's part
var data_other = []; // used to detect elements placed in 'Other' pie's part
var pie_threshold = 0.05
var options = {
series: {
pie: {
show: true,
radius: 3/5,
combine: {
color: '#999',
threshold: pie_threshold
},
label: {
show: true,
radius: 1,
formatter: labelFormatter,
background: {
opacity: 0.5,
color: '#000'
}
}
}
},
grid: { hoverable: true, clickable: true },
legend: { show: false }
};
function labelFormatter(label, series) {
return "<div style='font-size:8pt; text-align:center; padding:2px; color:white;'>"
+ label + "<br/>" + Math.round(series.percent) + "%</div>";
}
// Graph1
$.getJSON($SCRIPT_ROOT+"/_progressionCharts?trendingName="+trendingName+"&num_day="+chart_1_num_day,
function(data) {
var temp_data_pie = [];
for(i=0; i<data.length; i++){
temp_data_pie.push({label: data[i][0], data: data[i][1]});
tot_sum += data[i][1];
}
for(i=0; i<temp_data_pie.length; i++){ // Detect elements below a certain threshold
if (parseInt(temp_data_pie[i].data) / tot_sum < pie_threshold)
data_other.push(temp_data_pie[i].label);
}
$.plot($("#flot-pie-chart1-"+trendingName), temp_data_pie, options);
if (init){ // prevent multiple bindings due to the refresh function
setTimeout(function() {
$("#flot-pie-chart1-"+trendingName).bind("plotclick", function (event, pos, item) {
if (item == null)
return;
var clicked_label = item.series.label;
update_bar_chart("#flot-bar-chart1-"+trendingName, clicked_label, item.series.color, chart_1_num_day, "%m/%d");
update_bar_chart("#flot-bar-chart2-"+trendingName, clicked_label, item.series.color, chart_2_num_day);
});
}, 500);
}
});
// flot bar chart
function update_bar_chart(chartID, involved_item, serie_color, num_day, timeformat, can_bind){
var barOptions = {
series: {
bars: { show: true, barWidth: 82800000 }
},
xaxis: {
mode: "time",
timeformat: timeformat,
tickSize: [1, 'day'],
minTickSize: [1, "day"]
},
grid: { hoverable: true },
legend: { show: true },
tooltip: true,
tooltipOpts: { content: "x: %x, y: %y" }
};
if (involved_item == "Other"){
var all_other_temp_data = []; // the data_bar of all series
var temp_data_bar; // the data_bar associated with one series
var promises = []; // Used to plot once everything has been received
var involved_item
for(i=0; i<data_other.length; i++){ // Get data for elements summed up in the part 'Other'
involved_item = data_other[i];
var request = $.getJSON($SCRIPT_ROOT+"/_progressionCharts?attributeName="+involved_item+"&bar=true"+"&days="+num_day,
function(data) {
temp_data_bar = []
for(i=1; i<data.length; i++){
var curr_date = data[i][0].split('/');
var offset = (data_other.length/2 - data_other.indexOf(data[0]))*10000000
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime() + offset, data[i][1].toFixed(2)]);
//console.log(new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime() + offset);
}
// Insert temp_data_bar in order so that color and alignment correspond across the bar charts
all_other_temp_data.splice(data_other.indexOf(data[0]), 0, [ data[0], temp_data_bar, data_other.indexOf(data[0])]);
}
)
promises.push(request);
}
/* When everything has been received, start the plotting process */
$.when.apply($, promises).done( function (arg) {
var dataBar = []
for(i=0; i<all_other_temp_data.length; i++) //format data for the plot
dataBar.push({bars: { barWidth: 8280000, order: all_other_temp_data[i][2] }, label: all_other_temp_data[i][0], data: all_other_temp_data[i][1]});
$.plot($(chartID), dataBar, {
series: {
stack: false,
lines: { show: false, fill: true, steps: false },
bars: { show: true},
},
xaxis: {
mode: "time",
timeformat: timeformat,
tickSize: [1, 'day'],
minTickSize: [1, "day"]
},
yaxis: {
//transform: function (v) { return v < 1 ? v : Math.log(v); }
},
grid: { hoverable: true },
legend: { show: true,
noColumns: 1,
position: "nw"
},
tooltip: true,
tooltipOpts: { content: "x: %x, y: %y" },
colors: ["#72a555", "#ab62c0", "#c57c3c", "#638ccc", "#ca5670"]
})
});
} else {
$.getJSON($SCRIPT_ROOT+"/_progressionCharts?attributeName="+involved_item+"&bar=true"+"&days="+num_day,
function(data) {
var temp_data_bar = []
for(i=1; i<data.length; i++){
var curr_date = data[i][0].split('/');
temp_data_bar.push([new Date(curr_date[0], curr_date[1]-1, curr_date[2]).getTime(), data[i][1].toFixed(2)]);
}
var barData = {
label: involved_item,
data: temp_data_bar,
color: serie_color
};
$.plot($(chartID), [barData], barOptions);
});
}// end else
};
};
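// Usage note: the trending page calls plot_top_graph(attr_name, true) once when a tab is
// first shown and plot_top_graph(attr_name, false) on every refresh tick, via its
// refresh_top_chart() helper, which also invokes binder() defined below.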
// Bar chart hover binder for the 2 graphs
function binder(module_name){
$("#flot-bar-chart1-"+module_name).bind("plothover.customHandler", function (event, pos, item) {
if (item) { // a correct item is hovered
var x = item.datapoint[0]
var y = item.datapoint[1]
var date = new Date(parseInt(x));
var formated_date = (date.getMonth()+1)+'/'+date.getDate(); // getMonth() is 0-based
var color = item.series.color;
var color_opac = "rgba" + color.slice(3, color.length-1)+",0.15)";
// display the hovered value in the chart div
$("#tooltip_graph1-"+module_name).html(item.series.label + " of " + formated_date + " = <b>" + y+"</b>")
.css({padding: "2px", width: 'auto', 'background': color_opac , 'border': "3px solid "+color})
.fadeIn(200);
}
});
$("#flot-bar-chart2-"+module_name).bind("plothover.customHandler", function (event, pos, item) {
if (item) { // a correct item is hovered
var x = item.datapoint[0]
var y = item.datapoint[1]
var date = new Date(parseInt(x));
var formated_date = (date.getMonth()+1)+'/'+date.getDate(); // getMonth() is 0-based
var color = item.series.color;
var color_opac = "rgba" + color.slice(3, color.length-1)+",0.15)";
// display the hovered value in the chart div
$("#tooltip_graph2-"+module_name).html(item.series.label + " of " + formated_date + " = <b>" + y+"</b>")
.css({padding: "2px", width: 'auto', 'background': color_opac , 'border': "3px solid "+color})
.fadeIn(200);
}
});
}

View file

@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Analysis Information Leak framework Dashboard</title>
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/sb-admin-2.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dygraph_gallery.css') }}" rel="stylesheet" type="text/css" />
<!-- JS -->
<script type="text/javascript" src="{{ url_for('static', filename='js/dygraph-combined.js') }}"></script>
<script language="javascript" src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.flot.js') }}"></script>
<script src="{{ url_for('static', filename='js/jquery.flot.pie.js') }}"></script>
<script src="{{ url_for('static', filename='js/jquery.flot.time.js') }}"></script>
</head>
<body>
<div id="wrapper">
<nav class="navbar navbar-default navbar-static-top" role="navigation" style="margin-bottom: 0">
<div class="navbar-header">
<ul class="nav navbar-nav">
<li><a href="{{ url_for('index') }}"><i class="fa fa-dashboard fa-fw"></i> Dashboard</a></li>
<li><a href="{{ url_for('trending') }}"><i class="glyphicon glyphicon-stats"></i> Trending charts</a></li>
<li class="active"><a href="{{ url_for('moduletrending') }}"><i class="glyphicon glyphicon-stats"></i> Modules statistics</a></li>
<li><a href="{{ url_for('browseImportantPaste') }}"><i class="fa fa-search-plus "></i> Browse important pastes</a></li>
</ul>
</div>
<!-- /.navbar-top-links -->
<div class="navbar-default sidebar" role="navigation">
<div class="sidebar-collapse">
<ul class="nav" id="side-menu">
<li class="sidebar-search">
{% include 'searchbox.html' %}
</li>
</ul>
<!-- /#side-menu -->
</div>
<!-- /.sidebar-collapse -->
<a href="{{ url_for('index') }}"><img src="{{ url_for('static', filename='image/AIL.png') }}" /></a>
</div>
<!-- /.navbar-static-side -->
</nav>
<div id="page-wrapper">
<br/>
{% include 'trending_graphs/Moduletrending.html' %}
</div>
<!-- /#page-wrapper -->
<script>
var chart_1_num_day = 5;
var chart_2_num_day = 15;
$SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
</script>
<script src="{{ url_for('static', filename='js/moduleTrending.js') }}"></script>
<script>
$(document).ready(function(){
$("[align]").css({padding: "2px", width: 'auto', 'background': "rgba(102, 102, 102, 0.15)" , 'border': "3px solid rgb(102, 102, 102)"})
refreshPlot(true);
});
function refreshPlot(init){
refreshAnimation();
plot_top_graph("credential", init);
plot_top_graph("mail", init);
plot_top_graph("size", init);
plot_top_graph("num", init);
setTimeout(function(){ refreshPlot(false); }, 10000);
}
function refreshAnimation(){
$("[flash]").css('color', '#fece00');
setTimeout(function() { $("[flash]").css('color', 'black'); }, 1000);
}
</script>
</div>
<script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
</body>
</html>

View file

@@ -5,6 +5,10 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="Cache-Control" content="no-cache, no-store, must-revalidate" />
<meta http-equiv="Pragma" content="no-cache" />
<meta http-equiv="Expires" content="0" />
<title>Analysis Information Leak framework Dashboard</title>
<!-- Core CSS -->
@@ -15,7 +19,10 @@
<!-- JS -->
<script type="text/javascript" src="{{ url_for('static', filename='js/dygraph-combined.js') }}"></script>
<script language="javascript" src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script>
<script src="{{ url_for('static', filename='js/jquery.flot.js') }}"></script>
<script src="{{ url_for('static', filename='js/jquery.flot.pie.js') }}"></script>
<script src="{{ url_for('static', filename='js/jquery.flot.time.js') }}"></script>
<script>
var default_display = {{ default_display }};
var current_displayed_graph;
</script>
@@ -29,6 +36,8 @@
<ul class="nav navbar-nav">
<li><a href="{{ url_for('index') }}"><i class="fa fa-dashboard fa-fw"></i> Dashboard</a></li>
<li class="active"><a href="{{ url_for('trending') }}"><i class="glyphicon glyphicon-stats"></i> Trending charts</a></li>
<li><a href="{{ url_for('moduletrending') }}"><i class="glyphicon glyphicon-stats"></i> Modules statistics</a></li>
<li><a href="{{ url_for('browseImportantPaste') }}"><i class="fa fa-search-plus "></i> Browse important pastes</a></li>
</ul>
</div>
<!-- /.navbar-top-links -->
@@ -58,26 +67,34 @@
<!-- /.nav-tabs -->
<ul class="nav nav-tabs">
<li class="active"><a data-toggle="tab" href="#tld-tab" data-pannel="TldTrending" data-path="../static//csv/tldstrendingdata.csv">Top level domains</a></li>
<li><a data-toggle="tab" href="#domain-tab" data-pannel="DomainTrending" data-path="../static//csv/domainstrendingdata.csv">Domains</a></li>
<li class="active"><a data-toggle="tab" href="#tld-tab" data-attribute-name="tld" data-pannel="TldTrending" data-path="../static//csv/tldstrendingdata.csv">Top level domains</a></li>
<li><a data-toggle="tab" href="#domain-tab" data-attribute-name="domain" data-pannel="DomainTrending" data-path="../static//csv/domainstrendingdata.csv">Domains</a></li>
<li><a data-toggle="tab" href="#protocol-tab" data-attribute-name="scheme" data-pannel="ProtocolTrending" data-path="../static//csv/protocolstrendingdata.csv">Protocols</a></li>
<li><a data-toggle="tab" href="#words-tab" data-pannel="WordTrending" data-path="../static//csv/wordstrendingdata.csv">Words</a></li>
<li><a data-toggle="tab" href="#protocol-tab" data-pannel="ProtocolTrending" data-path="../static//csv/protocolstrendingdata.csv">Protocols</a></li>
</ul>
<br/>
<script>
var chart_1_num_day = 5;
var chart_2_num_day = 15;
$SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
</script>
<div class="tab-content">
<script type="text/javascript" src="{{ url_for('static', filename='js/trendingchart.js')}}"></script>
<div class="tab-content">
<div class="col-lg-12 tab-pane fade in active" id="tld-tab" >
{% include 'trending_graphs/Tldstrending.html' %}
</div>
<div class="col-lg-12 tab-pane fade" id="domain-tab">
{% include 'trending_graphs/Domainstrending.html' %}
</div>
<div class="col-lg-12 tab-pane fade" id="words-tab">
{% include 'trending_graphs/Wordstrending.html' %}
</div>
<div class="col-lg-12 tab-pane fade" id="protocol-tab">
{% include 'trending_graphs/Protocolstrending.html' %}
</div>
<div class="col-lg-12 tab-pane fade" id="words-tab">
{% include 'trending_graphs/Wordstrending.html' %}
</div>
</div> <!-- tab-content -->
<!-- /.row -->
</div>
@@ -86,6 +103,25 @@
<!-- import graph function -->
<script src="{{ url_for('static', filename='js/plot-graph.js') }}"></script>
<script type="text/javascript">
var refresh_interval = 1000*60*2; // number of milliseconds between each call
var launched_refresher = []; // Avoid launching multiple refreshers
var active_tab_name = "tld"; // Avoid redrawing the graph if the tab is not active
function refresh_top_chart(attr_name, immediate){
if (immediate){
plot_top_graph(attr_name, true);
binder(active_tab_name);
}
setTimeout(function() {
$("[flash-"+attr_name+"]").css('color', '#fece00');
setTimeout(function() { $("[flash-"+attr_name+"]").css('color', 'black'); }, 1000);
refresh_top_chart(attr_name, false);
if (active_tab_name == attr_name)
plot_top_graph(attr_name, false);
}, refresh_interval);
}
</script>
<!-- instantiate and plot graphs -->
<script type="text/javascript">
// Create, plot and set the limit of displayed headers
@@ -102,14 +138,24 @@
// When a panel is shown, create_and_plot.
$('.nav-tabs a').on('shown.bs.tab', function(event){
create_and_plot($(event.target).attr('data-pannel'), $(event.target).attr('data-path'));
create_and_plot($(event.target).attr('data-pannel'), $(event.target).attr('data-path'));
active_tab_name = $(event.target).attr('data-attribute-name')
//Top progression chart
if(launched_refresher.indexOf($(event.target).attr('data-attribute-name')) == -1){
launched_refresher.push($(event.target).attr('data-attribute-name'));
refresh_top_chart($(event.target).attr('data-attribute-name'), true);
}
});
</script>
<script>
$(document).ready(function(){
$("[align]").css({padding: "2px", width: 'auto', 'background': "rgba(102, 102, 102, 0.15)" , 'border': "3px solid rgb(102, 102, 102)"})
// Create the graph when the page has just loaded
create_and_plot("TldTrending", '../static//csv/tldstrendingdata.csv')
create_and_plot("TldTrending", '../static//csv/tldstrendingdata.csv')
//Top progression chart
refresh_top_chart("tld", true);
});
// Used when we modify the number of displayed curves

View file

@@ -0,0 +1,171 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Analysis Information Leak framework Dashboard</title>
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/sb-admin-2.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.css') }}" rel="stylesheet" type="text/css" />
<script language="javascript" src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.js') }}"></script>
<style>
.tooltip-inner {
text-align: left;
height: 200%;
width: 200%;
max-width: 500px;
max-height: 500px;
font-size: 13px;
}
xmp {
white-space:pre-wrap;
word-wrap:break-word;
}
.modal-backdrop.fade {
opacity: 0;
}
</style>
</head>
<body>
<div id="wrapper">
<nav class="navbar navbar-default navbar-static-top" role="navigation" style="margin-bottom: 0">
<div class="navbar-header">
<ul class="nav navbar-nav">
<li><a href="{{ url_for('index') }}"><i class="fa fa-dashboard fa-fw"></i> Dashboard</a></li>
<li><a href="{{ url_for('trending') }}"><i class="glyphicon glyphicon-stats"></i> Trending charts</a></li>
<li><a href="{{ url_for('moduletrending') }}"><i class="glyphicon glyphicon-stats"></i> Modules statistics</a></li>
<li class="active"><a href="{{ url_for('browseImportantPaste') }}"><i class="fa fa-search-plus "></i> Browse important pastes</a></li>
</ul>
</div>
<!-- /.navbar-top-links -->
<div class="navbar-default sidebar" role="navigation">
<div class="sidebar-collapse">
<ul class="nav" id="side-menu">
<li class="sidebar-search">
{% include 'searchbox.html' %}
</li>
</ul>
<!-- /#side-menu -->
</div>
<!-- /.sidebar-collapse -->
<a href="{{ url_for('index') }}"><img src="{{ url_for('static', filename='image/AIL.png') }}" /></a>
</div>
<!-- /.navbar-static-side -->
</nav>
<!-- Modal -->
<div id="mymodal" class="modal fade" role="dialog">
<div class="modal-dialog modal-lg">
<!-- Modal content-->
<div id="mymodalcontent" class="modal-content">
<div id="mymodalbody" class="modal-body" max-width="850px">
<p>Loading paste information...</p>
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" height="26" width="26" style="margin: 4px;">
</div>
<div class="modal-footer">
<a id="button_show_path" target="_blank" href=""><button type="button" class="btn btn-info">Show saved paste</button></a>
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<div id="page-wrapper">
<div class="row">
<div class="col-lg-12">
<h1 class="page-header">Browse important pastes</h1>
</div>
<!-- /.col-lg-12 -->
</div>
<!-- /.row -->
<div class="row">
<!-- /.nav-tabs -->
<ul class="nav nav-tabs">
<li name='nav-pan' class="active"><a data-toggle="tab" href="#credential-tab" data-attribute-name="credential" data-panel="credential-panel">Credentials</a></li>
<li name='nav-pan'><a data-toggle="tab" href="#creditcard-tab" data-attribute-name="creditcard" data-panel="creditcard-panel">Credit cards</a></li>
<li name='nav-pan'><a data-toggle="tab" href="#sqlinjection-tab" data-attribute-name="sqlinjection" data-panel="sqlinjection-panel">SQL injections</a></li>
<li name='nav-pan'><a data-toggle="tab" href="#cve-tab" data-attribute-name="cve" data-panel="cve-panel">CVEs</a></li>
<li name='nav-pan'><a data-toggle="tab" href="#keys-tab" data-attribute-name="keys" data-panel="keys-panel">Keys</a></li>
<li name='nav-pan'><a data-toggle="tab" href="#mail-tab" data-attribute-name="mail" data-panel="mail-panel">Mails</a></li>
<li name='nav-pan'><a data-toggle="tab" href="#phone-tab" data-attribute-name="phone" data-panel="phone-panel">Phones</a></li>
</ul>
<br/>
<div class="tab-content">
<div class="col-lg-12 tab-pane fade in active" id="credential-tab" >
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" style="margin: 4px;">
</div>
<div class="col-lg-12 tab-pane fade" id="creditcard-tab">
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" style="margin: 4px;">
</div>
<div class="col-lg-12 tab-pane fade" id="sqlinjection-tab">
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" style="margin: 4px;">
</div>
<div class="col-lg-12 tab-pane fade" id="cve-tab">
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" style="margin: 4px;">
</div>
<div class="col-lg-12 tab-pane fade" id="keys-tab">
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" style="margin: 4px;">
</div>
<div class="col-lg-12 tab-pane fade" id="mail-tab">
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" style="margin: 4px;">
</div>
<div class="col-lg-12 tab-pane fade" id="phone-tab">
<img id="loading-gif-modal" src="{{url_for('static', filename='image/loading.gif') }}" style="margin: 4px;">
</div>
</div> <!-- tab-content -->
<!-- /.row -->
</div>
<!-- /#page-wrapper -->
<!-- import graph function -->
<script>
$(document).ready(function(){
var dataPath = 'credential';
$.get("{{ url_for('importantPasteByModule') }}"+"?moduleName="+dataPath, function(data, status){
$('#'+dataPath+'-tab').html(data);
});
});
</script>
<script>
// When a panel is shown, create the data-table.
var previous_tab = $('[data-attribute-name="credential"]');
var loading_gif = "<img id='loading-gif-modal' class='img-center' src=\"{{url_for('static', filename='image/loading.gif') }}\" height='26' width='26' style='margin: 4px;'>";
$('.nav-tabs a').on('shown.bs.tab', function(event){
var dataPath = $(event.target).attr('data-attribute-name');
$.get("{{ url_for('importantPasteByModule') }}"+"?moduleName="+dataPath, function(data, status){
var currentTab = $('[name].active').children();
$('#'+previous_tab.attr('data-attribute-name')+'-tab').html(loading_gif);
currentTab.removeClass( "active" );
$('#'+dataPath+'-tab').html(data);
$(event.target).parent().addClass( "active" );
previous_tab = currentTab;
});
});
</script>
</div>
</body>
</html>

View file

@@ -0,0 +1,150 @@
<table class="table table-striped table-bordered table-hover" id="myTable">
<thead>
<tr>
<th>#</th>
<th style="max-width: 800px;">Path</th>
<th>Date</th>
<th># of lines</th>
<th>Action</th>
</tr>
</thead>
<tbody>
{% for path in all_path %}
<tr>
<td>{{ loop.index }}</td>
<td><a target="_blank" href="{{ url_for('showsavedpaste') }}?paste={{ path }}&num={{ loop.index }}">{{ path }}</a></td>
<td>{{ paste_date[loop.index0] }}</td>
<td>{{ paste_linenum[loop.index0] }}</td>
<td><p><span class="glyphicon glyphicon-info-sign" data-toggle="tooltip" data-placement="left" title="{{ content[loop.index0] }}"></span> <button type="button" class="btn-link" data-num="{{ loop.index }}" data-toggle="modal" data-target="#mymodal" data-url="{{ url_for('showsavedpaste') }}?paste={{ path }}&num={{ loop.index }}" data-path="{{ path }}"><span class="fa fa-search-plus"></span></button></p></td>
</tr>
{% endfor %}
</tbody>
</table>
<br/>
<br/>
<script>
$(document).ready(function(){
$('[data-toggle="tooltip"]').tooltip();
$('#myTable').dataTable();
});
</script>
<!-- Dynamically update the modal -->
<script type="text/javascript">
// static data
var alert_message = '<div class="alert alert-info alert-dismissable"><button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button><strong>No more data.</strong> Full paste displayed.</div>';
var complete_paste = null;
var char_to_display = {{ char_to_display }};
var start_index = 0;
// On click, get html content from url and update the corresponding modal
$("[data-toggle='modal']").on("click.openmodal", function (event) {
event.preventDefault();
var modal=$(this);
var url = " {{ url_for('showpreviewpaste') }}?paste=" + $(this).attr('data-path') + "&num=" + $(this).attr('data-num');
$.get(url, function (data) {
$("#mymodalbody").html(data);
var button = $('<button type="button" id="load-more-button" class="btn btn-info btn-xs center-block" data-url="' + $(modal).attr('data-path') +'" data-toggle="tooltip" data-placement="bottom" title="Load more content"><span class="glyphicon glyphicon-download"></span></button>');
button.tooltip();
$("#mymodalbody").children(".panel-default").append(button);
$("#button_show_path").attr('href', $(modal).attr('data-url'));
$("#button_show_path").show('fast');
$("#loading-gif-modal").css("visibility", "hidden"); // Hide the loading GIF
if ($("[data-initsize]").attr('data-initsize') < char_to_display) { // All the content is displayed
nothing_to_display();
}
// On click, download the full paste content
$("#load-more-button").on("click", function (event) {
if (complete_paste == null) { // Download only once
$.get("{{ url_for('getmoredata') }}"+"?paste="+$(modal).attr('data-path'), function(data, status){
complete_paste = data;
update_preview();
});
} else {
update_preview();
}
});
});
});
// When the modal is closed, reset it to its default content
$("#mymodal").on('hidden.bs.modal', function () {
$("#mymodalbody").html("<p>Loading paste information...</p>");
var loading_gif = "<img id='loading-gif-modal' class='img-center' src=\"{{url_for('static', filename='image/loading.gif') }}\" height='26' width='26' style='margin: 4px;'>";
$("#mymodalbody").append(loading_gif); // Show the loading GIF
$("#button_show_path").attr('href', '');
$("#button_show_path").hide();
complete_paste = null;
start_index = 0;
});
// Update the paste preview in the modal
function update_preview() {
if (start_index + char_to_display > complete_paste.length-1){ // end of paste reached
var final_index = complete_paste.length-1;
var flag_stop = true;
} else {
var final_index = start_index + char_to_display;
}
if (final_index != start_index){ // still have data to display
$("#mymodalbody").find("#paste-holder").text($("#mymodalbody").find("#paste-holder").text()+complete_paste.substring(start_index+1, final_index+1)); // Append the new content
start_index = final_index;
if (flag_stop)
nothing_to_display();
} else {
nothing_to_display();
}
}
// Update the modal when there is no more data
function nothing_to_display() {
var new_content = $(alert_message).hide();
$("#mymodalbody").find("#panel-body").append(new_content);
new_content.show('fast');
$("#load-more-button").hide();
}
// Used to re-bind the buttons to the newly displayed rows
// (the binding does not happen if the dataTable is inside tabs and the clicked row is on another page)
$('#myTable').on( 'draw.dt', function () {
// On click, get html content from url and update the corresponding modal
$("[data-toggle='modal']").unbind('click.openmodal').on("click.openmodal", function (event) {
event.preventDefault();
var modal=$(this);
var url = " {{ url_for('showpreviewpaste') }}?paste=" + $(this).attr('data-path') + "&num=" + $(this).attr('data-num');
$.get(url, function (data) {
$("#mymodalbody").html(data);
var button = $('<button type="button" id="load-more-button" class="btn btn-info btn-xs center-block" data-url="' + $(modal).attr('data-path') +'" data-toggle="tooltip" data-placement="bottom" title="Load more content"><span class="glyphicon glyphicon-download"></span></button>');
button.tooltip();
$("#mymodalbody").children(".panel-default").append(button);
$("#button_show_path").attr('href', $(modal).attr('data-url'));
$("#button_show_path").show('fast');
$("#loading-gif-modal").css("visibility", "hidden"); // Hide the loading GIF
if ($("[data-initsize]").attr('data-initsize') < char_to_display) { // All the content is displayed
nothing_to_display();
}
// On click, download the full paste content
$("#load-more-button").on("click", function (event) {
if (complete_paste == null) { // Download only once
$.get("{{ url_for('getmoredata') }}"+"?paste="+$(modal).attr('data-path'), function(data, status){
complete_paste = data;
update_preview();
});
} else {
update_preview();
}
});
});
});
} );
</script>

View file

@@ -18,6 +18,7 @@
<script src="{{ url_for('static', filename='js/jquery.flot.js') }}"></script>
<script>
window.default_minute = {{ default_minute }};
window.glob_tabvar = []; // Avoid undefined
function update_values() {
$SCRIPT_ROOT = {{ request.script_root|tojson|safe }};
$.getJSON($SCRIPT_ROOT+"/_stuff",
@@ -37,6 +38,8 @@
<ul class="nav navbar-nav">
<li class="active"><a href="{{ url_for('index') }}"><i class="fa fa-dashboard fa-fw"></i> Dashboard</a></li>
<li><a href="{{ url_for('trending') }}"><i class="glyphicon glyphicon-stats"></i> Trending charts</a></li>
<li><a href="{{ url_for('moduletrending') }}"><i class="glyphicon glyphicon-stats"></i> Modules statistics</a></li>
<li><a href="{{ url_for('browseImportantPaste') }}"><i class="fa fa-search-plus "></i> Browse important pastes</a></li>
</ul>
</div>
<!-- /.navbar-top-links -->

View file

@@ -43,17 +43,26 @@
</div>
<div class="panel-body" id="panel-body">
{% if duplicate_list|length == 0 %}
<h4> No Duplicate </h4>
<h3> No Duplicate </h3>
{% else %}
<h4> Duplicate list: </h4>
<h3> Duplicate list: </h3>
<table style="width:100%">
{% set i = 0 %}
<tr>
<th style="text-align:left;">Hash type</th><th style="text-align:left;">Similarity</th><th style="text-align:left;">Paste info</th>
</tr>
{% for dup_path in duplicate_list %}
Similarity: {{ simil_list[i] }}% - <a target="_blank" href="{{ url_for('showsavedpaste') }}?paste={{ dup_path }}" id='dup_path'>{{ dup_path }}</a></br>
<tr>
<td>{{ hashtype_list[i] }}</td>
<td>Similarity: {{ simil_list[i] }}%</td>
<td><a target="_blank" href="{{ url_for('showsavedpaste') }}?paste={{ dup_path }}" id='dup_path'>{{ dup_path }}</a></td>
</tr>
{% set i = i + 1 %}
{% endfor %}
</table>
{% endif %}
<h4> Content: </h4>
<p data-initsize="{{ initsize }}"> <xmp id="paste-holder">{{ content }}</xmp></p>
<h3> Content: </h3>
<p data-initsize="{{ initsize }}"> <pre id="paste-holder">{{ content }}</pre></p>
</div>
</div>

View file

@@ -1,3 +1,48 @@
<div class="row">
<div class="col-lg-12">
<div class="row">
<div class="col-lg-6">
<div class="panel panel-default">
<div class="panel-heading">
<i id="flash-domain" class="glyphicon glyphicon-flash " flash-domain=""></i> Top Progression for the last 5 days
</div>
<div class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph1-domain' align="right">No bar hovered</div>
</div>
<div class="row">
<div class="flot-chart-content col-lg-3" id="flot-pie-chart1-domain" style="height:250px; width:48%;"></div>
<div class="flot-chart-content col-lg-3" id="flot-bar-chart1-domain" style="height:250px; width:48%; margin: 5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
<!-- /.panel -->
<div class="col-lg-6">
<div class="panel panel-default">
<div class="panel-heading">
<i id="flash-domain" class="glyphicon glyphicon-flash " flash-domain=""></i> Top Progression for the last 15 days
</div>
<div class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph2-domain' align="right">No bar hovered</div>
</div>
<div class="">
<div class="flot-chart-content col-lg-3" id="flot-bar-chart2-domain" style="height:250px; width:100%; margin:5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
</div>
<!-- /.panel -->
</div>
</div>
<div class="panel panel-default">
<div class="panel-heading">
<i class="fa fa-bar-chart-o fa-fw"></i> Top Domain Trending

View file

@@ -0,0 +1,96 @@
<div class="row">
<div class="col-lg-12">
<div class="row">
<div class="col-lg-12">
<div id="panel-credential" class="panel panel-green">
<div class="panel-heading">
<i id="flash-tld" class="fa fa-unlock" flash=""></i> <strong> Credential</strong> - most posted domain
<b id="day-credential" class="pull-right">Today</b>
</div>
<div class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph-credential' align="right">No bar hovered</div>
</div>
<div class="row">
<div class="flot-chart-content col-lg-6" id="flot-pie-chart-credential" style="height:250px; width:33%;"></div>
<div class="flot-chart-content col-lg-6" id="flot-bar-chart-credential" style="height:250px; width:66%; margin-top: 5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
<!-- /.panel -->
</div>
<!-- /.panel -->
</div>
</div>
<div class="row">
<div class="col-lg-12">
<div class="row">
<div class="col-lg-12">
<div id="panel-mail" class="panel panel-green">
<div class="panel-heading">
<i id="flash-mail" class="fa fa-envelope" flash=""></i><strong> Mail</strong> - most posted domain (max 1 per paste)
<b id="day-mail" class="pull-right">Today</b>
</div>
<div class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph-mail' align="right">No bar hovered</div>
</div>
<div class="row">
<div class="flot-chart-content col-lg-6" id="flot-pie-chart-mail" style="height:250px; width:33%;"></div>
<div class="flot-chart-content col-lg-6" id="flot-bar-chart-mail" style="height:250px; width:66%; margin-top: 5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
<!-- /.panel -->
</div>
<!-- /.panel -->
</div>
</div>
<div class="row">
<div class="col-lg-12">
<div class="row">
<div class="col-lg-12">
<div class="panel panel-green">
<div class="panel-heading">
<i id="flash-size" class="glyphicon glyphicon-transfer" flash=""></i><strong> Provider</strong>
<b class="pull-right">Today</b>
</div>
<div class="panel-body">
<div class="">
<div class="col-lg-12">
<h4 class="col-lg-3">Average paste size by provider </h4>
<div class="flot-chart-content pull-right" id='tooltip_graph-size' align="right">No bar hovered</div>
</div>
<div class="row">
<div class="flot-chart-content col-lg-6" id="flot-pie-chart-size" style="height:250px; width:33%;"></div>
<div class="flot-chart-content col-lg-6" id="flot-bar-chart-size" style="height:250px; width:66%;"><div class="alert alert-info">Click on a part</div></div>
</div>
<div class="col-lg-12">
<h4 class="col-lg-3">Number of paste by provider </h4>
<div class="flot-chart-content pull-right" id='tooltip_graph-num' align="right">No bar hovered</div>
</div>
<div class="row">
<div class="flot-chart-content col-lg-6" id="flot-pie-chart-num" style="height:250px; width:33%;"></div>
<div class="flot-chart-content col-lg-6" id="flot-bar-chart-num" style="height:250px; width:66%;"><div class="alert alert-info">Click on a part</div></div>
</div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
<!-- /.panel -->
</div>
<!-- /.panel -->
</div>
</div>

View file

@@ -1,3 +1,48 @@
<div class="row">
<div class="col-lg-12">
<div class="row">
<div class="col-lg-6">
<div class="panel panel-default">
<div class="panel-heading">
<i id="flash-scheme" class="glyphicon glyphicon-flash " flash-scheme=""></i> Top Progression for the last 5 days
</div>
<div class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph1-scheme' align="right">No bar hovered</div>
</div>
<div class="row">
<div class="flot-chart-content col-lg-3" id="flot-pie-chart1-scheme" style="height:250px; width:48%;"></div>
<div class="flot-chart-content col-lg-3" id="flot-bar-chart1-scheme" style="height:250px; width:48%; margin:5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
<!-- /.panel -->
<div class="col-lg-6">
<div class="panel panel-default">
<div class="panel-heading">
<i id="flash-scheme" class="glyphicon glyphicon-flash " flash-scheme=""></i> Top Progression for the last 15 days
</div>
<div class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph2-scheme' align="right">No bar hovered</div>
</div>
<div class="">
<div class="flot-chart-content col-lg-3" id="flot-bar-chart2-scheme" style="height:250px; width:100%; margin:5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
</div>
<!-- /.panel -->
</div>
</div>
<div class="panel panel-default">
<div class="panel-heading">
<i class="fa fa-bar-chart-o fa-fw"></i> Protocols Trend

View file

@@ -1,3 +1,50 @@
<div class="row">
<div class="col-lg-12">
<div class="row">
<div class="col-lg-6">
<div class="panel panel-default">
<div class="panel-heading">
<i id="flash-tld" class="glyphicon glyphicon-flash " flash-tld=""></i> Top Progression for the last 5 days
</div>
<div class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph1-tld' align="right">No bar hovered</div>
</div>
<div class="row">
<div class="flot-chart-content col-lg-3" id="flot-pie-chart1-tld" style="height:250px; width:48%;"></div>
<div class="flot-chart-content col-lg-3" id="flot-bar-chart1-tld" style="height:250px; width:48%; margin-top: 5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<div class="">
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
<!-- /.panel -->
<div class="col-lg-6">
<div class="panel panel-default">
<div class="panel-heading">
<i class="glyphicon glyphicon-flash " flash-tld=""></i> Top Progression for the last 15 days
</div>
<div id="flash-tld" class="panel-body">
<div class="col-lg-12">
<div class="flot-chart-content pull-right" id='tooltip_graph2-tld' align="right">No bar hovered</div>
</div>
<div class="">
<div class="flot-chart-content col-lg-3" id="flot-bar-chart2-tld" style="height:250px; width:100%; margin-top: 5px;"><div class="alert alert-info">Click on a part</div></div>
</div>
<!-- /.row -->
</div>
<!-- /.panel-body -->
</div>
</div>
</div>
<!-- /.panel -->
</div>
</div>
<div class="panel panel-default">
<div class="panel-heading">
<i class="fa fa-bar-chart-o fa-fw"></i> Top Level Domain Trending

View file

@@ -36,6 +36,7 @@ wget https://cdn.datatables.net/plug-ins/1.10.7/integration/bootstrap/3/dataTabl
# Resources for the graphs
wget https://raw.githubusercontent.com/flot/flot/master/jquery.flot.js -O ./static/js/jquery.flot.js
wget https://raw.githubusercontent.com/flot/flot/master/jquery.flot.pie.js -O ./static/js/jquery.flot.pie.js
wget https://raw.githubusercontent.com/flot/flot/master/jquery.flot.time.js -O ./static/js/jquery.flot.time.js
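# jquery.flot.time.js is used by the time-mode x axes of the new bar charts in moduleTrending.js and trendingchart.js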
mkdir -p ./static/image
pushd static/image