From 16c700ddb4b8b4f83b6da0786c83de95f1b0526c Mon Sep 17 00:00:00 2001 From: Terrtia Date: Fri, 29 Jun 2018 10:02:29 +0200 Subject: [PATCH 01/31] new: add base64 display on paste + base64 statistics --- OVERVIEW.md | 31 +- bin/ApiKey.py | 3 +- bin/Base64.py | 64 +++- bin/Helper.py | 13 + bin/MISP_The_Hive_feeder.py | 2 + bin/Release.py | 5 +- bin/SentimentAnalysis.py | 1 + bin/Tags.py | 9 + bin/Tokenize.py | 5 +- bin/packages/Paste.py | 2 +- bin/submit_paste.py | 11 + var/www/modules/Flask_config.py | 15 + .../base64Decoded/Flask_base64Decoded.py | 248 ++++++++++++++ .../templates/base64Decoded.html | 306 ++++++++++++++++++ .../base64Decoded/templates/base64_type.html | 179 ++++++++++ .../templates/header_base64Decoded.html | 1 + var/www/modules/showpaste/Flask_showpaste.py | 72 ++++- .../showpaste/templates/show_saved_paste.html | 68 ++++ 18 files changed, 1008 insertions(+), 27 deletions(-) create mode 100644 var/www/modules/base64Decoded/Flask_base64Decoded.py create mode 100644 var/www/modules/base64Decoded/templates/base64Decoded.html create mode 100644 var/www/modules/base64Decoded/templates/base64_type.html create mode 100644 var/www/modules/base64Decoded/templates/header_base64Decoded.html diff --git a/OVERVIEW.md b/OVERVIEW.md index 72c8e236..b642431b 100644 --- a/OVERVIEW.md +++ b/OVERVIEW.md @@ -1,7 +1,7 @@ Overview ======== -Redis and LevelDB overview +Redis and ARDB overview -------------------------- * Redis on TCP port 6379 @@ -20,3 +20,32 @@ Redis and LevelDB overview - DB 0 - Lines duplicate - DB 1 - Hashs + +ARDB overview +--------------------------- + +* DB 7 - Metadata: + ----------------------------------------- BASE64 ---------------------------------------- + + HSET - 'metadata_hash:'+hash 'saved_path' saved_path + 'size' size + 'first_seen' first_seen + 'last_seen' last_seen + 'estimated_type' estimated_type + 'vt_link' vt_link + 'vt_report' vt_report + 'nb_seen_in_all_pastes' nb_seen_in_all_pastes + + SET - 'base64_paste:'+paste hash * + + ZADD - 'base64_date:'+20180622 hash * nb_seen_this_day + + ZADD - 'base64_hash'+hash paste * nb_seen_in_paste + + SET - 'hash_all_type' hash_type + + ZADD - 'base64_type:'+type date nb_seen + + + + diff --git a/bin/ApiKey.py b/bin/ApiKey.py index e7ded9b2..faf4b2d9 100755 --- a/bin/ApiKey.py +++ b/bin/ApiKey.py @@ -86,8 +86,7 @@ if __name__ == "__main__": if message is not None: - search_api_key(message) - + search_api_key(message) else: publisher.debug("Script ApiKey is Idling 10s") diff --git a/bin/Base64.py b/bin/Base64.py index 960ca6de..767f36f9 100755 --- a/bin/Base64.py +++ b/bin/Base64.py @@ -8,6 +8,7 @@ import time import os import datetime +import redis from pubsublogger import publisher @@ -31,7 +32,7 @@ def timeout_handler(signum, frame): signal.signal(signal.SIGALRM, timeout_handler) -def search_base64(content, message): +def search_base64(content, message, date): find = False base64_list = re.findall(regex_base64, content) if(len(base64_list) > 0): @@ -46,6 +47,8 @@ def search_base64(content, message): find = True hash = sha1(decode).hexdigest() + print(message) + print(hash) data = {} data['name'] = hash @@ -54,8 +57,36 @@ def search_base64(content, message): data['estimated type'] = type json_data = json.dumps(data) - save_base64_as_file(decode, type, hash, json_data) - print('found {} '.format(type)) + date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) + date_key = date[0:4] + date[4:6] + date[6:8] + + serv_metadata.zincrby('base64_date:'+date_key, hash, 1) + + # first time we see this hash + if not 
serv_metadata.hexists('metadata_hash:'+hash, 'estimated_type'): + serv_metadata.hset('metadata_hash:'+hash, 'first_seen', date_paste) + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + else: + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + + # first time we see this file on this paste + if serv_metadata.zscore('base64_hash:'+hash, message) is None: + print('first') + serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + + serv_metadata.sadd('base64_paste:'+message, hash) # paste - hash map + serv_metadata.zincrby('base64_hash:'+hash, message, 1)# hash - paste map + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) + serv_metadata.sadd('hash_all_type', type) + serv_metadata.zincrby('base64_type:'+type, date_key, 1) + + save_base64_as_file(decode, type, hash, json_data, id) + print('found {} '.format(type)) + # duplicate + else: + serv_metadata.zincrby('base64_hash:'+hash, message, 1) # number of b64 on this paste if(find): publisher.warning('base64 decoded') @@ -68,10 +99,10 @@ def search_base64(content, message): msg = 'infoleak:automatic-detection="base64";{}'.format(message) p.populate_set_out(msg, 'Tags') -def save_base64_as_file(decode, type, hash, json_data): +def save_base64_as_file(decode, type, hash, json_data, id): - filename_b64 = os.path.join(os.environ['AIL_HOME'], - p.config.get("Directories", "base64"), type, hash[:2], hash) + local_filename_b64 = os.path.join(p.config.get("Directories", "base64"), type, hash[:2], hash) + filename_b64 = os.path.join(os.environ['AIL_HOME'], local_filename_b64) filename_json = os.path.join(os.environ['AIL_HOME'], p.config.get("Directories", "base64"), type, hash[:2], hash + '.json') @@ -83,6 +114,10 @@ def save_base64_as_file(decode, type, hash, json_data): with open(filename_b64, 'wb') as f: f.write(decode) + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'saved_path', local_filename_b64) + serv_metadata.hset('metadata_hash:'+hash, 'size', os.path.getsize(filename_b64)) + with open(filename_json, 'w') as f: f.write(json_data) @@ -103,6 +138,12 @@ if __name__ == '__main__': p = Process(config_section) max_execution_time = p.config.getint("Base64", "max_execution_time") + serv_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.getint("ARDB_Metadata", "port"), + db=p.config.getint("ARDB_Metadata", "db"), + decode_responses=True) + # Sent to the logging a description of the module publisher.info("Base64 started") @@ -127,13 +168,12 @@ if __name__ == '__main__': # Do something with the message from the queue #print(filename) content = paste.get_p_content() - search_base64(content,message) - - # (Optional) Send that thing to the next queue - #p.populate_set_out(something_has_been_done) + date = str(paste._get_p_date()) + search_base64(content,message, date) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) diff --git a/bin/Helper.py b/bin/Helper.py index 289dd956..a6ca9b49 100755 --- a/bin/Helper.py +++ b/bin/Helper.py @@ -135,6 +135,12 @@ class Process(object): db=self.config.get('RedisPubSub', 'db'), decode_responses=True) + self.serv_statistics = redis.StrictRedis( + host=self.config.get('ARDB_Statistics', 'host'), + port=self.config.get('ARDB_Statistics', 'port'), + db=self.config.get('ARDB_Statistics', 'db'), + 
decode_responses=True) + self.moduleNum = os.getpid() def populate_set_in(self): @@ -181,6 +187,9 @@ class Process(object): self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", complete_path) self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) + + curr_date = datetime.date.today() + self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_in:'+self.subscriber_name, 1) return message #except: @@ -217,3 +226,7 @@ class Process(object): time.sleep(1) continue self.pubsub.publish(message) + + def incr_module_timeout_statistic(self): + curr_date = datetime.date.today() + self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_timeout:'+self.subscriber_name, 1) diff --git a/bin/MISP_The_Hive_feeder.py b/bin/MISP_The_Hive_feeder.py index b11c44cb..299c995e 100755 --- a/bin/MISP_The_Hive_feeder.py +++ b/bin/MISP_The_Hive_feeder.py @@ -155,6 +155,8 @@ if __name__ == "__main__": flag_the_hive = False r_serv_db.set('ail:thehive', False) print('Not connected to The HIVE') + else: + HiveApi = False if HiveApi != False and flag_the_hive: try: diff --git a/bin/Release.py b/bin/Release.py index 6e7a8277..dbe57122 100755 --- a/bin/Release.py +++ b/bin/Release.py @@ -61,7 +61,8 @@ if __name__ == "__main__": publisher.info(to_print) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) diff --git a/bin/SentimentAnalysis.py b/bin/SentimentAnalysis.py index 34beea3f..8442befa 100755 --- a/bin/SentimentAnalysis.py +++ b/bin/SentimentAnalysis.py @@ -167,6 +167,7 @@ if __name__ == '__main__': try: Analyse(message, server) except TimeoutException: + p.incr_module_timeout_statistic() print ("{0} processing timeout".format(message)) continue else: diff --git a/bin/Tags.py b/bin/Tags.py index 15f8f837..0a178fef 100755 --- a/bin/Tags.py +++ b/bin/Tags.py @@ -11,6 +11,7 @@ This module create tags. 
import redis import time +import datetime from pubsublogger import publisher from Helper import Process @@ -41,6 +42,12 @@ if __name__ == '__main__': db=p.config.get("ARDB_Metadata", "db"), decode_responses=True) + serv_statistics = redis.StrictRedis( + host=p.config.get('ARDB_Statistics', 'host'), + port=p.config.get('ARDB_Statistics', 'port'), + db=p.config.get('ARDB_Statistics', 'db'), + decode_responses=True) + # Sent to the logging a description of the module publisher.info("Tags module started") @@ -67,4 +74,6 @@ if __name__ == '__main__': print(" tagged: {}".format(tag)) server_metadata.sadd('tag:'+path, tag) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_tagged:'+tag, 1) p.populate_set_out(message, 'MISP_The_Hive_feeder') diff --git a/bin/Tokenize.py b/bin/Tokenize.py index fdefeb6a..698b4fbc 100755 --- a/bin/Tokenize.py +++ b/bin/Tokenize.py @@ -60,8 +60,9 @@ if __name__ == "__main__": msg = '{} {} {}'.format(paste.p_path, word, score) p.populate_set_out(msg) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) else: diff --git a/bin/packages/Paste.py b/bin/packages/Paste.py index d1e3f0d3..6942cb31 100755 --- a/bin/packages/Paste.py +++ b/bin/packages/Paste.py @@ -52,7 +52,7 @@ class Paste(object): :Example: - PST = Paste("/home/2013/ZEeGaez5.gz") + PST = Paste("/home/2013/01/12/ZEeGaez5.gz") """ diff --git a/bin/submit_paste.py b/bin/submit_paste.py index 49c8e1f0..db92d1db 100755 --- a/bin/submit_paste.py +++ b/bin/submit_paste.py @@ -48,6 +48,9 @@ def create_paste(uuid, paste_content, ltags, ltagsgalaxies, name): print(' {} send to Global'.format(save_path)) r_serv_log_submit.sadd(uuid + ':paste_submit_link', full_path) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'submit_paste', 1) + return 0 def addError(uuid, errorMessage): @@ -60,6 +63,8 @@ def addError(uuid, errorMessage): def abord_file_submission(uuid, errorMessage): addError(uuid, errorMessage) r_serv_log_submit.set(uuid + ':end', 1) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'submit_abord', 1) remove_submit_uuid(uuid) @@ -154,6 +159,12 @@ if __name__ == "__main__": db=cfg.getint("ARDB_Metadata", "db"), decode_responses=True) + serv_statistics = redis.StrictRedis( + host=cfg.get('ARDB_Statistics', 'host'), + port=cfg.getint('ARDB_Statistics', 'port'), + db=cfg.getint('ARDB_Statistics', 'db'), + decode_responses=True) + expire_time = 120 MAX_FILE_SIZE = 1000000000 ALLOWED_EXTENSIONS = ['txt', 'sh', 'pdf'] diff --git a/var/www/modules/Flask_config.py b/var/www/modules/Flask_config.py index 80ef9f18..43c65060 100644 --- a/var/www/modules/Flask_config.py +++ b/var/www/modules/Flask_config.py @@ -143,3 +143,18 @@ DiffMaxLineLength = int(cfg.get("Flask", "DiffMaxLineLength"))#Use to display t bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info'] UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted') + + # VT +try: + from virusTotalKEYS import vt_key + if vt_key != '': + vt_auth = vt_key + vt_enabled = True + print('VT submission is enabled') + else: + vt_enabled = False + print('VT submission is disabled') +except: + vt_auth = {'apikey': cfg.get("Flask", "max_preview_char")} + vt_enabled = False + print('VT submission is disabled') diff --git 
a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py new file mode 100644 index 00000000..de91816c --- /dev/null +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -0,0 +1,248 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +''' + Flask functions and routes for the trending modules page +''' +import redis +import os +import datetime +import json +from Date import Date +import requests +from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for + +# ============ VARIABLES ============ +import Flask_config + +app = Flask_config.app +cfg = Flask_config.cfg +r_serv_metadata = Flask_config.r_serv_metadata +vt_enabled = Flask_config.vt_enabled +vt_auth = Flask_config.vt_auth + +base64Decoded = Blueprint('base64Decoded', __name__, template_folder='templates') + +# ============ FUNCTIONS ============ + +def get_date_range(num_day): + curr_date = datetime.date.today() + date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2)) + date_list = [] + + for i in range(0, num_day+1): + date_list.append(date.substract_day(i)) + + return list(reversed(date_list)) + +def substract_date(date_from, date_to): + date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8])) + date_to = datetime.date(int(date_to[0:4]), int(date_to[4:6]), int(date_to[6:8])) + delta = date_to - date_from # timedelta + l_date = [] + for i in range(delta.days + 1): + date = date_from + datetime.timedelta(i) + l_date.append( date.strftime('%Y%m%d') ) + return l_date + +def one(): + return 1 + +# ============= ROUTES ============== + +@base64Decoded.route("/base64Decoded/", methods=['GET']) +def base64Decoded_page(): + date_from = request.args.get('date_from') + date_to = request.args.get('date_to') + type = request.args.get('type') + + #date_from = '20180628' + #date_to = '20180628' + + if type not in r_serv_metadata.smembers('hash_all_type'): + type = None + + date_range = [] + if date_from is not None and date_to is not None: + #change format + if len(date_from) != 8: + date_from = date_from[0:4] + date_from[5:7] + date_from[8:10] + date_to = date_to[0:4] + date_to[5:7] + date_to[8:10] + date_range = substract_date(date_from, date_to) + + if not date_range: + date_range.append(datetime.date.today().strftime("%Y%m%d")) + + # display day type bar chart + if len(date_range) == 1 and type is None: + daily_type_chart = True + daily_date = date_range[0] + else: + daily_type_chart = False + daily_date = None + + l_64 = set() + for date in date_range: + l_hash = r_serv_metadata.zrange('base64_date:' +date, 0, -1) + if l_hash: + for hash in l_hash: + l_64.add(hash) + + num_day_sparkline = 6 + date_range_sparkline = get_date_range(num_day_sparkline) + + b64_metadata = [] + l_64 = list(l_64) + for hash in l_64: + # select requested base 64 type + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + if type is not None: + if estimated_type is not None: + if estimated_type != type: + continue + + first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen') + nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes') + size = r_serv_metadata.hget('metadata_hash:'+hash, 'size') + + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + + if hash is not None and first_seen is not None and \ + last_seen is not None and \ + 
nb_seen_in_paste is not None and \ + size is not None: + + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon = 'fa-file-o ' + elif file_type == 'audio': + file_icon = 'fa-file-video-o ' + elif file_type == 'image': + file_icon = 'fa-file-image-o' + elif file_type == 'text': + file_icon = 'fa-file-text-o' + else: + file_icon = 'fa-file' + + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + else: + b64_vt = False + b64_vt_link = '' + + sparklines_value = [] + for date_day in date_range_sparkline: + nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date_day, hash) + if nb_seen_this_day is None: + nb_seen_this_day = 0 + sparklines_value.append(int(nb_seen_this_day)) + + b64_metadata.append( (file_icon, estimated_type, hash, nb_seen_in_paste, size, first_seen, last_seen, b64_vt, b64_vt_link, sparklines_value) ) + + return render_template("base64Decoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date) + +@base64Decoded.route('/base64Decoded/hash_by_type') +def hash_by_type(): + type = request.args.get('type') + type = 'text/plain' + return render_template('base64_type.html',type = type) + +@base64Decoded.route('/base64Decoded/hash_by_type_json') +def hash_by_type_json(): + type = request.args.get('type') + + num_day_type = 30 + date_range_sparkline = get_date_range(num_day_type) + + #verify input + if type in r_serv_metadata.smembers('hash_all_type'): + type_value = [] + for date in date_range_sparkline: + num_day_type = r_serv_metadata.zscore('base64_type:'+type, date) + if num_day_type is None: + num_day_type = 0 + date = date[0:4] + '-' + date[4:6] + '-' + date[6:8] + type_value.append({ 'date' : date, 'value' : int( num_day_type )}) + + return jsonify(type_value) + else: + return jsonify() + +@base64Decoded.route('/base64Decoded/daily_type_json') +def daily_type_json(): + date = request.args.get('date') + + daily_type = set() + l_b64 = r_serv_metadata.zrange('base64_date:' +date, 0, -1) + for hash in l_b64: + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + if estimated_type is not None: + daily_type.add(estimated_type) + + type_value = [] + for day_type in daily_type: + num_day_type = r_serv_metadata.zscore('base64_type:'+day_type, date) + type_value.append({ 'date' : day_type, 'value' : int( num_day_type )}) + return jsonify(type_value) + +@base64Decoded.route('/base64Decoded/send_file_to_vt', methods=['POST']) +def send_file_to_vt(): + paste = request.form['paste'] + hash = request.form['hash'] + + b64_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + b64_content = '' + with open(b64_full_path, 'rb') as f: + b64_content = f.read() + + files = {'file': (hash, b64_content)} + response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files=files, params= {'apikey': vt_auth}) + json_response = response.json() + print(json_response) + + vt_b64_link = json_response['permalink'].split('analysis')[0] + 'analysis/' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_b64_link) + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report', '') + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=paste)) + +@base64Decoded.route('/base64Decoded/update_vt_result') +def update_vt_result(): + hash = 
request.args.get('hash') + + params = {'apikey': vt_auth, 'resource': hash} + response = requests.get('https://www.virustotal.com/vtapi/v2/file/report',params=params) + if response.status_code == 200: + json_response = response.json() + response_code = json_response['response_code'] + # report exist + if response_code == 1: + total = json_response['total'] + positive = json_response['positives'] + + b64_vt_report = 'Detection {}/{}'.format(positive,total) + # no report found + elif response_code == 0: + b64_vt_report = 'No report found' + pass + # file in queue + elif response_code == -2: + b64_vt_report = 'File in queue' + pass + + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_report', b64_vt_report) + return jsonify(hash=hash, report_vt=b64_vt_report) + elif response.status_code == 403: + Flask_config.vt_enabled = False + print('VT is disabled') + return jsonify() + else: + # TODO FIXME make json response + return jsonify() + +# ========= REGISTRATION ========= +app.register_blueprint(base64Decoded) diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/base64Decoded/templates/base64Decoded.html new file mode 100644 index 00000000..66f7829a --- /dev/null +++ b/var/www/modules/base64Decoded/templates/base64Decoded.html @@ -0,0 +1,306 @@ + + + + + + + + Analysis Information Leak framework Dashboard + + + + + + + + + + + + + + + + + + + + {% include 'navbar.html' %} + +
+
+
+

Base64 Files

+
+ +
+
+ + + {% if type %} +
+
+ {% endif %} + {% if daily_type_chart %} +
+
+ {% endif %} + + + {% if l_64|length != 0 %} +

Today Base64 files:

+ + + + + + + + + + + + + + + {% for b64 in l_64 %} + + + + + + + + + + + {% endfor %} + +
estimated type | hash | first seen | last seen | nb paste | size | Virus Total | Test
{{ b64[1] }} | {{ b64[2] }} | {{ b64[5] }} | {{ b64[6] }} | {{ b64[3] }} | {{ b64[4] }}
+
{% if vt_enabled %}
+
{% if not b64[7] %}
+
+ + +
+ {% else %} + VT scan reports + {% endif %} + + {% else %} + Virus Total submission is disabled + {% endif %} +
+
+ {% else %} +

{{daily_date[0:4]}}/{{daily_date[4:6]}}/{{daily_date[6:8]}}, No base64

+ {% endif %} + +
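The browse page above is assembled entirely from the ARDB keys documented in OVERVIEW.md at the top of this series. A minimal sketch of the read path, assuming a local ARDB instance that speaks the Redis protocol; the host, port and db below are placeholders that must match the ARDB_Metadata section of the AIL configuration:

    import redis

    # placeholders: use the ARDB_Metadata host/port/db from the AIL config
    r = redis.StrictRedis(host='localhost', port=6382, db=7, decode_responses=True)

    date = '20180629'
    # every hash seen that day, with its per-day counter and its metadata hash
    for sha1_hash in r.zrange('base64_date:' + date, 0, -1):
        meta = r.hgetall('metadata_hash:' + sha1_hash)  # estimated_type, size, first_seen, ...
        nb_today = r.zscore('base64_date:' + date, sha1_hash) or 0
        print(sha1_hash, meta.get('estimated_type'), int(nb_today))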
+ + + + + + + + + + + + + diff --git a/var/www/modules/base64Decoded/templates/base64_type.html b/var/www/modules/base64Decoded/templates/base64_type.html new file mode 100644 index 00000000..64c25b19 --- /dev/null +++ b/var/www/modules/base64Decoded/templates/base64_type.html @@ -0,0 +1,179 @@ + + + + + + + + Analysis Information Leak framework Dashboard + + + + + + + + + + + + + + + + + + + + {% include 'navbar.html' %} + +
+
+
+

Base64 Files

+
+ + +
+
+ +
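The daily chart on this page is fed by /base64Decoded/hash_by_type_json, which turns the base64_type:<type> sorted set into one {date, value} point per day. A sketch of the equivalent query, reusing the connection r from the earlier sketch; mime_type is a placeholder:

    import datetime

    def type_series(r, mime_type, num_day=30):
        series = []
        today = datetime.date.today()
        for i in range(num_day, -1, -1):
            day = today - datetime.timedelta(days=i)
            count = r.zscore('base64_type:' + mime_type, day.strftime('%Y%m%d')) or 0
            series.append({'date': day.strftime('%Y-%m-%d'), 'value': int(count)})
        return series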
+ + + + + + + + + diff --git a/var/www/modules/base64Decoded/templates/header_base64Decoded.html b/var/www/modules/base64Decoded/templates/header_base64Decoded.html new file mode 100644 index 00000000..86d529a2 --- /dev/null +++ b/var/www/modules/base64Decoded/templates/header_base64Decoded.html @@ -0,0 +1 @@ +
  • base64Decoded
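The send_file_to_vt and update_vt_result routes above wrap the two-step VirusTotal v2 API: POST the decoded file to /file/scan, then poll /file/report until response_code becomes 1. A condensed sketch of that flow; the API key and file path are placeholders:

    import requests

    VT_PARAMS = {'apikey': 'YOUR_VT_API_KEY'}  # placeholder

    def vt_scan(filepath, name):
        # submit the decoded file; VT answers with a permalink to the analysis
        with open(filepath, 'rb') as f:
            files = {'file': (name, f.read())}
        resp = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                             files=files, params=VT_PARAMS)
        return resp.json()['permalink']

    def vt_report(sha1_hash):
        resp = requests.get('https://www.virustotal.com/vtapi/v2/file/report',
                            params=dict(VT_PARAMS, resource=sha1_hash))
        # 1: report ready, 0: unknown resource, -2: still queued;
        # the same three cases update_vt_result handles
        return resp.json()['response_code']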
  • diff --git a/var/www/modules/showpaste/Flask_showpaste.py b/var/www/modules/showpaste/Flask_showpaste.py index 13c2cc45..d7f5253f 100644 --- a/var/www/modules/showpaste/Flask_showpaste.py +++ b/var/www/modules/showpaste/Flask_showpaste.py @@ -6,12 +6,14 @@ ''' import redis import json +import os import flask -from flask import Flask, render_template, jsonify, request, Blueprint, make_response +from flask import Flask, render_template, jsonify, request, Blueprint, make_response, redirect, url_for import difflib import ssdeep import Paste +import requests # ============ VARIABLES ============ import Flask_config @@ -28,13 +30,15 @@ DiffMaxLineLength = Flask_config.DiffMaxLineLength bootstrap_label = Flask_config.bootstrap_label misp_event_url = Flask_config.misp_event_url hive_case_url = Flask_config.hive_case_url +vt_enabled = Flask_config.vt_enabled showsavedpastes = Blueprint('showsavedpastes', __name__, template_folder='templates') # ============ FUNCTIONS ============ -def showpaste(content_range): - requested_path = request.args.get('paste', '') +def showpaste(content_range, requested_path): + vt_enabled = Flask_config.vt_enabled + paste = Paste.Paste(requested_path) p_date = str(paste._get_p_date()) p_date = p_date[6:]+'/'+p_date[4:6]+'/'+p_date[0:4] @@ -118,7 +122,6 @@ def showpaste(content_range): else: automatic = False - tag_hash = ssdeep.hash(tag) if r_serv_statistics.sismember('tp:'+tag, requested_path): tag_status_tp = True else: @@ -130,6 +133,37 @@ def showpaste(content_range): list_tags.append( (tag, automatic, tag_status_tp, tag_status_fp) ) + l_64 = [] + # load base64 files + if r_serv_metadata.scard('base64_paste:'+requested_path) > 0: + set_b64 = r_serv_metadata.smembers('base64_paste:'+requested_path) + for hash in set_b64: + nb_in_file = int(r_serv_metadata.zscore('base64_hash:'+hash, requested_path)) + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon = 'fa-file-o ' + elif file_type == 'audio': + file_icon = 'fa-file-video-o ' + elif file_type == 'image': + file_icon = 'fa-file-image-o' + elif file_type == 'text': + file_icon = 'fa-file-text-o' + else: + file_icon = 'fa-file' + saved_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + print(b64_vt_report) + else: + b64_vt = False + b64_vt_link = '' + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + + l_64.append( (file_icon, estimated_type, hash, saved_path, nb_in_file, b64_vt, b64_vt_link, b64_vt_report) ) + if Flask_config.pymisp is False: misp = False else: @@ -157,13 +191,15 @@ def showpaste(content_range): hive_url = hive_case_url.replace('id_here', hive_case) return render_template("show_saved_paste.html", date=p_date, bootstrap_label=bootstrap_label, active_taxonomies=active_taxonomies, active_galaxies=active_galaxies, list_tags=list_tags, source=p_source, encoding=p_encoding, language=p_language, size=p_size, mime=p_mime, lineinfo=p_lineinfo, content=p_content, initsize=len(p_content), duplicate_list = p_duplicate_list, simil_list = p_simil_list, hashtype_list = p_hashtype_list, date_list=p_date_list, - misp=misp, hive=hive, misp_eventid=misp_eventid, misp_url=misp_url, hive_caseid=hive_caseid, hive_url=hive_url) + l_64=l_64, vt_enabled=vt_enabled, misp=misp, hive=hive, 
misp_eventid=misp_eventid, misp_url=misp_url, hive_caseid=hive_caseid, hive_url=hive_url) # ============ ROUTES ============ @showsavedpastes.route("/showsavedpaste/") #completely shows the paste in a new tab def showsavedpaste(): - return showpaste(0) + requested_path = request.args.get('paste', '') + print(requested_path) + return showpaste(0, requested_path) @showsavedpastes.route("/showsavedrawpaste/") #shows raw def showsavedrawpaste(): @@ -175,7 +211,8 @@ def showsavedrawpaste(): @showsavedpastes.route("/showpreviewpaste/") def showpreviewpaste(): num = request.args.get('num', '') - return "|num|"+num+"|num|"+showpaste(max_preview_modal) + requested_path = request.args.get('paste', '') + return "|num|"+num+"|num|"+showpaste(max_preview_modal, requested_path) @showsavedpastes.route("/getmoredata/") @@ -202,5 +239,26 @@ def showDiff(): the_html = htmlD.make_file(lines1, lines2) return the_html +@showsavedpastes.route('/send_file_to_vt/', methods=['POST']) +def send_file_to_vt(): + b64_path = request.form['b64_path'] + paste = request.form['paste'] + hash = request.form['hash'] + + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + b64_content = '' + with open(b64_full_path, 'rb') as f: + b64_content = f.read() + + files = {'file': (hash, b64_content)} + response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files=files, params=vt_auth) + json_response = response.json() + print(json_response) + + vt_b64_link = json_response['permalink'].split('analysis')[0] + 'analysis/' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_b64_link) + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=paste)) + # ========= REGISTRATION ========= app.register_blueprint(showsavedpastes) diff --git a/var/www/modules/showpaste/templates/show_saved_paste.html b/var/www/modules/showpaste/templates/show_saved_paste.html index 1340f471..fec224d9 100644 --- a/var/www/modules/showpaste/templates/show_saved_paste.html +++ b/var/www/modules/showpaste/templates/show_saved_paste.html @@ -27,6 +27,11 @@ overflow-x: hidden; width:100%; } + + .red_table thead{ + background: #d91f2d; + color: #fff; + } @@ -372,6 +377,52 @@ {% endif %} + + {% if l_64|length != 0 %} +

    Base64 files:

    + + + + + + + + + + + {% for b64 in l_64 %} + + + + + + + {% endfor %} + +
estimated type | hash | saved_path | Virus Total
{{ b64[1] }} | {{ b64[2] }} ({{ b64[4] }}) | {{ b64[3] }}
+
{% if vt_enabled %}
+
{% if not b64[5] %}
+
    + + + +
    + {% else %} + VT scan reports + {% endif %} + + + + {% else %} + Virus Total submission is disabled + {% endif %} +
    + {% endif %} +
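The table above is built from the paste-to-hash mapping: showpaste() walks the base64_paste:<paste> set and, for each hash, reads how often it was decoded in this paste from the base64_hash:<hash> sorted set. A sketch, reusing the connection r from the first sketch; paste_path is a placeholder:

    def base64_files_of_paste(r, paste_path):
        rows = []
        for sha1_hash in r.smembers('base64_paste:' + paste_path):
            nb_in_paste = int(r.zscore('base64_hash:' + sha1_hash, paste_path) or 0)
            saved_path = r.hget('metadata_hash:' + sha1_hash, 'saved_path')
            rows.append((sha1_hash, nb_in_paste, saved_path))
        return rows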

    Content:

    [Raw content]

    {{ content }}

    @@ -406,9 +457,26 @@ }); $('#tableDup').DataTable(); + $('#tableb64').DataTable({ + "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]], + "iDisplayLength": 5, + "order": [[ 1, "asc" ]] + }); }); + + @@ -20,6 +21,8 @@ + + @@ -58,14 +61,8 @@
    - {% if type %}
    - {% endif %} - {% if daily_type_chart %} -
    -
    - {% endif %}
    @@ -209,7 +206,9 @@ {% if daily_type_chart %} barchart_type('/base64Decoded/daily_type_json?date={{daily_date}}', '#barchart_type'); {% endif %} - + {% if not daily_type_chart and not daily_type_chart%} + barchartstack_type('url', 'id'); + {% endif %} }); +{% if not type and not daily_type_chart %} + +{% endif %} diff --git a/var/www/modules/base64Decoded/templates/base64_types.html b/var/www/modules/base64Decoded/templates/base64_types.html new file mode 100644 index 00000000..2c2f6d60 --- /dev/null +++ b/var/www/modules/base64Decoded/templates/base64_types.html @@ -0,0 +1,229 @@ + + + + + + + + Analysis Information Leak framework Dashboard + + + + + + + + + + + + + + + + + + + +
    +
    +
    +

    Base64 Files

    +
    + + +
    +
    + +
    + + + + + + + + + From c2a976e907ede7eb32215549451abbf5f868c058 Mon Sep 17 00:00:00 2001 From: Terrtia Date: Mon, 9 Jul 2018 11:47:04 +0200 Subject: [PATCH 08/31] add: stacked barchart, resize chart --- .../base64Decoded/Flask_base64Decoded.py | 6 +- .../templates/base64Decoded.html | 358 +++++++++--------- .../base64Decoded/templates/base64_types.html | 44 +-- 3 files changed, 214 insertions(+), 194 deletions(-) diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index 0d997ad5..0ecf2c2e 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -63,8 +63,8 @@ def base64Decoded_page(): date_to = request.args.get('date_to') type = request.args.get('type') - #date_from = '20180628' - #date_to = '20180628' + #date_from = '20180628' or date_from = '2018-06-28' + #date_to = '20180628' or date_to = '2018-06-28' if type is not None: #retrieve + char @@ -218,7 +218,7 @@ def range_type_json(): date_to = request.args.get('date_to') date_from = '20180601' - date_to = '20180706' + date_to = '20180709' date_range = [] if date_from is not None and date_to is not None: diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/base64Decoded/templates/base64Decoded.html index 78aae1db..60aceaae 100644 --- a/var/www/modules/base64Decoded/templates/base64Decoded.html +++ b/var/www/modules/base64Decoded/templates/base64Decoded.html @@ -39,6 +39,10 @@ } .bar:hover{ fill: brown; + cursor: pointer; + } + .bar_stack:hover{ + cursor: pointer; } .svgText { pointer-events: none; @@ -102,7 +106,11 @@ {% if l_64|length != 0 %} -

    Today Base64 files:

    + {% if date_from|string == date_to|string %} +

    {{ date_from }} Base64 files:

    + {% else %} +

    {{ date_from }} to {{ date_to }} Base64 files:

    + {% endif %} @@ -159,6 +167,7 @@ + + + + + + -{% if not type and not daily_type_chart %} - -{% endif %} diff --git a/var/www/modules/base64Decoded/templates/base64_types.html b/var/www/modules/base64Decoded/templates/base64_types.html index 2c2f6d60..addb0c37 100644 --- a/var/www/modules/base64Decoded/templates/base64_types.html +++ b/var/www/modules/base64Decoded/templates/base64_types.html @@ -48,23 +48,23 @@ -
    +
    @@ -82,7 +82,7 @@ var yAxis = d3.axisLeft(y); var color = d3.scaleOrdinal(d3.schemeSet3); -var svg = d3.select("#chart").append("svg") +var svg = d3.select("#barchart_type").append("svg") .attr("id", "thesvg") .attr("viewBox", "0 0 1000 500") .attr("width", width + margin.left + margin.right) @@ -92,20 +92,6 @@ var svg = d3.select("#chart").append("svg") function barchart_type(url, id) { - /*var stack = d3.stack() - stack.values(function (d) { return d.values; }) - .offset("zero") - .x(function (d) { return x(d.label); }) - .y(function (d) { return d.value; });*/ - - /*var area = d3.svg.area() - .interpolate("cardinal") - .x(function (d) { return x(d.label); }) - .y0(function (d) { return y(d.y0); }) - .y1(function (d) { return y(d.y0 + d.y); });*/ - - //var color = d3.scale.ordinal().range(["#001c9c","#101b4d","#475003","#9c8305","#d3c47c"]); - d3.json("/base64Decoded/range_type_json") .then(function(data){ @@ -126,6 +112,8 @@ function barchart_type(url, id) { d.total = d.mapping[d.mapping.length - 1].y1; }); + console.log(data) + x.domain(data.map(function (d) { return (d.date).substring(5); })); //E y.domain([0, d3.max(data, function (d) { return d.total; })]); @@ -165,6 +153,18 @@ function barchart_type(url, id) { .on("mouseout", function (d) { removePopovers(); }) .on("click", function(d){ window.location.href = "/base64Decoded/" +'?type='+ d.name +'&date_from='+d.label+'&date_to='+d.label; }); + data.forEach(function(d) { + if(d.total != 0){ + svg.append("text") + .attr("class", "bar") + .attr("dy", "-.35em") + .attr('x', x(d.date.substring(5)) + x.bandwidth()/2) + .attr('y', y(d.total)) + .style("text-anchor", "middle") + .text(d.total); + } + }); + drawLegend(varNames); }); @@ -213,14 +213,14 @@ function showPopover (d) { $(this).popover('show') } -VIZ.onResize = function () { +chart.onResize = function () { var aspect = 1000 / 500, chart = $("#thesvg"); var targetWidth = chart.parent().width(); chart.attr("width", targetWidth); chart.attr("height", targetWidth / aspect); } -window.VIZ = VIZ; +window.chart = chart; From fd02085495ba9695f6f55ce3152ade7dab808a2f Mon Sep 17 00:00:00 2001 From: Terrtia Date: Mon, 9 Jul 2018 17:07:59 +0200 Subject: [PATCH 09/31] add: show hash info --- .../base64Decoded/Flask_base64Decoded.py | 80 +++++--- .../templates/base64Decoded.html | 22 +-- .../base64Decoded/templates/showHash.html | 172 ++++++++++++++++++ 3 files changed, 237 insertions(+), 37 deletions(-) create mode 100644 var/www/modules/base64Decoded/templates/showHash.html diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index 0ecf2c2e..6678451b 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -45,6 +45,31 @@ def substract_date(date_from, date_to): l_date.append( date.strftime('%Y%m%d') ) return l_date +def list_sparkline_values(date_range_sparkline, hash): + sparklines_value = [] + for date_day in date_range_sparkline: + nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date_day, hash) + if nb_seen_this_day is None: + nb_seen_this_day = 0 + sparklines_value.append(int(nb_seen_this_day)) + return sparklines_value + +def get_file_icon(estimated_type): + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon = 'fa-file-o ' + elif file_type == 'audio': + file_icon = 'fa-file-video-o ' + elif file_type == 'image': + file_icon = 'fa-file-image-o' + elif file_type == 'text': + file_icon = 
'fa-file-text-o' + else: + file_icon = 'fa-file' + + return file_icon + def one(): return 1 @@ -130,18 +155,7 @@ def base64Decoded_page(): nb_seen_in_paste is not None and \ size is not None: - file_type = estimated_type.split('/')[0] - # set file icon - if file_type == 'application': - file_icon = 'fa-file-o ' - elif file_type == 'audio': - file_icon = 'fa-file-video-o ' - elif file_type == 'image': - file_icon = 'fa-file-image-o' - elif file_type == 'text': - file_icon = 'fa-file-text-o' - else: - file_icon = 'fa-file' + file_icon = get_file_icon(estimated_type) if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): b64_vt = True @@ -150,12 +164,7 @@ def base64Decoded_page(): b64_vt = False b64_vt_link = '' - sparklines_value = [] - for date_day in date_range_sparkline: - nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date_day, hash) - if nb_seen_this_day is None: - nb_seen_this_day = 0 - sparklines_value.append(int(nb_seen_this_day)) + sparklines_value = list_sparkline_values(date_range_sparkline, hash) b64_metadata.append( (file_icon, estimated_type, hash, nb_seen_in_paste, size, first_seen, last_seen, b64_vt, b64_vt_link, sparklines_value) ) @@ -170,6 +179,38 @@ def hash_by_type(): type = 'text/plain' return render_template('base64_type.html',type = type) +@base64Decoded.route('/base64Decoded/base64_hash') +def base64_hash(): + hash = request.args.get('hash') + return render_template('base64_hash.html') + +@base64Decoded.route('/base64Decoded/showHash') +def showHash(): + hash = request.args.get('hash') + #hash = 'e02055d3efaad5d656345f6a8b1b6be4fe8cb5ea' + + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + # hash not found + if estimated_type is None: + base64Decoded_page() + + else: + file_icon = get_file_icon(estimated_type) + size = r_serv_metadata.hget('metadata_hash:'+hash, 'size') + first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen') + nb_seen_in_all_pastes = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes') + + num_day_type = 6 + date_range_sparkline = get_date_range(num_day_type) + sparkline_values = list_sparkline_values(date_range_sparkline, hash) + + print(sparkline_values) + + return render_template('showHash.html', hash=hash, size=size, estimated_type=estimated_type, file_icon=file_icon, + first_seen=first_seen, + last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values) + @base64Decoded.route('/base64Decoded/hash_by_type_json') def hash_by_type_json(): type = request.args.get('type') @@ -217,9 +258,6 @@ def range_type_json(): date_from = request.args.get('date_from') date_to = request.args.get('date_to') - date_from = '20180601' - date_to = '20180709' - date_range = [] if date_from is not None and date_to is not None: #change format diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/base64Decoded/templates/base64Decoded.html index 60aceaae..b3a4d8a6 100644 --- a/var/www/modules/base64Decoded/templates/base64Decoded.html +++ b/var/www/modules/base64Decoded/templates/base64Decoded.html @@ -121,14 +121,14 @@ - + {% for b64 in l_64 %} - + @@ -302,7 +302,7 @@ var svg = d3.select("#barchart_type").append("svg") function barchart_type_stack(url, id) { - d3.json("/base64Decoded/range_type_json") + d3.json("/base64Decoded/range_type_json?date_from={{date_from}}&date_to={{date_to}}") .then(function(data){ var labelVar = 'date'; //A @@ -474,11 
+474,7 @@ function barchart_type(url, id) { d.value = +d.value; }); - {% if daily_type_chart %} x.domain(data.map(function(d) { return d.date; })); - {% else %} - x.domain(data.map(function(d) { return d.date.substring(5); })); - {% endif %} y.domain([0, d3.max(data, function(d) { return d.value; })]); var label = svg.append("g") @@ -492,7 +488,9 @@ function barchart_type(url, id) { {% if daily_type_chart %} .attr("transform", "rotate(-20)" ); {% else %} - .attr("transform", "rotate(-70)" ); + .attr("transform", "rotate(-70)" ) + .attr("class", "bar") + .on("click", function (d) { window.location.href = "/base64Decoded/"+'?date_from='+d+'&date_to='+d }); {% endif %} svg.append("g") @@ -510,11 +508,7 @@ function barchart_type(url, id) { .enter().append("rect") .attr("class", "bar") //.style("fill", "steelblue") - {% if daily_type_chart %} .attr("x", function(d) { return x(d.date); }) - {% else %} - .attr("x", function(d) { return x(d.date.substring(5)); }) - {% endif %} .attr("width", x.bandwidth()) .attr("y", function(d) { return y(d.value); }) .attr("height", function(d) { return height - y(d.value); }) @@ -534,11 +528,7 @@ function barchart_type(url, id) { //.text(function(d) { return d.value; }); .text(d.value) .style("text-anchor", "middle") - {% if daily_type_chart %} .attr('x', x(d.date) + x.bandwidth()/2) - {% else %} - .attr('x', x(d.date.substring(5)) + x.bandwidth()/2) - {% endif %} .attr('y', y(d.value)); } }); diff --git a/var/www/modules/base64Decoded/templates/showHash.html b/var/www/modules/base64Decoded/templates/showHash.html new file mode 100644 index 00000000..95c1f5fb --- /dev/null +++ b/var/www/modules/base64Decoded/templates/showHash.html @@ -0,0 +1,172 @@ + + + + + + + + Analysis Information Leak framework Dashboard + + + + + + + + + + + + + + + + + + + + + + + {% include 'navbar.html' %} + +
    +
    + + +
    + + +
    +
    +

    {{ hash }} :

    +     + 6 / 26 +
    • + +
      +
      + +
    nb paste size Virus TotalTestSparkline
      {{ b64[1] }}{{ b64[2] }}{{ b64[2] }} {{ b64[5] }} {{ b64[6] }} {{ b64[3] }}
    + + + + + + + + + + + + + + + + + + +
Estimated type | First_seen | Last_seen | Size (Kb) | nb_seen_in_all_pastes
{{ estimated_type }} | {{ first_seen }} | {{ last_seen }} | {{ size }} | {{ nb_seen_in_all_pastes }}
    +
    +
    +
    +
    + +
    + + +
    +
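Every field displayed on this page lives in the single metadata_hash:<hash> ARDB hash, so the whole panel can be fetched with one HGETALL. A sketch reusing the connection r from the first sketch; the digest is the example commented out in showHash() above:

    sha1_hash = 'e02055d3efaad5d656345f6a8b1b6be4fe8cb5ea'
    meta = r.hgetall('metadata_hash:' + sha1_hash)
    # fields per OVERVIEW.md: saved_path, size, first_seen, last_seen,
    # estimated_type, nb_seen_in_all_pastes, plus vt_link/vt_report once submitted
    print(meta.get('estimated_type'), meta.get('first_seen'), meta.get('nb_seen_in_all_pastes'))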
    + + + + + + + + + + + + + From 87b7facba83bb93b2c96f6f61e444bb8e1f96556 Mon Sep 17 00:00:00 2001 From: Terrtia Date: Thu, 12 Jul 2018 17:07:17 +0200 Subject: [PATCH 10/31] add: base 64 node graph --- bin/Helper.py | 58 ++-- .../base64Decoded/Flask_base64Decoded.py | 86 ++++- .../templates/base64Decoded.html | 14 +- .../base64Decoded/templates/showHash.html | 308 ++++++++++++++++-- 4 files changed, 400 insertions(+), 66 deletions(-) diff --git a/bin/Helper.py b/bin/Helper.py index a6ca9b49..7c035649 100755 --- a/bin/Helper.py +++ b/bin/Helper.py @@ -167,39 +167,39 @@ class Process(object): return None else: - #try: - if '.gz' in message: - path = message.split(".")[-2].split("/")[-1] - #find start of path with AIL_HOME - index_s = message.find(os.environ['AIL_HOME']) - #Stop when .gz - index_e = message.find(".gz")+3 - if(index_s == -1): - complete_path = message[0:index_e] + try: + if '.gz' in message: + path = message.split(".")[-2].split("/")[-1] + #find start of path with AIL_HOME + index_s = message.find(os.environ['AIL_HOME']) + #Stop when .gz + index_e = message.find(".gz")+3 + if(index_s == -1): + complete_path = message[0:index_e] + else: + complete_path = message[index_s:index_e] + else: - complete_path = message[index_s:index_e] + path = "-" + complete_path = "?" - else: - path = "-" - complete_path = "?" + value = str(timestamp) + ", " + path + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", complete_path) + self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) - value = str(timestamp) + ", " + path - self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) - self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", complete_path) - self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) + curr_date = datetime.date.today() + self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_in:'+self.subscriber_name, 1) + return message - curr_date = datetime.date.today() - self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_in:'+self.subscriber_name, 1) - return message - - #except: - #print('except') - #path = "?" - #value = str(timestamp) + ", " + path - #self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) - #self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", "?") - #self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) - #return message + except: + print('except') + path = "?" 
+ value = str(timestamp) + ", " + path + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) + self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", "?") + self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) + return message def populate_set_out(self, msg, channel=None): # multiproc diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index 6678451b..084e75c9 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -70,6 +70,22 @@ def get_file_icon(estimated_type): return file_icon +def get_file_icon_text(estimated_type): + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon_text = '\uf15b' + elif file_type == 'audio': + file_icon_text = '\uf1c7' + elif file_type == 'image': + file_icon_text = '\uf03e' + elif file_type == 'text': + file_icon_text = '\uf15c' + else: + file_icon_text = '\uf15b' + + return file_icon_text + def one(): return 1 @@ -88,6 +104,9 @@ def base64Decoded_page(): date_to = request.args.get('date_to') type = request.args.get('type') + if type == 'All types': + type = None + #date_from = '20180628' or date_from = '2018-06-28' #date_to = '20180628' or date_to = '2018-06-28' @@ -189,10 +208,15 @@ def showHash(): hash = request.args.get('hash') #hash = 'e02055d3efaad5d656345f6a8b1b6be4fe8cb5ea' + # TODO FIXME show error + if hash is None: + return base64Decoded_page() + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') # hash not found + # TODO FIXME show error if estimated_type is None: - base64Decoded_page() + return base64Decoded_page() else: file_icon = get_file_icon(estimated_type) @@ -290,6 +314,66 @@ def range_type_json(): return jsonify(range_type) +@base64Decoded.route('/base64Decoded/hash_graph_node_json') +def hash_graph_node_json(): + hash = request.args.get('hash') + + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + + if hash is not None and estimated_type is not None: + + nodes_set_hash = set() + nodes_set_paste = set() + links_set = set() + + url = hash + first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen') + nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes') + size = r_serv_metadata.hget('metadata_hash:'+hash, 'size') + + nodes_set_hash.add((hash, 1, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) + + #get related paste + l_pastes = r_serv_metadata.zrange('base64_hash:'+hash, 0, -1) + for paste in l_pastes: + url = paste + #nb_seen_in_this_paste = nb_in_file = int(r_serv_metadata.zscore('base64_hash:'+hash, paste)) + nb_base64_in_paste = r_serv_metadata.scard('base64_paste:'+paste) + + nodes_set_paste.add((paste, 2,nb_base64_in_paste,url)) + links_set.add((hash, paste)) + + l_hash = r_serv_metadata.smembers('base64_paste:'+paste) + for child_hash in l_hash: + if child_hash != hash: + url = child_hash + first_seen = r_serv_metadata.hget('metadata_hash:'+child_hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+child_hash, 'last_seen') + nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+child_hash, 'nb_seen_in_all_pastes') + size = r_serv_metadata.hget('metadata_hash:'+child_hash, 'size') + estimated_type = r_serv_metadata.hget('metadata_hash:'+child_hash, 
'estimated_type') + + nodes_set_hash.add((child_hash, 1, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) + links_set.add((child_hash, paste)) + + #l_pastes_child = r_serv_metadata.zrange('base64_hash:'+child_hash, 0, -1) + #for child_paste in l_pastes_child: + + nodes = [] + for node in nodes_set_hash: + nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('base64Decoded.showHash', hash=node[7]), 'hash': True}) + for node in nodes_set_paste: + nodes.append({"id": node[0], "group": node[1], "nb_seen_in_paste": node[2],"url": url_for('showsavedpastes.showsavedpaste', paste=node[3]), 'hash': False}) + links = [] + for link in links_set: + links.append({"source": link[0], "target": link[1]}) + json = {"nodes": nodes, "links": links} + return jsonify(json) + + else: + return jsonify({}) + @base64Decoded.route('/base64Decoded/base64_types') def base64_types(): date_from = 20180701 diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/base64Decoded/templates/base64Decoded.html index b3a4d8a6..fdbec240 100644 --- a/var/www/modules/base64Decoded/templates/base64Decoded.html +++ b/var/www/modules/base64Decoded/templates/base64Decoded.html @@ -25,6 +25,9 @@ @@ -102,16 +122,52 @@ + +
    +
    + +
    +
    + Graph +
    +
    +
    +
    +
    +
    +
    + +
    +
    +
    + Graph +
    +
    +
    + +
    +
    +
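The graph panel above is populated from /base64Decoded/hash_graph_node_json, whose {"nodes": [...], "links": [...]} payload is a plain node-link structure. As an illustration, the same payload loads directly into networkx instead of d3; the base URL and digest below are placeholders:

    import requests
    import networkx as nx

    data = requests.get('http://127.0.0.1:7000/base64Decoded/hash_graph_node_json',
                        params={'hash': 'e02055d3efaad5d656345f6a8b1b6be4fe8cb5ea'}).json()
    g = nx.Graph()
    for node in data['nodes']:
        # 'group' separates the requested hash, its pastes, and sibling hashes
        g.add_node(node['id'], group=node['group'], is_hash=node['hash'])
    for link in data['links']:
        g.add_edge(link['source'], link['target'])
    print(g.number_of_nodes(), 'nodes,', g.number_of_edges(), 'links')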
    + + + From fce324e370b0ec6f8fc51545be5ce6463af4075c Mon Sep 17 00:00:00 2001 From: Terrtia Date: Fri, 13 Jul 2018 11:54:14 +0200 Subject: [PATCH 11/31] fix: release tiemeout alarm, chg:icon graph node --- bin/Release.py | 3 ++- .../modules/base64Decoded/Flask_base64Decoded.py | 4 ++-- .../modules/base64Decoded/templates/showHash.html | 14 ++++++++++---- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/bin/Release.py b/bin/Release.py index dbe57122..43c84b04 100755 --- a/bin/Release.py +++ b/bin/Release.py @@ -37,6 +37,7 @@ if __name__ == "__main__": regex = '|'.join(regexs) while True: + signal.alarm(max_execution_time) filepath = p.get_from_set() if filepath is None: publisher.debug("Script Release is Idling 10s") @@ -47,7 +48,7 @@ if __name__ == "__main__": paste = Paste.Paste(filepath) content = paste.get_p_content() - signal.alarm(max_execution_time) + #signal.alarm(max_execution_time) try: releases = set(re.findall(regex, content)) if len(releases) == 0: diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index 084e75c9..c3a5c937 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -78,7 +78,7 @@ def get_file_icon_text(estimated_type): elif file_type == 'audio': file_icon_text = '\uf1c7' elif file_type == 'image': - file_icon_text = '\uf03e' + file_icon_text = '\uf1c5' elif file_type == 'text': file_icon_text = '\uf15c' else: @@ -354,7 +354,7 @@ def hash_graph_node_json(): size = r_serv_metadata.hget('metadata_hash:'+child_hash, 'size') estimated_type = r_serv_metadata.hget('metadata_hash:'+child_hash, 'estimated_type') - nodes_set_hash.add((child_hash, 1, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) + nodes_set_hash.add((child_hash, 3, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) links_set.add((child_hash, paste)) #l_pastes_child = r_serv_metadata.zrange('base64_hash:'+child_hash, 0, -1) diff --git a/var/www/modules/base64Decoded/templates/showHash.html b/var/www/modules/base64Decoded/templates/showHash.html index 5de3346d..2796d572 100644 --- a/var/www/modules/base64Decoded/templates/showHash.html +++ b/var/www/modules/base64Decoded/templates/showHash.html @@ -50,6 +50,10 @@ pointer-events: none; } + .graph_node_icon { + pointer-events: none; + } + .node text { font: 8px sans-serif; pointer-events: auto; @@ -286,15 +290,17 @@ d3.json(url) .attr("r", function(d) { return (d.hash) ? 
6 : 5; }) .attr("fill", function(d) { - if(!d.hash){return color(d.group)} - return 'white'; }); + if(!d.hash){ return color(d.group);} + if(d.group == 1){ return "orange";} + return "rgb(141, 211, 199)"; }); node.append('text') .attr('text-anchor', 'middle') .attr('dominant-baseline', 'central') - //.text(function(d) { return ICON_UNICODE[d.nodeType]; }); + .attr("class", "graph_node_icon") .attr('font-family', 'FontAwesome') - .attr('font-size', '12px' ) + .attr('font-size', '8px' ) + .attr('pointer-events', 'none') .text(function(d) { if(d.hash){ return d.icon From 6f69da0c0d26982731176c2a51b1b167fb6c183d Mon Sep 17 00:00:00 2001 From: Terrtia Date: Mon, 16 Jul 2018 10:32:41 +0200 Subject: [PATCH 12/31] fix: paste submit css --- bin/MISP_The_Hive_feeder.py | 3 ++- var/www/modules/PasteSubmit/templates/PasteSubmit.html | 2 ++ var/www/modules/base64Decoded/Flask_base64Decoded.py | 4 ++++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/bin/MISP_The_Hive_feeder.py b/bin/MISP_The_Hive_feeder.py index 299c995e..a850bc2e 100755 --- a/bin/MISP_The_Hive_feeder.py +++ b/bin/MISP_The_Hive_feeder.py @@ -149,7 +149,6 @@ if __name__ == "__main__": if flag_the_hive: try: HiveApi = TheHiveApi(the_hive_url, the_hive_key, cert = the_hive_verifycert) - r_serv_db.set('ail:thehive', True) except: HiveApi = False flag_the_hive = False @@ -161,10 +160,12 @@ if __name__ == "__main__": if HiveApi != False and flag_the_hive: try: HiveApi.get_alert(0) + r_serv_db.set('ail:thehive', True) print('Connected to The HIVE:', the_hive_url) except thehive4py.exceptions.AlertException: HiveApi = False flag_the_hive = False + r_serv_db.set('ail:thehive', False) print('Not connected to The HIVE') while True: diff --git a/var/www/modules/PasteSubmit/templates/PasteSubmit.html b/var/www/modules/PasteSubmit/templates/PasteSubmit.html index ce1fb29f..25e19c50 100644 --- a/var/www/modules/PasteSubmit/templates/PasteSubmit.html +++ b/var/www/modules/PasteSubmit/templates/PasteSubmit.html @@ -31,6 +31,8 @@
    + +
    diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index c3a5c937..177908f4 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -235,6 +235,10 @@ def showHash(): first_seen=first_seen, last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values) +@base64Decoded.route('/base64Decoded/test_json') +def test_json(): + return jsonify([{'date': "2018-09-09", 'value': 34}, {'date': "2018-09-10", 'value': 56}, {'date': "2018-09-11", 'value': 0}, {'date': "2018-09-12", 'value': 12}]) + @base64Decoded.route('/base64Decoded/hash_by_type_json') def hash_by_type_json(): type = request.args.get('type') From 412b012ddfb512296830e85b9c006ccba4d1554c Mon Sep 17 00:00:00 2001 From: Terrtia Date: Mon, 16 Jul 2018 16:45:36 +0200 Subject: [PATCH 13/31] add: hash line_graph (nb/day) --- .../base64Decoded/Flask_base64Decoded.py | 35 ++++- .../base64Decoded/templates/showHash.html | 120 ++++++++++++++++++ 2 files changed, 151 insertions(+), 4 deletions(-) diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index 177908f4..8284dd16 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -235,10 +235,6 @@ def showHash(): first_seen=first_seen, last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values) -@base64Decoded.route('/base64Decoded/test_json') -def test_json(): - return jsonify([{'date': "2018-09-09", 'value': 34}, {'date': "2018-09-10", 'value': 56}, {'date': "2018-09-11", 'value': 0}, {'date': "2018-09-12", 'value': 12}]) - @base64Decoded.route('/base64Decoded/hash_by_type_json') def hash_by_type_json(): type = request.args.get('type') @@ -318,6 +314,37 @@ def range_type_json(): return jsonify(range_type) +@base64Decoded.route('/base64Decoded/hash_graph_line_json') +def hash_graph_line_json(): + hash = request.args.get('hash') + date_from = request.args.get('date_from') + date_to = request.args.get('date_to') + + #hash = '9c748d28d78a64aef99e7ba866a433eb635c6d7a' + + if date_from is None or date_to is None: + nb_days_seen_in_pastes = 30 + else: + # # TODO: # FIXME: + nb_days_seen_in_pastes = 30 + + date_range_seen_in_pastes = get_date_range(nb_days_seen_in_pastes) + + #verify input + if r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') is not None: + json_seen_in_paste = [] + for date in date_range_seen_in_pastes: + nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date, hash) + if nb_seen_this_day is None: + nb_seen_this_day = 0 + date = date[0:4] + '-' + date[4:6] + '-' + date[6:8] + json_seen_in_paste.append({ 'date' : date, 'value' : int( nb_seen_this_day )}) + + return jsonify(json_seen_in_paste) + else: + return jsonify() + + @base64Decoded.route('/base64Decoded/hash_graph_node_json') def hash_graph_node_json(): hash = request.args.get('hash') diff --git a/var/www/modules/base64Decoded/templates/showHash.html b/var/www/modules/base64Decoded/templates/showHash.html index 2796d572..e63b8cd1 100644 --- a/var/www/modules/base64Decoded/templates/showHash.html +++ b/var/www/modules/base64Decoded/templates/showHash.html @@ -74,6 +74,16 @@ .graph_panel { padding: unset; } + + .line_graph { + fill: none; + stroke: steelblue; + stroke-width: 2px; + stroke-linejoin: round; + stroke-linecap: round; + stroke-width: 1.5; + 
+ } @@ -153,6 +163,17 @@
    + +
    +
    + Graph +
    +
    +
    +
    +
    +
    + @@ -166,6 +187,7 @@ sparklines("sparkline", {{ sparkline_values }}) all_graph.node_graph = create_graph('/base64Decoded/hash_graph_node_json?hash={{hash}}'); + all_graph.line_chart = create_line_chart('graph_line', '/base64Decoded/hash_graph_line_json?hash={{hash}}'); all_graph.onResize(); }); @@ -415,6 +437,104 @@ all_graph.onResize = function () { } window.all_graph = all_graph; + + + From d9c924c38d52a2cb150b5deaf51baf00b7274db8 Mon Sep 17 00:00:00 2001 From: Terrtia Date: Tue, 17 Jul 2018 14:23:49 +0200 Subject: [PATCH 14/31] chg: use json to submit hash --- .../base64Decoded/Flask_base64Decoded.py | 40 ++++++++++---- .../templates/base64Decoded.html | 27 ++++++---- .../base64Decoded/templates/showHash.html | 53 +++++++++++++------ var/www/modules/showpaste/Flask_showpaste.py | 7 ++- .../showpaste/templates/show_saved_paste.html | 40 ++++++++------ 5 files changed, 111 insertions(+), 56 deletions(-) diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index 8284dd16..2140b368 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -179,13 +179,18 @@ def base64Decoded_page(): if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): b64_vt = True b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') else: b64_vt = False b64_vt_link = '' + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + # hash never refreshed + if b64_vt_report is None: + b64_vt_report = '' sparklines_value = list_sparkline_values(date_range_sparkline, hash) - b64_metadata.append( (file_icon, estimated_type, hash, nb_seen_in_paste, size, first_seen, last_seen, b64_vt, b64_vt_link, sparklines_value) ) + b64_metadata.append( (file_icon, estimated_type, hash, nb_seen_in_paste, size, first_seen, last_seen, b64_vt, b64_vt_link, b64_vt_report, sparklines_value) ) l_type = r_serv_metadata.smembers('hash_all_type') @@ -229,9 +234,21 @@ def showHash(): date_range_sparkline = get_date_range(num_day_type) sparkline_values = list_sparkline_values(date_range_sparkline, hash) - print(sparkline_values) + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + else: + b64_vt = False + b64_vt_link = '' + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + # hash never refreshed + if b64_vt_report is None: + b64_vt_report = '' - return render_template('showHash.html', hash=hash, size=size, estimated_type=estimated_type, file_icon=file_icon, + return render_template('showHash.html', hash=hash, vt_enabled=vt_enabled, b64_vt=b64_vt, b64_vt_link=b64_vt_link, + b64_vt_report=b64_vt_report, + size=size, estimated_type=estimated_type, file_icon=file_icon, first_seen=first_seen, last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values) @@ -411,10 +428,9 @@ def base64_types(): date_to = 20180706 return render_template('base64_types.html', date_from=date_from, date_to=date_to) -@base64Decoded.route('/base64Decoded/send_file_to_vt', methods=['POST']) -def send_file_to_vt(): - paste = request.form['paste'] - hash = request.form['hash'] +@base64Decoded.route('/base64Decoded/send_file_to_vt_js') +def send_file_to_vt_js(): + hash = request.args.get('hash') 
b64_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) @@ -427,11 +443,13 @@ def send_file_to_vt(): json_response = response.json() print(json_response) - vt_b64_link = json_response['permalink'].split('analysis')[0] + 'analysis/' - r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_b64_link) - b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report', '') + vt_link = json_response['permalink'].split('analysis')[0] + 'analysis/' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_link) + vt_report = 'Please Refresh' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_report', vt_report) + + return jsonify({'vt_link': vt_link, 'vt_report': vt_report}) - return redirect(url_for('showsavedpastes.showsavedpaste', paste=paste)) @base64Decoded.route('/base64Decoded/update_vt_result') def update_vt_result(): diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/base64Decoded/templates/base64Decoded.html index fdbec240..bc71ed44 100644 --- a/var/www/modules/base64Decoded/templates/base64Decoded.html +++ b/var/www/modules/base64Decoded/templates/base64Decoded.html @@ -139,21 +139,21 @@ {% if vt_enabled %} {% if not b64[7] %} - - - - + {% else %} - VT scan reports + VT Report {% endif %} +
     {{ b64[9] }}
    + {% else %} Virus Total submission is disabled {% endif %} + @@ -240,6 +240,15 @@ }); } + function sendFileToVT(hash) { + //send file to vt + $.getJSON('/base64Decoded/send_file_to_vt_js?hash='+hash, + function(data) { + var content = ' '+ ' VT Report' +''; + $('#submit_vt_'+hash).remove(); + $('darkbutton_'+hash).append(content); + }); + } diff --git a/var/www/modules/base64Decoded/templates/showHash.html b/var/www/modules/base64Decoded/templates/showHash.html index e63b8cd1..cd7ccfa2 100644 --- a/var/www/modules/base64Decoded/templates/showHash.html +++ b/var/www/modules/base64Decoded/templates/showHash.html @@ -131,10 +131,26 @@
    - - + + {% if vt_enabled %} + {% if not b64_vt %} + + + + {% else %} + VT Report + {% endif %} + + {% else %} + Virus Total submission is disabled + {% endif %} +
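The hunks above replace the old form-driven VirusTotal submission on the hash page with the same asynchronous flow used in the table view: the button calls send_file_to_vt_js, which uploads the stored file, records the permalink as 'vt_link', seeds 'vt_report' with a 'Please Refresh' placeholder, and returns both as JSON so the button can be swapped client-side. A minimal sketch of that round trip, assuming the public VirusTotal API v2 scan endpoint and a valid key in vt_auth (submit_to_vt is a hypothetical helper, not part of the patch):

    import requests

    def submit_to_vt(b64_full_path, sha1_hash, vt_auth):
        # Upload the stored sample to VirusTotal (public API v2).
        with open(b64_full_path, 'rb') as f:
            response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan',
                                     files={'file': (sha1_hash, f)},
                                     params={'apikey': vt_auth})
        json_response = response.json()

        # Keep only the base of the returned permalink, as the endpoint does.
        vt_link = json_response['permalink'].split('analysis')[0] + 'analysis/'
        # The scan is asynchronous, so there is no verdict yet; the report
        # field holds a placeholder until update_vt_result fetches it.
        return {'vt_link': vt_link, 'vt_report': 'Please Refresh'}

The returned JSON mirrors what the jQuery sendFileToVT callback expects when it rewrites the submit button into a 'VT Report' link.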
    @@ -166,7 +182,7 @@
    - Graph + Graph
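The retitled Graph panel is driven by create_line_chart, which polls hash_graph_line_json and expects one point per day over the selected range, zero-filled so the line never has gaps. A quick way to inspect that payload against a running instance (host, port and hash below are placeholders, assuming the Flask server is reachable locally):

    import requests

    # Hypothetical local instance and an example hash; adjust to your setup.
    url = 'http://127.0.0.1:7000/base64Decoded/hash_graph_line_json'
    params = {'hash': '9c748d28d78a64aef99e7ba866a433eb635c6d7a'}

    # Each point looks like {'date': '2018-07-16', 'value': 3}; days with
    # no sighting are returned with value 0 by the endpoint.
    for point in requests.get(url, params=params).json():
        print(point['date'], point['value'])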
    @@ -196,12 +212,22 @@ }); From 524d5fe604d6ffc2af0bdca0ab1be99abf2c7fc6 Mon Sep 17 00:00:00 2001 From: Terrtia Date: Wed, 18 Jul 2018 10:09:35 +0200 Subject: [PATCH 15/31] add: hash download --- .../base64Decoded/Flask_base64Decoded.py | 41 ++++++++++++++++++- .../base64Decoded/templates/showHash.html | 4 ++ 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py index 2140b368..0aef886e 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -9,8 +9,12 @@ import os import datetime import json from Date import Date + +from io import BytesIO +import zipfile + import requests -from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for +from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, send_file # ============ VARIABLES ============ import Flask_config @@ -252,6 +256,41 @@ def showHash(): first_seen=first_seen, last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values) +@app.route('/base64Decoded/downloadHash') +def downloadHash(): + hash = request.args.get('hash') + # sanitize hash + hash = hash.split('/')[0] + + # hash exist + if r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') is not None: + + b64_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + hash_content = '' + try: + with open(b64_full_path, 'rb') as f: + hash_content = f.read() + + # zip buffer + result = BytesIO() + temp = BytesIO() + temp.write(hash_content) + + with zipfile.ZipFile(result, "w") as zf: + #zf.setpassword(b"infected") + zf.writestr( hash, temp.getvalue()) + + filename = hash + '.zip' + result.seek(0) + + return send_file(result, attachment_filename=filename, as_attachment=True) + except Exception as e: + print(e) + return 'Server Error' + else: + return 'hash: ' + hash + " don't exist" + @base64Decoded.route('/base64Decoded/hash_by_type_json') def hash_by_type_json(): type = request.args.get('type') diff --git a/var/www/modules/base64Decoded/templates/showHash.html b/var/www/modules/base64Decoded/templates/showHash.html index cd7ccfa2..6153e3f8 100644 --- a/var/www/modules/base64Decoded/templates/showHash.html +++ b/var/www/modules/base64Decoded/templates/showHash.html @@ -151,6 +151,10 @@ Virus Total submission is disabled {% endif %} + + +
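The new downloadHash route streams a stored sample back as an in-memory zip: the raw bytes are written into a single-entry archive held in a BytesIO buffer, which is rewound before being handed to Flask's send_file (the intermediate temp buffer in the patch is redundant, since writestr accepts bytes directly). A condensed sketch of the pattern, with zip_bytes as a hypothetical helper:

    import zipfile
    from io import BytesIO

    def zip_bytes(name, data):
        # Pack raw bytes into a single-entry, in-memory zip archive.
        result = BytesIO()
        with zipfile.ZipFile(result, 'w') as zf:
            # zipfile cannot create password-protected archives;
            # ZipFile.setpassword() only applies when reading members,
            # which is why the patch leaves that call commented out.
            zf.writestr(name, data)
        result.seek(0)  # rewind so send_file() streams from the first byte
        return result

    # e.g. send_file(zip_bytes(hash, hash_content), as_attachment=True,
    #                attachment_filename=hash + '.zip')  # Flask < 2.0 keyword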
    From e25dd52d641347930b0efcdb8713187cd03c0d0a Mon Sep 17 00:00:00 2001 From: Terrtia Date: Wed, 18 Jul 2018 11:45:19 +0200 Subject: [PATCH 16/31] add: Binary decoder --- OVERVIEW.md | 6 + bin/Base64.py | 2 + bin/Binary.py | 180 ++++++++++++++++++ bin/packages/config.cfg.sample | 4 + bin/packages/modules.cfg | 4 + .../showpaste/templates/show_saved_paste.html | 2 +- 6 files changed, 197 insertions(+), 1 deletion(-) create mode 100755 bin/Binary.py diff --git a/OVERVIEW.md b/OVERVIEW.md index 471f635d..d4ad525b 100644 --- a/OVERVIEW.md +++ b/OVERVIEW.md @@ -36,12 +36,18 @@ ARDB overview 'vt_report' vt_report 'nb_seen_in_all_pastes' nb_seen_in_all_pastes + SET - 'hash_base64_all_type' hash_type * + SET - 'hash_binary_all_type' hash_type * + SET - 'base64_paste:'+paste hash * + SET - 'binary_paste:'+paste hash * ZADD - 'base64_date:'+20180622 hash * nb_seen_this_day ZADD - 'base64_hash'+hash paste * nb_seen_in_paste + ZADD - 'binary_hash'+hash paste * nb_seen_in_paste SET - 'hash_all_type' hash_type ZADD - 'base64_type:'+type date nb_seen + ZADD - 'binary_type:'+type date nb_seen diff --git a/bin/Base64.py b/bin/Base64.py index 767f36f9..e8b3fbc5 100755 --- a/bin/Base64.py +++ b/bin/Base64.py @@ -40,6 +40,7 @@ def search_base64(content, message, date): for b64 in base64_list: if len(b64) >= 40 : decode = base64.b64decode(b64) + print(decode) type = magic.from_buffer(decode, mime=True) #print(type) @@ -80,6 +81,7 @@ def search_base64(content, message, date): # create hash metadata serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) serv_metadata.sadd('hash_all_type', type) + serv_metadata.sadd('hash_base64_all_type', type) serv_metadata.zincrby('base64_type:'+type, date_key, 1) save_base64_as_file(decode, type, hash, json_data, id) diff --git a/bin/Binary.py b/bin/Binary.py new file mode 100755 index 00000000..00c422f6 --- /dev/null +++ b/bin/Binary.py @@ -0,0 +1,180 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" + Binary module + + Dectect Binary and decode it +""" +import time +import os +import datetime +import redis + +from pubsublogger import publisher + +from Helper import Process +from packages import Paste + +import re +from hashlib import sha1 +import magic +import json + +import signal + +class TimeoutException(Exception): + pass + +def timeout_handler(signum, frame): + raise TimeoutException + +signal.signal(signal.SIGALRM, timeout_handler) + +def decode_binary_string(s): + return ''.join(chr(int(s[i*8:i*8+8],2)) for i in range(len(s)//8)) + +def search_binary(content, message, date): + find = False + binary_list = re.findall(regex_binary, content) + if(len(binary_list) > 0): + + for binary in binary_list: + if len(binary) >= 40 : + decode = decode_binary_string(binary).encode() + print(decode) + print(message) + + type = magic.from_buffer(decode, mime=True) + + find = True + hash = sha1(decode).hexdigest() + print(hash) + + data = {} + data['name'] = hash + data['date'] = datetime.datetime.now().strftime("%d/%m/%y") + data['origin'] = message + data['estimated type'] = type + json_data = json.dumps(data) + + date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) + date_key = date[0:4] + date[4:6] + date[6:8] + + serv_metadata.zincrby('binary_date:'+date_key, hash, 1) + + # first time we see this hash + if not serv_metadata.hexists('metadata_hash:'+hash, 'estimated_type'): + serv_metadata.hset('metadata_hash:'+hash, 'first_seen', date_paste) + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + else: + 
serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + + # first time we see this file encoding on this paste + if serv_metadata.zscore('binary_hash:'+hash, message) is None: + print('first binary') + serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + + serv_metadata.sadd('binary_paste:'+message, hash) # paste - hash map + serv_metadata.zincrby('binary_hash:'+hash, message, 1)# hash - paste map + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) + serv_metadata.sadd('hash_all_type', type) + serv_metadata.sadd('hash_binary_all_type', type) + serv_metadata.zincrby('binary_type:'+type, date_key, 1) + + save_binary_as_file(decode, type, hash, json_data, id) + print('found {} '.format(type)) + # duplicate + else: + serv_metadata.zincrby('binary_hash:'+hash, message, 1) # number of b64 on this paste + + if(find): + publisher.warning('binary decoded') + #Send to duplicate + p.populate_set_out(message, 'Duplicate') + #send to Browse_warning_paste + msg = ('binary;{}'.format(message)) + p.populate_set_out( msg, 'alertHandler') + + msg = 'infoleak:automatic-detection="binary";{}'.format(message) + p.populate_set_out(msg, 'Tags') + +def save_binary_as_file(decode, type, hash, json_data, id): + + local_filename_b64 = os.path.join(p.config.get("Directories", "base64"), type, hash[:2], hash) + filename_b64 = os.path.join(os.environ['AIL_HOME'], local_filename_b64) + + filename_json = os.path.join(os.environ['AIL_HOME'], + p.config.get("Directories", "base64"), type, hash[:2], hash + '.json') + + dirname = os.path.dirname(filename_b64) + if not os.path.exists(dirname): + os.makedirs(dirname) + + with open(filename_b64, 'wb') as f: + f.write(decode) + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'saved_path', local_filename_b64) + serv_metadata.hset('metadata_hash:'+hash, 'size', os.path.getsize(filename_b64)) + + with open(filename_json, 'w') as f: + f.write(json_data) + + + + +if __name__ == '__main__': + # If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. 
+ publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'Binary' + + # Setup the I/O queues + p = Process(config_section) + max_execution_time = p.config.getint("Binary", "max_execution_time") + + serv_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.getint("ARDB_Metadata", "port"), + db=p.config.getint("ARDB_Metadata", "db"), + decode_responses=True) + + # Sent to the logging a description of the module + publisher.info("Binary started") + + regex_binary = '[0-1]{40,}' + re.compile(regex_binary) + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + if message is None: + + publisher.debug("{} queue is empty, waiting".format(config_section)) + time.sleep(1) + continue + + filename = message + paste = Paste.Paste(filename) + + signal.alarm(max_execution_time) + try: + # Do something with the message from the queue + #print(filename) + content = paste.get_p_content() + date = str(paste._get_p_date()) + search_binary(content,message, date) + + except TimeoutException: + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue + else: + signal.alarm(0) diff --git a/bin/packages/config.cfg.sample b/bin/packages/config.cfg.sample index ae015d8e..eddd5165 100644 --- a/bin/packages/config.cfg.sample +++ b/bin/packages/config.cfg.sample @@ -61,6 +61,10 @@ max_execution_time = 90 path = Base64/ max_execution_time = 60 +[Binary] +path = Base64/ +max_execution_time = 60 + [Modules_Duplicates] #Number of month to look back maximum_month_range = 3 diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg index 71044cfb..0ad24d08 100644 --- a/bin/packages/modules.cfg +++ b/bin/packages/modules.cfg @@ -121,6 +121,10 @@ publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags subscribe = Redis_Global publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags +[Binary] +subscribe = Redis_Global +publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags + [Bitcoin] subscribe = Redis_Global publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags diff --git a/var/www/modules/showpaste/templates/show_saved_paste.html b/var/www/modules/showpaste/templates/show_saved_paste.html index a9eb29f3..a07b2742 100644 --- a/var/www/modules/showpaste/templates/show_saved_paste.html +++ b/var/www/modules/showpaste/templates/show_saved_paste.html @@ -393,7 +393,7 @@ {% for b64 in l_64 %}   {{ b64[1] }} - {{ b64[2] }} ({{ b64[4] }}) + {{ b64[2] }} ({{ b64[4] }}) {{ b64[3] }} {% if vt_enabled %} From 280e9d8af8b075b1fadd47ab1eee9cc6e61bbd6e Mon Sep 17 00:00:00 2001 From: Terrtia Date: Wed, 18 Jul 2018 13:47:06 +0200 Subject: [PATCH 17/31] fix: Binary decoder base2 string to bytes --- bin/Binary.py | 10 +++++++--- bin/LAUNCH.sh | 2 ++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/bin/Binary.py b/bin/Binary.py index 00c422f6..29d6f2c5 100755 --- a/bin/Binary.py +++ b/bin/Binary.py @@ -30,9 +30,13 @@ def timeout_handler(signum, frame): signal.signal(signal.SIGALRM, timeout_handler) -def decode_binary_string(s): +def decode_binary_string(binary_string): return ''.join(chr(int(s[i*8:i*8+8],2)) for i in range(len(s)//8)) +def decode_binary(binary_string): + return bytes(bytearray([int(binary_string[i:i+8], 2) for i in range(0, len(binary_string), 8)])) + + def search_binary(content, message, date): find = False binary_list = re.findall(regex_binary, content) @@ -40,11 +44,11 @@ def search_binary(content, 
message, date): for binary in binary_list: if len(binary) >= 40 : - decode = decode_binary_string(binary).encode() - print(decode) + decode = decode_binary(binary) print(message) type = magic.from_buffer(decode, mime=True) + print(type) find = True hash = sha1(decode).hexdigest() diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh index 1f987479..d393d621 100755 --- a/bin/LAUNCH.sh +++ b/bin/LAUNCH.sh @@ -144,6 +144,8 @@ function launching_scripts { sleep 0.1 screen -S "Script_AIL" -X screen -t "Base64" bash -c './Base64.py; read x' sleep 0.1 + screen -S "Script_AIL" -X screen -t "Binary" bash -c './Binary.py; read x' + sleep 0.1 screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c './Bitcoin.py; read x' sleep 0.1 screen -S "Script_AIL" -X screen -t "Phone" bash -c './Phone.py; read x' From ad26f016e34c832a0718edae93e6d348f91fbf5e Mon Sep 17 00:00:00 2001 From: Terrtia Date: Thu, 19 Jul 2018 16:50:42 +0200 Subject: [PATCH 18/31] add: Decoder Module, decode binary, hex and base64 --- OVERVIEW.md | 14 ++++++++++---- bin/LAUNCH.sh | 2 ++ bin/Web.py | 1 + bin/packages/config.cfg.sample | 5 +++++ 4 files changed, 18 insertions(+), 4 deletions(-) diff --git a/OVERVIEW.md b/OVERVIEW.md index d4ad525b..80a6f337 100644 --- a/OVERVIEW.md +++ b/OVERVIEW.md @@ -36,18 +36,24 @@ ARDB overview 'vt_report' vt_report 'nb_seen_in_all_pastes' nb_seen_in_all_pastes + SET - 'all_decoder' decoder* + + SET - 'hash_all_type' hash_type * SET - 'hash_base64_all_type' hash_type * SET - 'hash_binary_all_type' hash_type * + SET - 'hash_paste:'+paste hash * °°° SET - 'base64_paste:'+paste hash * SET - 'binary_paste:'+paste hash * + ZADD - 'hash_date:'+20180622 hash * °°° nb_seen_this_day ZADD - 'base64_date:'+20180622 hash * nb_seen_this_day + ZADD - 'binary_date:'+20180622 hash * nb_seen_this_day - ZADD - 'base64_hash'+hash paste * nb_seen_in_paste - ZADD - 'binary_hash'+hash paste * nb_seen_in_paste - - SET - 'hash_all_type' hash_type + ZADD - 'nb_seen_hash:'+hash paste * °°° nb_seen_in_paste + ZADD - 'base64_hash:'+hash paste * nb_seen_in_paste + ZADD - 'binary_hash:'+hash paste * nb_seen_in_paste + ZADD - 'hash_type:'+type date °°° nb_seen ZADD - 'base64_type:'+type date nb_seen ZADD - 'binary_type:'+type date nb_seen diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh index d393d621..64b4d552 100755 --- a/bin/LAUNCH.sh +++ b/bin/LAUNCH.sh @@ -146,6 +146,8 @@ function launching_scripts { sleep 0.1 screen -S "Script_AIL" -X screen -t "Binary" bash -c './Binary.py; read x' sleep 0.1 + screen -S "Script_AIL" -X screen -t "Decoder" bash -c './Decoder.py; read x' + sleep 0.1 screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c './Bitcoin.py; read x' sleep 0.1 screen -S "Script_AIL" -X screen -t "Phone" bash -c './Phone.py; read x' diff --git a/bin/Web.py b/bin/Web.py index 45e5bfbe..3d53e306 100755 --- a/bin/Web.py +++ b/bin/Web.py @@ -124,6 +124,7 @@ if __name__ == "__main__": except ipaddress.AddressValueError: continue cc = getattr(l, 'cc') + asn = '' if getattr(l, 'asn') is not None: asn = getattr(l, 'asn')[2:] #remobe b' diff --git a/bin/packages/config.cfg.sample b/bin/packages/config.cfg.sample index eddd5165..08b75324 100644 --- a/bin/packages/config.cfg.sample +++ b/bin/packages/config.cfg.sample @@ -2,6 +2,7 @@ bloomfilters = Blooms dicofilters = Dicos pastes = PASTES +hash = HASHS base64 = BASE64 wordtrending_csv = var/www/static/csv/wordstrendingdata @@ -65,6 +66,10 @@ max_execution_time = 60 path = Base64/ max_execution_time = 60 +[Hex] +path = Base64/ +max_execution_time = 60 + [Modules_Duplicates] #Number of month to 
look back maximum_month_range = 3 From 0e67b569063b146694755d7d471d05c173ad3b44 Mon Sep 17 00:00:00 2001 From: Terrtia Date: Thu, 19 Jul 2018 16:52:09 +0200 Subject: [PATCH 19/31] add: Decoder module --- bin/Decoder.py | 230 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 230 insertions(+) create mode 100755 bin/Decoder.py diff --git a/bin/Decoder.py b/bin/Decoder.py new file mode 100755 index 00000000..b8ed92aa --- /dev/null +++ b/bin/Decoder.py @@ -0,0 +1,230 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" + Decoder module + + Dectect Binary and decode it +""" +import time +import os +import redis +import base64 +from hashlib import sha1 +import magic +import json +import datetime + +from pubsublogger import publisher + +from Helper import Process +from packages import Paste + +import re +import signal + +class TimeoutException(Exception): + pass + +def timeout_handler(signum, frame): + raise TimeoutException + +signal.signal(signal.SIGALRM, timeout_handler) + +def hex_decoder(hexStr): + #hexStr = ''.join( hex_string.split(" ") ) + print( bytes(bytearray([int(hexStr[i:i+2], 16) for i in range(0, len(hexStr), 2)])) ) + return bytes(bytearray([int(hexStr[i:i+2], 16) for i in range(0, len(hexStr), 2)])) + +def binary_decoder(binary_string): + return bytes(bytearray([int(binary_string[i:i+8], 2) for i in range(0, len(binary_string), 8)])) + +def base64_decoder(base64_string): + return base64.b64decode(base64_string) + +def decode_string(content, message, date, encoded_list, decoder_name, encoded_min_size): + find = False + print('list') + print(encoded_min_size) + print(encoded_list) + for encoded in encoded_list: + print(len(encoded)) + if len(encoded) >= encoded_min_size: + decode = decoder_function[decoder_name](encoded) + find = True + + save_hash(decoder_name, message, date, decode) + + #remove encoded from paste content + content = content.replace(encoded, '', 1) + + if(find): + set_out_paste(decoder_name, message) + + return content + +# # TODO: FIXME check db +def save_hash(decoder_name, message, date, decoded): + type = magic.from_buffer(decoded, mime=True) + print(type) + hash = sha1(decoded).hexdigest() + print(hash) + + data = {} + data['name'] = hash + data['date'] = datetime.datetime.now().strftime("%d/%m/%y") + data['origin'] = message + data['estimated type'] = type + json_data = json.dumps(data) + + date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) + date_key = date[0:4] + date[4:6] + date[6:8] + + serv_metadata.zincrby(decoder_name+'_date:'+date_key, hash, 1) + + # first time we see this hash + if not serv_metadata.hexists('metadata_hash:'+hash, 'estimated_type'): + serv_metadata.hset('metadata_hash:'+hash, 'first_seen', date_paste) + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + else: + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + + # first time we see this file encoding on this paste + if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None: + print('first '+decoder_name) + serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + + serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map + serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map + serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1)# hash - paste map + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) + serv_metadata.sadd('hash_all_type', type) + serv_metadata.sadd('hash_'+ decoder_name +'_all_type', 
type) + serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1) + + save_hash_on_disk(decoded, type, hash, json_data) + print('found {} '.format(type)) + # duplicate + else: + serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1) # number of b64 on this paste + + +def save_hash_on_disk(decode, type, hash, json_data): + + local_filename_hash = os.path.join(p.config.get("Directories", "hash"), type, hash[:2], hash) + filename_hash = os.path.join(os.environ['AIL_HOME'], local_filename_hash) + + filename_json = os.path.join(os.environ['AIL_HOME'], + p.config.get("Directories", "hash"), type, hash[:2], hash + '.json') + + dirname = os.path.dirname(filename_hash) + if not os.path.exists(dirname): + os.makedirs(dirname) + + with open(filename_hash, 'wb') as f: + f.write(decode) + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'saved_path', local_filename_hash) + serv_metadata.hset('metadata_hash:'+hash, 'size', os.path.getsize(filename_hash)) + + with open(filename_json, 'w') as f: + f.write(json_data) + +def set_out_paste(decoder_name, message): + publisher.warning(decoder_name+' decoded') + #Send to duplicate + p.populate_set_out(message, 'Duplicate') + #send to Browse_warning_paste + msg = (decoder_name+';{}'.format(message)) + p.populate_set_out( msg, 'alertHandler') + + msg = 'infoleak:automatic-detection="'+decoder_name+'";{}'.format(message) + p.populate_set_out(msg, 'Tags') + + +if __name__ == '__main__': + # If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. + publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'Decoder' + + # Setup the I/O queues + p = Process(config_section) + + serv_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.getint("ARDB_Metadata", "port"), + db=p.config.getint("ARDB_Metadata", "db"), + decode_responses=True) + + # Sent to the logging a description of the module + publisher.info("Decoder started") + + regex_binary = '[0-1]{40,}' + #regex_hex = '(0[xX])?[A-Fa-f0-9]{40,}' + regex_hex = '[A-Fa-f0-9]{40,}' + regex_base64 = '(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)' + + '''re.compile(regex_binary) + re.compile(regex_hex) + re.compile(regex_base64)''' + + # map decoder function + decoder_function = {'binary':binary_decoder,'hex':hex_decoder, 'base64':base64_decoder} + + hex_max_execution_time = p.config.getint("Hex", "max_execution_time") + binary_max_execution_time = p.config.getint("Binary", "max_execution_time") + base64_max_execution_time = p.config.getint("Base64", "max_execution_time") + + # list all decoder yith regex, the order is use to search content by order + all_decoder = [ {'name': 'binary', 'regex': regex_binary, 'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time}, + {'name': 'hex', 'regex': regex_hex, 'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time}, + {'name': 'base64', 'regex': regex_base64, 'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time}] + + for decoder in all_decoder: + serv_metadata.sadd('all_decoder', decoder['name']) + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + if message is None: + + publisher.debug("{} queue is empty, 
waiting".format(config_section)) + time.sleep(1) + continue + + filename = message + paste = Paste.Paste(filename) + + # Do something with the message from the queue + content = paste.get_p_content() + date = str(paste._get_p_date()) + + for decoder in all_decoder: # add threshold and size limit + print(decoder['name']) + + # max execution time on regex + signal.alarm(decoder['max_execution_time']) + try: + print(content) + encoded_list = re.findall(decoder['regex'], content) + #encoded_list = re.findall(decoder['regex'], content) + print(decoder['regex']) + print(encoded_list) + except TimeoutException: + encoded_list = [] + p.incr_module_timeout_statistic() # add encoder type + print ("{0} processing timeout".format(paste.p_path)) + continue + else: + signal.alarm(0) + + if(len(encoded_list) > 0): + content = decode_string(content, message, date, encoded_list, decoder['name'], decoder['encoded_min_size']) From 9a8e37fb0b9c2038e5d9c93b9234498578f34a49 Mon Sep 17 00:00:00 2001 From: Terrtia Date: Fri, 20 Jul 2018 09:43:09 +0200 Subject: [PATCH 20/31] fix: decoder bdd --- OVERVIEW.md | 8 ++++---- bin/Decoder.py | 18 ++++++------------ bin/packages/modules.cfg | 4 ++++ 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/OVERVIEW.md b/OVERVIEW.md index 80a6f337..64f9179e 100644 --- a/OVERVIEW.md +++ b/OVERVIEW.md @@ -42,18 +42,18 @@ ARDB overview SET - 'hash_base64_all_type' hash_type * SET - 'hash_binary_all_type' hash_type * - SET - 'hash_paste:'+paste hash * °°° + SET - 'hash_paste:'+paste hash * SET - 'base64_paste:'+paste hash * SET - 'binary_paste:'+paste hash * - ZADD - 'hash_date:'+20180622 hash * °°° nb_seen_this_day + ZADD - 'hash_date:'+20180622 hash * nb_seen_this_day ZADD - 'base64_date:'+20180622 hash * nb_seen_this_day ZADD - 'binary_date:'+20180622 hash * nb_seen_this_day - ZADD - 'nb_seen_hash:'+hash paste * °°° nb_seen_in_paste + ZADD - 'nb_seen_hash:'+hash paste * nb_seen_in_paste ZADD - 'base64_hash:'+hash paste * nb_seen_in_paste ZADD - 'binary_hash:'+hash paste * nb_seen_in_paste - ZADD - 'hash_type:'+type date °°° nb_seen + ZADD - 'hash_type:'+type date nb_seen ZADD - 'base64_type:'+type date nb_seen ZADD - 'binary_type:'+type date nb_seen diff --git a/bin/Decoder.py b/bin/Decoder.py index b8ed92aa..38975e67 100755 --- a/bin/Decoder.py +++ b/bin/Decoder.py @@ -32,7 +32,6 @@ signal.signal(signal.SIGALRM, timeout_handler) def hex_decoder(hexStr): #hexStr = ''.join( hex_string.split(" ") ) - print( bytes(bytearray([int(hexStr[i:i+2], 16) for i in range(0, len(hexStr), 2)])) ) return bytes(bytearray([int(hexStr[i:i+2], 16) for i in range(0, len(hexStr), 2)])) def binary_decoder(binary_string): @@ -43,11 +42,7 @@ def base64_decoder(base64_string): def decode_string(content, message, date, encoded_list, decoder_name, encoded_min_size): find = False - print('list') - print(encoded_min_size) - print(encoded_list) for encoded in encoded_list: - print(len(encoded)) if len(encoded) >= encoded_min_size: decode = decoder_function[decoder_name](encoded) find = True @@ -79,6 +74,7 @@ def save_hash(decoder_name, message, date, decoded): date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) date_key = date[0:4] + date[4:6] + date[6:8] + serv_metadata.zincrby('hash_date:'+date_key, hash, 1) serv_metadata.zincrby(decoder_name+'_date:'+date_key, hash, 1) # first time we see this hash @@ -93,14 +89,16 @@ def save_hash(decoder_name, message, date, decoded): print('first '+decoder_name) serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + 
serv_metadata.sadd('hash_paste:'+message, hash) # paste - hash map serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map - serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map + serv_metadata.zincrby('nb_seen_hash:'+hash, message, 1)# hash - paste map serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1)# hash - paste map # create hash metadata serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) serv_metadata.sadd('hash_all_type', type) serv_metadata.sadd('hash_'+ decoder_name +'_all_type', type) + serv_metadata.zincrby('hash_type:'+type, date_key, 1) serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1) save_hash_on_disk(decoded, type, hash, json_data) @@ -171,9 +169,9 @@ if __name__ == '__main__': regex_hex = '[A-Fa-f0-9]{40,}' regex_base64 = '(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)' - '''re.compile(regex_binary) + re.compile(regex_binary) re.compile(regex_hex) - re.compile(regex_base64)''' + re.compile(regex_base64) # map decoder function decoder_function = {'binary':binary_decoder,'hex':hex_decoder, 'base64':base64_decoder} @@ -213,11 +211,7 @@ if __name__ == '__main__': # max execution time on regex signal.alarm(decoder['max_execution_time']) try: - print(content) encoded_list = re.findall(decoder['regex'], content) - #encoded_list = re.findall(decoder['regex'], content) - print(decoder['regex']) - print(encoded_list) except TimeoutException: encoded_list = [] p.incr_module_timeout_statistic() # add encoder type diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg index 0ad24d08..4dc7edd4 100644 --- a/bin/packages/modules.cfg +++ b/bin/packages/modules.cfg @@ -117,6 +117,10 @@ publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags subscribe = Redis_ApiKey publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags +[Decoder] +subscribe = Redis_Global +publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags + [Base64] subscribe = Redis_Global publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags From bd5f83f0ebd8d8c060446c019d35e0c6ef5e492c Mon Sep 17 00:00:00 2001 From: Terrtia Date: Fri, 20 Jul 2018 10:32:52 +0200 Subject: [PATCH 21/31] chg: refractor base64 encoded to hash --- OVERVIEW.md | 2 + bin/DbDump.py | 133 ++++++++++ bin/Decoder.py | 3 +- bin/Dox.py | 96 ++++++++ bin/LAUNCH.sh | 4 - bin/packages/modules.cfg | 8 - .../base64Decoded/templates/base64_type.html | 179 -------------- .../base64Decoded/templates/base64_types.html | 229 ------------------ .../templates/header_base64Decoded.html | 1 - .../Flask_hashDecoded.py} | 84 +++---- .../templates/hashDecoded.html} | 40 +-- .../templates/header_hashDecoded.html | 1 + .../templates/showHash.html | 10 +- var/www/modules/showpaste/Flask_showpaste.py | 8 +- .../showpaste/templates/show_saved_paste.html | 8 +- 15 files changed, 309 insertions(+), 497 deletions(-) create mode 100755 bin/DbDump.py create mode 100755 bin/Dox.py delete mode 100644 var/www/modules/base64Decoded/templates/base64_type.html delete mode 100644 var/www/modules/base64Decoded/templates/base64_types.html delete mode 100644 var/www/modules/base64Decoded/templates/header_base64Decoded.html rename var/www/modules/{base64Decoded/Flask_base64Decoded.py => hashDecoded/Flask_hashDecoded.py} (86%) rename var/www/modules/{base64Decoded/templates/base64Decoded.html => hashDecoded/templates/hashDecoded.html} (89%) create mode 100644 var/www/modules/hashDecoded/templates/header_hashDecoded.html rename var/www/modules/{base64Decoded => 
hashDecoded}/templates/showHash.html (97%) diff --git a/OVERVIEW.md b/OVERVIEW.md index 64f9179e..335e1d95 100644 --- a/OVERVIEW.md +++ b/OVERVIEW.md @@ -35,6 +35,8 @@ ARDB overview 'vt_link' vt_link 'vt_report' vt_report 'nb_seen_in_all_pastes' nb_seen_in_all_pastes + 'base64_decoder' nb_encoded + 'binary_decoder' nb_encoded SET - 'all_decoder' decoder* diff --git a/bin/DbDump.py b/bin/DbDump.py new file mode 100755 index 00000000..0eb46d16 --- /dev/null +++ b/bin/DbDump.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" + DbDump + + +""" + +import time + +from pubsublogger import publisher + +from Helper import Process +from packages import Paste + +def get_lines(content): + + is_db_leak = False + + list_lines = content.splitlines() + list_separators = [] + if len(list_lines) > 0: + for line in list_lines: + list_separators.append(search_separator(line)) + + threshold_num_separator_line = 0 + # Minimum number of separator per line + threshold_min_separator_line = 7 + same_separator = 0 + num_separator = 0 + current_separator = '' + + for separator in list_separators: + if separator != '': + #same separator on the next line + if separator[0] == current_separator: + if abs(separator[1] - num_separator) <= threshold_num_separator_line: + if num_separator > threshold_min_separator_line: + same_separator += 1 + else: + num_separator = separator[1] + same_separator = 0 + else: + # FIXME: enhancement ? + num_separator = separator[1] + + if(same_separator >= 5): + is_db_leak = True + #different operator + else: + #change the current separator + current_separator = separator[0] + same_separator = 0 + num_separator = 0 + + return is_db_leak + + +def search_separator(line): + list_separator = [] + #count separators + #list_separator.append( (';', line.count(';')) ) + #list_separator.append( (',', line.count(',')) ) + list_separator.append( (';', line.count(';')) ) + list_separator.append( ('|', line.count('|')) ) + #list_separator.append( (':', line.count(':')) ) + + separator = '' + separator_number = 0 + + # line separator + for potential_separator in list_separator: + if potential_separator[1] > separator_number: + separator = potential_separator[0] + separator_number = potential_separator[1] + + return (separator, separator_number) + + +if __name__ == '__main__': + # If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. 
+ publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'DbDump' + + # Setup the I/O queues + p = Process(config_section) + + # Sent to the logging a description of the module + publisher.info("DbDump started") + + + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + if message is None: + + publisher.debug("{} queue is empty, waiting".format(config_section)) + time.sleep(1) + continue + + filename = message + paste = Paste.Paste(filename) + + # Do something with the message from the queue + print(filename) + content = paste.get_p_content() + is_db_leak = get_lines(content) + + if is_db_leak: + + to_print = 'DbDump;{};{};{};'.format( + paste.p_source, paste.p_date, paste.p_name) + + print('found DbDump') + print(to_print) + publisher.warning('{}Checked found Database Dump;{}'.format( + to_print, paste.p_path)) + + msg = 'dbdump;{}'.format(filename) + p.populate_set_out(msg, 'alertHandler') + + msg = 'dbdump;{}'.format(filename) + p.populate_set_out(msg, 'Tags') + + #Send to duplicate + p.populate_set_out(filename, 'Duplicate') diff --git a/bin/Decoder.py b/bin/Decoder.py index 38975e67..cd37b9bf 100755 --- a/bin/Decoder.py +++ b/bin/Decoder.py @@ -59,6 +59,7 @@ def decode_string(content, message, date, encoded_list, decoder_name, encoded_mi # # TODO: FIXME check db def save_hash(decoder_name, message, date, decoded): + print(decoder_name) type = magic.from_buffer(decoded, mime=True) print(type) hash = sha1(decoded).hexdigest() @@ -88,6 +89,7 @@ def save_hash(decoder_name, message, date, decoded): if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None: print('first '+decoder_name) serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1) serv_metadata.sadd('hash_paste:'+message, hash) # paste - hash map serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map @@ -206,7 +208,6 @@ if __name__ == '__main__': date = str(paste._get_p_date()) for decoder in all_decoder: # add threshold and size limit - print(decoder['name']) # max execution time on regex signal.alarm(decoder['max_execution_time']) diff --git a/bin/Dox.py b/bin/Dox.py new file mode 100755 index 00000000..cb762867 --- /dev/null +++ b/bin/Dox.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +""" +The Dox Module +====================== + +This module is consuming the Redis-list created by the Categ module. 
+ +""" + + +import pprint +import time +from packages import Paste +from packages import lib_refine +from pubsublogger import publisher +import re +import sys + +from Helper import Process + +if __name__ == "__main__": + publisher.port = 6380 + publisher.channel = "Script" + + config_section = 'Dox' + + p = Process(config_section) + + # FUNCTIONS # + publisher.info("Dox module") + + channel = 'dox_categ' + + regex = re.compile('name|age', re.IGNORECASE) + + while True: + message = p.get_from_set() + + + if message is not None: + filepath, count = message.split(' ') + filename, score = message.split() + paste = Paste.Paste(filename) + content = paste.get_p_content() + + count = 0 + + tmp = paste._get_word('name') + if (len(tmp) > 0): + print(tmp) + count += tmp[1] + tmp = paste._get_word('Name') + if (len(tmp) > 0): + print(tmp) + count += tmp[1] + tmp = paste._get_word('NAME') + if (len(tmp) > 0): + print(tmp) + count += tmp[1] + tmp = paste._get_word('age') + if (len(tmp) > 0): + count += tmp[1] + tmp = paste._get_word('Age') + if (len(tmp) > 0): + count += tmp[1] + tmp = paste._get_word('AGE') + if (len(tmp) > 0): + count += tmp[1] + tmp = paste._get_word('address') + if (len(tmp) > 0): + count += tmp[1] + tmp = paste._get_word('Address') + if (len(tmp) > 0): + count += tmp[1] + tmp = paste._get_word('ADDRESS') + if (len(tmp) > 0): + count += tmp[1] + + #dox_list = re.findall(regex, content) + if(count > 0): + + #Send to duplicate + p.populate_set_out(filepath, 'Duplicate') + #Send to alertHandler + msg = 'dox;{}'.format(filepath) + p.populate_set_out(msg, 'alertHandler') + + print(filename) + print(content) + print('--------------------------------------------------------------------------------------') + + else: + publisher.debug("Script creditcard is idling 1m") + time.sleep(10) diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh index 64b4d552..5621287a 100755 --- a/bin/LAUNCH.sh +++ b/bin/LAUNCH.sh @@ -142,10 +142,6 @@ function launching_scripts { sleep 0.1 screen -S "Script_AIL" -X screen -t "Keys" bash -c './Keys.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Base64" bash -c './Base64.py; read x' - sleep 0.1 - screen -S "Script_AIL" -X screen -t "Binary" bash -c './Binary.py; read x' - sleep 0.1 screen -S "Script_AIL" -X screen -t "Decoder" bash -c './Decoder.py; read x' sleep 0.1 screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c './Bitcoin.py; read x' diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg index 4dc7edd4..06217452 100644 --- a/bin/packages/modules.cfg +++ b/bin/packages/modules.cfg @@ -121,14 +121,6 @@ publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags subscribe = Redis_Global publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags -[Base64] -subscribe = Redis_Global -publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags - -[Binary] -subscribe = Redis_Global -publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags - [Bitcoin] subscribe = Redis_Global publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags diff --git a/var/www/modules/base64Decoded/templates/base64_type.html b/var/www/modules/base64Decoded/templates/base64_type.html deleted file mode 100644 index 64c25b19..00000000 --- a/var/www/modules/base64Decoded/templates/base64_type.html +++ /dev/null @@ -1,179 +0,0 @@ - - - - - - - - Analysis Information Leak framework Dashboard - - - - - - - - - - - - - - - - - - - - {% include 'navbar.html' %} - -
    -
    -
    -

    Base64 Files

    -
    - - -
    -
    - -
    - - - - - - - - - diff --git a/var/www/modules/base64Decoded/templates/base64_types.html b/var/www/modules/base64Decoded/templates/base64_types.html deleted file mode 100644 index addb0c37..00000000 --- a/var/www/modules/base64Decoded/templates/base64_types.html +++ /dev/null @@ -1,229 +0,0 @@ - - - - - - - - Analysis Information Leak framework Dashboard - - - - - - - - - - - - - - - - - - - -
    -
    -
    -

    Base64 Files

    -
    - - -
    -
    - -
    - - - - - - - - - diff --git a/var/www/modules/base64Decoded/templates/header_base64Decoded.html b/var/www/modules/base64Decoded/templates/header_base64Decoded.html deleted file mode 100644 index aa76406c..00000000 --- a/var/www/modules/base64Decoded/templates/header_base64Decoded.html +++ /dev/null @@ -1 +0,0 @@ -
  • base64Decoded
  • diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py similarity index 86% rename from var/www/modules/base64Decoded/Flask_base64Decoded.py rename to var/www/modules/hashDecoded/Flask_hashDecoded.py index 0aef886e..16534e79 100644 --- a/var/www/modules/base64Decoded/Flask_base64Decoded.py +++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py @@ -25,7 +25,7 @@ r_serv_metadata = Flask_config.r_serv_metadata vt_enabled = Flask_config.vt_enabled vt_auth = Flask_config.vt_auth -base64Decoded = Blueprint('base64Decoded', __name__, template_folder='templates') +hashDecoded = Blueprint('hashDecoded', __name__, template_folder='templates') # ============ FUNCTIONS ============ @@ -52,7 +52,7 @@ def substract_date(date_from, date_to): def list_sparkline_values(date_range_sparkline, hash): sparklines_value = [] for date_day in date_range_sparkline: - nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date_day, hash) + nb_seen_this_day = r_serv_metadata.zscore('hash_date:'+date_day, hash) if nb_seen_this_day is None: nb_seen_this_day = 0 sparklines_value.append(int(nb_seen_this_day)) @@ -94,16 +94,16 @@ def one(): return 1 # ============= ROUTES ============== -@base64Decoded.route("/base64Decoded/all_base64_search", methods=['POST']) -def all_base64_search(): +@hashDecoded.route("/hashDecoded/all_hash_search", methods=['POST']) +def all_hash_search(): date_from = request.form.get('date_from') date_to = request.form.get('date_to') type = request.form.get('type') print(type) - return redirect(url_for('base64Decoded.base64Decoded_page', date_from=date_from, date_to=date_to, type=type)) + return redirect(url_for('hashDecoded.hashDecoded_page', date_from=date_from, date_to=date_to, type=type)) -@base64Decoded.route("/base64Decoded/", methods=['GET']) -def base64Decoded_page(): +@hashDecoded.route("/hashDecoded/", methods=['GET']) +def hashDecoded_page(): date_from = request.args.get('date_from') date_to = request.args.get('date_to') type = request.args.get('type') @@ -150,7 +150,7 @@ def base64Decoded_page(): l_64 = set() for date in date_range: - l_hash = r_serv_metadata.zrange('base64_date:' +date, 0, -1) + l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1) if l_hash: for hash in l_hash: l_64.add(hash) @@ -198,34 +198,34 @@ def base64Decoded_page(): l_type = r_serv_metadata.smembers('hash_all_type') - return render_template("base64Decoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, l_type=l_type, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date, + return render_template("hashDecoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, l_type=l_type, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date, date_from=date_from, date_to=date_to) -@base64Decoded.route('/base64Decoded/hash_by_type') +@hashDecoded.route('/hashDecoded/hash_by_type') def hash_by_type(): type = request.args.get('type') type = 'text/plain' - return render_template('base64_type.html',type = type) + return render_template('hash_type.html',type = type) -@base64Decoded.route('/base64Decoded/base64_hash') -def base64_hash(): +@hashDecoded.route('/hashDecoded/hash_hash') +def hash_hash(): hash = request.args.get('hash') - return render_template('base64_hash.html') + return render_template('hash_hash.html') -@base64Decoded.route('/base64Decoded/showHash') +@hashDecoded.route('/hashDecoded/showHash') def showHash(): hash = request.args.get('hash') #hash = 'e02055d3efaad5d656345f6a8b1b6be4fe8cb5ea' # TODO FIXME show error if 
hash is None: - return base64Decoded_page() + return hashDecoded_page() estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') # hash not found # TODO FIXME show error if estimated_type is None: - return base64Decoded_page() + return hashDecoded_page() else: file_icon = get_file_icon(estimated_type) @@ -256,7 +256,7 @@ def showHash(): first_seen=first_seen, last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values) -@app.route('/base64Decoded/downloadHash') +@app.route('/hashDecoded/downloadHash') def downloadHash(): hash = request.args.get('hash') # sanitize hash @@ -291,7 +291,7 @@ def downloadHash(): else: return 'hash: ' + hash + " don't exist" -@base64Decoded.route('/base64Decoded/hash_by_type_json') +@hashDecoded.route('/hashDecoded/hash_by_type_json') def hash_by_type_json(): type = request.args.get('type') @@ -305,7 +305,7 @@ def hash_by_type_json(): if type in r_serv_metadata.smembers('hash_all_type'): type_value = [] for date in date_range_sparkline: - num_day_type = r_serv_metadata.zscore('base64_type:'+type, date) + num_day_type = r_serv_metadata.zscore('hash_type:'+type, date) if num_day_type is None: num_day_type = 0 date = date[0:4] + '-' + date[4:6] + '-' + date[6:8] @@ -315,12 +315,12 @@ def hash_by_type_json(): else: return jsonify() -@base64Decoded.route('/base64Decoded/daily_type_json') +@hashDecoded.route('/hashDecoded/daily_type_json') def daily_type_json(): date = request.args.get('date') daily_type = set() - l_b64 = r_serv_metadata.zrange('base64_date:' +date, 0, -1) + l_b64 = r_serv_metadata.zrange('hash_date:' +date, 0, -1) for hash in l_b64: estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') if estimated_type is not None: @@ -328,12 +328,12 @@ def daily_type_json(): type_value = [] for day_type in daily_type: - num_day_type = r_serv_metadata.zscore('base64_type:'+day_type, date) + num_day_type = r_serv_metadata.zscore('hash_type:'+day_type, date) type_value.append({ 'date' : day_type, 'value' : int( num_day_type )}) return jsonify(type_value) -@base64Decoded.route('/base64Decoded/range_type_json') +@hashDecoded.route('/hashDecoded/range_type_json') def range_type_json(): date_from = request.args.get('date_from') date_to = request.args.get('date_to') @@ -351,7 +351,7 @@ def range_type_json(): all_type = set() for date in date_range: - l_hash = r_serv_metadata.zrange('base64_date:' +date, 0, -1) + l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1) if l_hash: for hash in l_hash: estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') @@ -362,7 +362,7 @@ def range_type_json(): day_type = {} day_type['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8] for type in all_type: - num_day_type = r_serv_metadata.zscore('base64_type:'+type, date) + num_day_type = r_serv_metadata.zscore('hash_type:'+type, date) if num_day_type is None: num_day_type = 0 day_type[type]= num_day_type @@ -370,7 +370,7 @@ def range_type_json(): return jsonify(range_type) -@base64Decoded.route('/base64Decoded/hash_graph_line_json') +@hashDecoded.route('/hashDecoded/hash_graph_line_json') def hash_graph_line_json(): hash = request.args.get('hash') date_from = request.args.get('date_from') @@ -390,7 +390,7 @@ def hash_graph_line_json(): if r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') is not None: json_seen_in_paste = [] for date in date_range_seen_in_pastes: - nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date, hash) + nb_seen_this_day = 
r_serv_metadata.zscore('hash_date:'+date, hash) if nb_seen_this_day is None: nb_seen_this_day = 0 date = date[0:4] + '-' + date[4:6] + '-' + date[6:8] @@ -401,7 +401,7 @@ def hash_graph_line_json(): return jsonify() -@base64Decoded.route('/base64Decoded/hash_graph_node_json') +@hashDecoded.route('/hashDecoded/hash_graph_node_json') def hash_graph_node_json(): hash = request.args.get('hash') @@ -422,16 +422,16 @@ def hash_graph_node_json(): nodes_set_hash.add((hash, 1, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) #get related paste - l_pastes = r_serv_metadata.zrange('base64_hash:'+hash, 0, -1) + l_pastes = r_serv_metadata.zrange('nb_seen_hash:'+hash, 0, -1) for paste in l_pastes: url = paste - #nb_seen_in_this_paste = nb_in_file = int(r_serv_metadata.zscore('base64_hash:'+hash, paste)) - nb_base64_in_paste = r_serv_metadata.scard('base64_paste:'+paste) + #nb_seen_in_this_paste = nb_in_file = int(r_serv_metadata.zscore('nb_seen_hash:'+hash, paste)) + nb_hash_in_paste = r_serv_metadata.scard('hash_paste:'+paste) - nodes_set_paste.add((paste, 2,nb_base64_in_paste,url)) + nodes_set_paste.add((paste, 2,nb_hash_in_paste,url)) links_set.add((hash, paste)) - l_hash = r_serv_metadata.smembers('base64_paste:'+paste) + l_hash = r_serv_metadata.smembers('hash_paste:'+paste) for child_hash in l_hash: if child_hash != hash: url = child_hash @@ -444,12 +444,12 @@ def hash_graph_node_json(): nodes_set_hash.add((child_hash, 3, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url)) links_set.add((child_hash, paste)) - #l_pastes_child = r_serv_metadata.zrange('base64_hash:'+child_hash, 0, -1) + #l_pastes_child = r_serv_metadata.zrange('nb_seen_hash:'+child_hash, 0, -1) #for child_paste in l_pastes_child: nodes = [] for node in nodes_set_hash: - nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('base64Decoded.showHash', hash=node[7]), 'hash': True}) + nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('hashDecoded.showHash', hash=node[7]), 'hash': True}) for node in nodes_set_paste: nodes.append({"id": node[0], "group": node[1], "nb_seen_in_paste": node[2],"url": url_for('showsavedpastes.showsavedpaste', paste=node[3]), 'hash': False}) links = [] @@ -461,13 +461,13 @@ def hash_graph_node_json(): else: return jsonify({}) -@base64Decoded.route('/base64Decoded/base64_types') -def base64_types(): +@hashDecoded.route('/hashDecoded/hash_types') +def hash_types(): date_from = 20180701 date_to = 20180706 - return render_template('base64_types.html', date_from=date_from, date_to=date_to) + return render_template('hash_types.html', date_from=date_from, date_to=date_to) -@base64Decoded.route('/base64Decoded/send_file_to_vt_js') +@hashDecoded.route('/hashDecoded/send_file_to_vt_js') def send_file_to_vt_js(): hash = request.args.get('hash') @@ -490,7 +490,7 @@ def send_file_to_vt_js(): return jsonify({'vt_link': vt_link, 'vt_report': vt_report}) -@base64Decoded.route('/base64Decoded/update_vt_result') +@hashDecoded.route('/hashDecoded/update_vt_result') def update_vt_result(): hash = request.args.get('hash') @@ -525,4 +525,4 @@ def update_vt_result(): return jsonify() # ========= REGISTRATION ========= -app.register_blueprint(base64Decoded) 
+app.register_blueprint(hashDecoded) diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html similarity index 89% rename from var/www/modules/base64Decoded/templates/base64Decoded.html rename to var/www/modules/hashDecoded/templates/hashDecoded.html index bc71ed44..a82d90a5 100644 --- a/var/www/modules/base64Decoded/templates/base64Decoded.html +++ b/var/www/modules/hashDecoded/templates/hashDecoded.html @@ -59,7 +59,7 @@
    -

    Base64 Files

    +

    Hash Files

    @@ -76,7 +76,7 @@
    Select a date range : -
    +
    @@ -110,9 +110,9 @@ {% if l_64|length != 0 %} {% if date_from|string == date_to|string %} -

    {{ date_from }} Base64 files:

    +

    {{ date_from }} Hash files:

    {% else %} -

    {{ date_from }} to {{ date_to }} Base64 files:

    +

    {{ date_from }} to {{ date_to }} Hash files:

    {% endif %} @@ -131,7 +131,7 @@ {% for b64 in l_64 %} - + @@ -163,9 +163,9 @@
      {{ b64[1] }}{{ b64[2] }}{{ b64[2] }} {{ b64[5] }} {{ b64[6] }} {{ b64[3] }}
 {% else %}
   {% if date_from|string == date_to|string %}
-			{{ date_from }}, No base64
+			{{ date_from }}, No Hash
   {% else %}
-			{{ date_from }} to {{ date_to }}, No base64
+			{{ date_from }} to {{ date_to }}, No Hash
   {% endif %}
 {% endif %}
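The pie-decoder fix in the next commit has one easy-to-miss step: a '+' inside a MIME type (e.g. image/svg+xml) is decoded as a space when it arrives in the query string, so decoder_type_json has to put the '+' back before checking the value against hash_all_type. A self-contained sketch of that round trip (standard library only; the query string is a made-up example):

    from urllib.parse import parse_qs

    # '+' in a query string is shorthand for a space,
    # so 'image/svg+xml' reaches the server as 'image/svg xml'
    typ = parse_qs('type=image/svg+xml')['type'][0]
    assert typ == 'image/svg xml'

    typ = typ.replace(' ', '+')     # the restore step from the patch
    assert typ == 'image/svg+xml'

Validating the restored value against the hash_all_type set then doubles as input sanitisation, since anything not seen by the decoders is discarded.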
@@ -176,7 +176,7 @@
+

From 3479923f96490f84370eb09c1a916074a337d77d Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Mon, 23 Jul 2018 11:43:48 +0200
Subject: [PATCH 25/31] fix: [UI-Hash] pie decoder

---
 .../modules/hashDecoded/Flask_hashDecoded.py | 21 +++++++++++++++----
 .../hashDecoded/templates/hashDecoded.html   |  5 +++--
 2 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/var/www/modules/hashDecoded/Flask_hashDecoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py
index 72d664d4..29f15275 100644
--- a/var/www/modules/hashDecoded/Flask_hashDecoded.py
+++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py
@@ -350,10 +350,23 @@ def decoder_type_json():
     date_from = request.args.get('date_from')
     date_to = request.args.get('date_to')
-    type = request.args.get('type')
-    encoding = request.args.get('encoding')
+    typ = request.args.get('type')
+
+    if typ == 'All types':
+        typ = None
+
+    # verify file type input
+    if typ is not None:
+        #retrieve + char
+        typ = typ.replace(' ', '+')
+        if typ not in r_serv_metadata.smembers('hash_all_type'):
+            typ = None
 
     all_decoder = r_serv_metadata.smembers('all_decoder')
+    # sort DESC decoder for color
+    all_decoder = sorted(all_decoder, reverse=True)
+    print(all_decoder)
+    print(type(all_decoder))
 
     date_range = []
     if date_from is not None and date_to is not None:
@@ -372,10 +385,10 @@ def decoder_type_json():
     nb_decoded = {}
     for date in date_range:
         for decoder in all_decoder:
-            if type is None:
+            if typ is None:
                 nb_decoded[decoder] = r_serv_metadata.get(decoder+'_decoded:'+date)
             else:
-                nb_decoded[decoder] = r_serv_metadata.hget(decoder+'_type:'+type, date)
+                nb_decoded[decoder] = r_serv_metadata.zscore(decoder+'_type:'+typ, date)
             if nb_decoded[decoder] is None:
                 nb_decoded[decoder] = 0
 
diff --git a/var/www/modules/hashDecoded/templates/hashDecoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html
index 64c4b61c..6cffa9cc 100644
--- a/var/www/modules/hashDecoded/templates/hashDecoded.html
+++ b/var/www/modules/hashDecoded/templates/hashDecoded.html
@@ -525,11 +525,12 @@
 var arc_pie = d3.arc()
   .innerRadius(0)
   .outerRadius(radius_pie);
 
-d3.json("/hashDecoded/decoder_type_json?date_from={{date_from}}&date_to={{date_to}}")
+d3.json("/hashDecoded/decoder_type_json?date_from={{date_from}}&date_to={{date_to}}&type={{type}}")
   .then(function(data){
 
     var pie_pie = d3.pie()
-      .value(function(d) { return d.value; });
+      .value(function(d) { return d.value; })
+      .sort(null);
 
     var path_pie = g_pie.selectAll('path')
       .data(pie_pie(data))

From 03a37af5cfe8e34c5cf43acc0288f08ed0cdf352 Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Wed, 25 Jul 2018 10:47:36 +0200
Subject: [PATCH 26/31] chg: [Hash] add reprocess regex order

---
 bin/Decoder.py                               | 46 +++++++++++--------
 .../modules/hashDecoded/Flask_hashDecoded.py |  4 +-
 .../hashDecoded/templates/hashDecoded.html   | 11 +++--
 3 files changed, 35 insertions(+), 26 deletions(-)

diff --git a/bin/Decoder.py b/bin/Decoder.py
index 7c9abb78..399cd4a8 100755
--- a/bin/Decoder.py
+++ b/bin/Decoder.py
@@ -85,29 +85,33 @@ def save_hash(decoder_name, message, date, decoded):
     else:
         serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste)
 
-    # first time we see this file encoding on this paste
-    if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None:
-        print('first '+decoder_name)
+    # first time we see this hash (all encoding) on this paste
+    if serv_metadata.zscore('nb_seen_hash:'+hash, message) is None:
         serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1)
-        serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1)
-        serv_metadata.sadd('hash_paste:'+message, hash) # paste - hash map
-        serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map
-        serv_metadata.zincrby('nb_seen_hash:'+hash, message, 1)# hash - paste map
-        serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1)# hash - paste map
 
-        # create hash metadata
         serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type)
         serv_metadata.sadd('hash_all_type', type)
+
+    # first time we see this hash encoding on this paste
+    if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None:
+        print('first '+decoder_name)
+
+        serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map
+
+        # create hash metadata
         serv_metadata.sadd('hash_'+ decoder_name +'_all_type', type)
-        serv_metadata.zincrby('hash_type:'+type, date_key, 1)
-        serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1)
 
         save_hash_on_disk(decoded, type, hash, json_data)
         print('found {} '.format(type))
-    # duplicate
-    else:
-        serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1) # number of b64 on this paste
+
+    serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1)
+
+    serv_metadata.zincrby('hash_type:'+type, date_key, 1)
+    serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1)
+
+    serv_metadata.zincrby('nb_seen_hash:'+hash, message, 1)# hash - paste map
+    serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1) # number of b64 on this paste
 
 
 def save_hash_on_disk(decode, type, hash, json_data):
@@ -182,12 +186,14 @@ if __name__ == '__main__':
     binary_max_execution_time = p.config.getint("Binary", "max_execution_time")
     base64_max_execution_time = p.config.getint("Base64", "max_execution_time")
 
-    # list all decoder yith regex, the order is use to search content by order
-    all_decoder = [ {'name': 'binary', 'regex': regex_binary, 'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time},
-                    {'name': 'hexadecimal', 'regex': regex_hex, 'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time},
-                    {'name': 'base64', 'regex': regex_base64, 'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time}]
+    # list all decoders with regex
+    decoder_binary = {'name': 'binary', 'regex': regex_binary, 'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time}
+    decoder_hexadecimal = {'name': 'hexadecimal', 'regex': regex_hex, 'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time}
+    decoder_base64 = {'name': 'base64', 'regex': regex_base64, 'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time}
 
+    decoder_order = [ decoder_base64, decoder_binary, decoder_hexadecimal, decoder_base64]
 
-    for decoder in all_decoder:
+    for decoder in decoder_order:
         serv_metadata.sadd('all_decoder', decoder['name'])
 
     # Endless loop getting messages from the input queue
@@ -207,7 +213,7 @@ if __name__ == '__main__':
             content = paste.get_p_content()
             date = str(paste._get_p_date())
 
-            for decoder in all_decoder: # add threshold and size limit
+            for decoder in decoder_order: # add threshold and size limit
                 # max execution time on regex
                 signal.alarm(decoder['max_execution_time'])
 
diff --git a/var/www/modules/hashDecoded/Flask_hashDecoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py
index 29f15275..174b5e10 100644
--- a/var/www/modules/hashDecoded/Flask_hashDecoded.py
+++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py
@@ -364,9 +364,7 @@ def decoder_type_json():
     all_decoder = r_serv_metadata.smembers('all_decoder')
     # sort DESC decoder for color
-    all_decoder = sorted(all_decoder, reverse=True)
-    print(all_decoder)
-    print(type(all_decoder))
+    all_decoder = sorted(all_decoder)
 
     date_range = []
     if date_from is not None and date_to is not None:

diff --git a/var/www/modules/hashDecoded/templates/hashDecoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html
index 6cffa9cc..84927981 100644
--- a/var/www/modules/hashDecoded/templates/hashDecoded.html
+++ b/var/www/modules/hashDecoded/templates/hashDecoded.html
@@ -47,6 +47,9 @@
     .bar_stack:hover{
       cursor: pointer;
     }
+    .pie_path:hover{
+      cursor: pointer;
+    }
     .svgText {
       pointer-events: none;
     }
@@ -318,7 +321,7 @@ function sparklines(id, points) {
@@ -301,7 +301,11 @@ var width = 400,
 
 var link;
 
-var transform = d3.zoomIdentity;
+var zoom = d3.zoom()
+  .scaleExtent([.2, 10])
+  .on("zoom", zoomed);
+
+//var transform = d3.zoomIdentity;
 
 var color = d3.scaleOrdinal(d3.schemeCategory10);
 
@@ -322,9 +326,9 @@ var svg_node = d3.select("#graph").append("svg")
   .call(d3.zoom().scaleExtent([1, 8]).on("zoom", zoomed))
   .on("dblclick.zoom", null)
 
-var container_graph = svg_node.append("g")
-  .attr("transform", "translate(40,0)")
-  .attr("transform", "scale(2)");
+var container_graph = svg_node.append("g");
+  //.attr("transform", "translate(40,0)")
+  //.attr("transform", "scale(2)");
 
 function create_graph(url){
 
@@ -372,6 +376,9 @@ d3.json(url)
           return d.icon
         }
       });
+  zoom.translateTo(svg_node, 200, 200);
+  zoom.scaleTo(svg_node, 2);
+
 /* node.append("text")
       .attr("dy", 3)
       .attr("dx", 7)
@@ -403,7 +410,7 @@ d3.json(url)
 }
 
 function zoomed() {
-container_graph.attr("transform", d3.event.transform);
+  container_graph.attr("transform", d3.event.transform);
 }
 
 function doubleclick (d) {

From deaebaa4fd1125f02c7ec2c7218d90b05aca0c65 Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Fri, 27 Jul 2018 13:51:29 +0200
Subject: [PATCH 28/31] fix: [UI-Hash] pie chart json

---
 var/www/modules/hashDecoded/Flask_hashDecoded.py | 14 ++++++++++----
 var/www/static/css/tags.css                      |  1 -
 2 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/var/www/modules/hashDecoded/Flask_hashDecoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py
index 174b5e10..79d5ba2a 100644
--- a/var/www/modules/hashDecoded/Flask_hashDecoded.py
+++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py
@@ -380,15 +380,21 @@ def decoder_type_json():
     if not date_range:
         date_range.append(datetime.date.today().strftime("%Y%m%d"))
 
+    print(date_range)
+
     nb_decoded = {}
+    for decoder in all_decoder:
+        nb_decoded[decoder] = 0
+
     for date in date_range:
         for decoder in all_decoder:
             if typ is None:
-                nb_decoded[decoder] = r_serv_metadata.get(decoder+'_decoded:'+date)
+                nb_decod = r_serv_metadata.get(decoder+'_decoded:'+date)
             else:
-                nb_decoded[decoder] = r_serv_metadata.zscore(decoder+'_type:'+typ, date)
-            if nb_decoded[decoder] is None:
-                nb_decoded[decoder] = 0
+                nb_decod = r_serv_metadata.zscore(decoder+'_type:'+typ, date)
+
+            if nb_decod is not None:
+                nb_decoded[decoder] = nb_decoded[decoder] + int(nb_decod)
 
     to_json = []
     for decoder in all_decoder:

diff --git a/var/www/static/css/tags.css b/var/www/static/css/tags.css
index baa0f673..779ce4ff 100644
--- a/var/www/static/css/tags.css
+++ b/var/www/static/css/tags.css
@@ -47,7 +47,6 @@
     -webkit-transition: none;
     outline: none;
     display: block;
-    padding: 4px 6px;
     line-height: normal;
     overflow: hidden;
     height: auto;

From e6b583ee7ae51ba9005162d447d9a379c8bfd732 Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Fri, 24 Aug 2018 11:40:09 +0200
Subject: [PATCH 29/31] chg: [Hash] add Help on graph node

---
 .../modules/hashDecoded/Flask_hashDecoded.py |  8 +++---
 .../hashDecoded/templates/showHash.html      | 25 +++++++++++++++++++
 2 files changed, 28 insertions(+), 5 deletions(-)

diff --git a/var/www/modules/hashDecoded/Flask_hashDecoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py
index 79d5ba2a..a40a5a00 100644
--- a/var/www/modules/hashDecoded/Flask_hashDecoded.py
+++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py
@@ -62,7 +62,7 @@ def get_file_icon(estimated_type):
     file_type = estimated_type.split('/')[0]
     # set file icon
     if file_type == 'application':
-        file_icon = 'fa-file-o '
+        file_icon = 'fa-file '
     elif file_type == 'audio':
         file_icon = 'fa-file-video-o '
     elif file_type == 'image':
@@ -70,7 +70,7 @@ def get_file_icon(estimated_type):
     elif file_type == 'text':
         file_icon = 'fa-file-text-o'
     else:
-        file_icon = 'fa-file'
+        file_icon = 'fa-file-o'
 
     return file_icon
 
@@ -380,8 +380,6 @@ def decoder_type_json():
     if not date_range:
         date_range.append(datetime.date.today().strftime("%Y%m%d"))
 
-    print(date_range)
-
     nb_decoded = {}
     for decoder in all_decoder:
         nb_decoded[decoder] = 0
@@ -583,7 +581,7 @@ def send_file_to_vt_js():
         files = {'file': (hash, b64_content)}
         response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files=files, params= {'apikey': vt_auth})
         json_response = response.json()
-        print(json_response)
+        #print(json_response)
 
         vt_link = json_response['permalink'].split('analysis')[0] + 'analysis/'
         r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_link)

diff --git a/var/www/modules/hashDecoded/templates/showHash.html b/var/www/modules/hashDecoded/templates/showHash.html
index ae2ddb6f..3a5b0443 100644
--- a/var/www/modules/hashDecoded/templates/showHash.html
+++ b/var/www/modules/hashDecoded/templates/showHash.html
@@ -194,6 +194,31 @@
+    Double click on a node to open Hash/Paste
+
+    Current Hash
+    Hashs
+    Pastes
+
+    Hash Types:
+    Application
+    Audio
+    Image
+    Text
+    Other
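Looking back at the pie-chart JSON fix in PATCH 28 above: the old loop overwrote nb_decoded[decoder] on every date, so the chart only reflected the last day of the range, while the rewritten loop accumulates. A condensed, runnable illustration of the difference with plain dictionaries (the counts are fabricated):

    date_range = ['20180724', '20180725']
    counts = {('binary', '20180724'): 2, ('binary', '20180725'): 5}

    # old behaviour: assignment keeps only the last date's value
    nb = {}
    for date in date_range:
        nb['binary'] = counts.get(('binary', date)) or 0
    assert nb['binary'] == 5

    # fixed behaviour: initialise once, then accumulate over the range
    nb = {'binary': 0}
    for date in date_range:
        nb_decod = counts.get(('binary', date))
        if nb_decod is not None:
            nb['binary'] += int(nb_decod)
    assert nb['binary'] == 7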
From ab452841b1d9054225b683639f66738496c32cc6 Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Fri, 24 Aug 2018 11:51:16 +0200
Subject: [PATCH 30/31] fix: [HashDecoded] change template title

---
 var/www/modules/hashDecoded/templates/hashDecoded.html | 2 +-
 var/www/modules/hashDecoded/templates/showHash.html    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/var/www/modules/hashDecoded/templates/hashDecoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html
index 84927981..708f6117 100644
--- a/var/www/modules/hashDecoded/templates/hashDecoded.html
+++ b/var/www/modules/hashDecoded/templates/hashDecoded.html
@@ -5,7 +5,7 @@
-    Analysis Information Leak framework Dashboard
+    HashDecoded - AIL

diff --git a/var/www/modules/hashDecoded/templates/showHash.html b/var/www/modules/hashDecoded/templates/showHash.html
index 3a5b0443..b6799a5a 100644
--- a/var/www/modules/hashDecoded/templates/showHash.html
+++ b/var/www/modules/hashDecoded/templates/showHash.html
@@ -5,7 +5,7 @@
-    Analysis Information Leak framework Dashboard
+    Hash Information - AIL

From 6c981ea5e9482cf2395c33dd4e6628423ad5297f Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Fri, 24 Aug 2018 11:58:54 +0200
Subject: [PATCH 31/31] fix: [HashDecoded] typo

---
 .../modules/hashDecoded/templates/hashDecoded.html  | 12 ++++++------
 .../hashDecoded/templates/header_hashDecoded.html   |  2 +-
 var/www/modules/hashDecoded/templates/showHash.html |  4 ++--
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/var/www/modules/hashDecoded/templates/hashDecoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html
index 708f6117..f65f647b 100644
--- a/var/www/modules/hashDecoded/templates/hashDecoded.html
+++ b/var/www/modules/hashDecoded/templates/hashDecoded.html
@@ -5,7 +5,7 @@
-    HashDecoded - AIL
+    HashesDecoded - AIL
@@ -73,7 +73,7 @@
-			Hash Files
+			Hashed Files
@@ -137,9 +137,9 @@
 {% if l_64|length != 0 %}
   {% if date_from|string == date_to|string %}
-			{{ date_from }} Hash files:
+			{{ date_from }} Hashed files:
   {% else %}
-			{{ date_from }} to {{ date_to }} Hash files:
+			{{ date_from }} to {{ date_to }} Hashed files:
   {% endif %}
@@ -190,9 +190,9 @@
 {% else %}
   {% if date_from|string == date_to|string %}
-			{{ date_from }}, No Hash
+			{{ date_from }}, No Hashes
   {% else %}
-			{{ date_from }} to {{ date_to }}, No Hash
+			{{ date_from }} to {{ date_to }}, No Hashes
   {% endif %}
 {% endif %}
diff --git a/var/www/modules/hashDecoded/templates/header_hashDecoded.html b/var/www/modules/hashDecoded/templates/header_hashDecoded.html
index ea3c4e27..69fb9da9 100644
--- a/var/www/modules/hashDecoded/templates/header_hashDecoded.html
+++ b/var/www/modules/hashDecoded/templates/header_hashDecoded.html
@@ -1 +1 @@
-  hashDecoded
+  hashesDecoded

diff --git a/var/www/modules/hashDecoded/templates/showHash.html b/var/www/modules/hashDecoded/templates/showHash.html
index b6799a5a..458c4c92 100644
--- a/var/www/modules/hashDecoded/templates/showHash.html
+++ b/var/www/modules/hashDecoded/templates/showHash.html
@@ -152,7 +152,7 @@
     {% endif %}
-
+
@@ -203,7 +203,7 @@
     Current Hash
    - Hashs
    + Hashes
    Pastes
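Worth restating from the Decoder.py rework in PATCH 26 above: save_hash now separates two levels of first-seen tracking, and decoder_order deliberately lists decoder_base64 twice so base64 runs both before and after the binary/hexadecimal passes. A condensed sketch of the first-seen logic under the key names from that patch (a summary for orientation, not the full function; the connection values are illustrative):

    import redis

    # assumption: the usual AIL metadata handle
    serv_metadata = redis.StrictRedis(host='localhost', port=6382, db=7, decode_responses=True)

    def save_hash_sketch(decoder_name, message, hash, typ):
        # level 1: first sighting of this hash in this paste, whatever the encoding
        if serv_metadata.zscore('nb_seen_hash:'+hash, message) is None:
            serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1)
            serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', typ)
            serv_metadata.sadd('hash_all_type', typ)

        # level 2: first sighting of this hash in this paste for this encoding
        if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None:
            serv_metadata.sadd(decoder_name+'_paste:'+message, hash)
            serv_metadata.sadd('hash_'+decoder_name+'_all_type', typ)

        # counters run on every sighting, duplicate or not
        serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1)
        serv_metadata.zincrby('nb_seen_hash:'+hash, message, 1)
        serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1)

Splitting the checks this way is what lets a hash first found as base64 later be credited to the binary or hexadecimal decoder without inflating nb_seen_in_all_pastes.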