From 16c700ddb4b8b4f83b6da0786c83de95f1b0526c Mon Sep 17 00:00:00 2001 From: Terrtia Date: Fri, 29 Jun 2018 10:02:29 +0200 Subject: [PATCH] new: add base64 display on paste + base64 statistics --- OVERVIEW.md | 31 +- bin/ApiKey.py | 3 +- bin/Base64.py | 64 +++- bin/Helper.py | 13 + bin/MISP_The_Hive_feeder.py | 2 + bin/Release.py | 5 +- bin/SentimentAnalysis.py | 1 + bin/Tags.py | 9 + bin/Tokenize.py | 5 +- bin/packages/Paste.py | 2 +- bin/submit_paste.py | 11 + var/www/modules/Flask_config.py | 15 + .../base64Decoded/Flask_base64Decoded.py | 248 ++++++++++++++ .../templates/base64Decoded.html | 306 ++++++++++++++++++ .../base64Decoded/templates/base64_type.html | 179 ++++++++++ .../templates/header_base64Decoded.html | 1 + var/www/modules/showpaste/Flask_showpaste.py | 72 ++++- .../showpaste/templates/show_saved_paste.html | 68 ++++ 18 files changed, 1008 insertions(+), 27 deletions(-) create mode 100644 var/www/modules/base64Decoded/Flask_base64Decoded.py create mode 100644 var/www/modules/base64Decoded/templates/base64Decoded.html create mode 100644 var/www/modules/base64Decoded/templates/base64_type.html create mode 100644 var/www/modules/base64Decoded/templates/header_base64Decoded.html diff --git a/OVERVIEW.md b/OVERVIEW.md index 72c8e236..b642431b 100644 --- a/OVERVIEW.md +++ b/OVERVIEW.md @@ -1,7 +1,7 @@ Overview ======== -Redis and LevelDB overview +Redis and ARDB overview -------------------------- * Redis on TCP port 6379 @@ -20,3 +20,32 @@ Redis and LevelDB overview - DB 0 - Lines duplicate - DB 1 - Hashs + +ARDB overview +--------------------------- + +* DB 7 - Metadata: + ----------------------------------------- BASE64 ---------------------------------------- + + HSET - 'metadata_hash:'+hash 'saved_path' saved_path + 'size' size + 'first_seen' first_seen + 'last_seen' last_seen + 'estimated_type' estimated_type + 'vt_link' vt_link + 'vt_report' vt_report + 'nb_seen_in_all_pastes' nb_seen_in_all_pastes + + SET - 'base64_paste:'+paste hash * + + ZADD - 'base64_date:'+20180622 hash * nb_seen_this_day + + ZADD - 'base64_hash'+hash paste * nb_seen_in_paste + + SET - 'hash_all_type' hash_type + + ZADD - 'base64_type:'+type date nb_seen + + + + diff --git a/bin/ApiKey.py b/bin/ApiKey.py index e7ded9b2..faf4b2d9 100755 --- a/bin/ApiKey.py +++ b/bin/ApiKey.py @@ -86,8 +86,7 @@ if __name__ == "__main__": if message is not None: - search_api_key(message) - + search_api_key(message) else: publisher.debug("Script ApiKey is Idling 10s") diff --git a/bin/Base64.py b/bin/Base64.py index 960ca6de..767f36f9 100755 --- a/bin/Base64.py +++ b/bin/Base64.py @@ -8,6 +8,7 @@ import time import os import datetime +import redis from pubsublogger import publisher @@ -31,7 +32,7 @@ def timeout_handler(signum, frame): signal.signal(signal.SIGALRM, timeout_handler) -def search_base64(content, message): +def search_base64(content, message, date): find = False base64_list = re.findall(regex_base64, content) if(len(base64_list) > 0): @@ -46,6 +47,8 @@ def search_base64(content, message): find = True hash = sha1(decode).hexdigest() + print(message) + print(hash) data = {} data['name'] = hash @@ -54,8 +57,36 @@ def search_base64(content, message): data['estimated type'] = type json_data = json.dumps(data) - save_base64_as_file(decode, type, hash, json_data) - print('found {} '.format(type)) + date_paste = '{}/{}/{}'.format(date[0:4], date[4:6], date[6:8]) + date_key = date[0:4] + date[4:6] + date[6:8] + + serv_metadata.zincrby('base64_date:'+date_key, hash, 1) + + # first time we see this hash + if not 
serv_metadata.hexists('metadata_hash:'+hash, 'estimated_type'): + serv_metadata.hset('metadata_hash:'+hash, 'first_seen', date_paste) + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + else: + serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste) + + # first time we see this file on this paste + if serv_metadata.zscore('base64_hash:'+hash, message) is None: + print('first') + serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1) + + serv_metadata.sadd('base64_paste:'+message, hash) # paste - hash map + serv_metadata.zincrby('base64_hash:'+hash, message, 1)# hash - paste map + + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type) + serv_metadata.sadd('hash_all_type', type) + serv_metadata.zincrby('base64_type:'+type, date_key, 1) + + save_base64_as_file(decode, type, hash, json_data, id) + print('found {} '.format(type)) + # duplicate + else: + serv_metadata.zincrby('base64_hash:'+hash, message, 1) # number of b64 on this paste if(find): publisher.warning('base64 decoded') @@ -68,10 +99,10 @@ def search_base64(content, message): msg = 'infoleak:automatic-detection="base64";{}'.format(message) p.populate_set_out(msg, 'Tags') -def save_base64_as_file(decode, type, hash, json_data): +def save_base64_as_file(decode, type, hash, json_data, id): - filename_b64 = os.path.join(os.environ['AIL_HOME'], - p.config.get("Directories", "base64"), type, hash[:2], hash) + local_filename_b64 = os.path.join(p.config.get("Directories", "base64"), type, hash[:2], hash) + filename_b64 = os.path.join(os.environ['AIL_HOME'], local_filename_b64) filename_json = os.path.join(os.environ['AIL_HOME'], p.config.get("Directories", "base64"), type, hash[:2], hash + '.json') @@ -83,6 +114,10 @@ def save_base64_as_file(decode, type, hash, json_data): with open(filename_b64, 'wb') as f: f.write(decode) + # create hash metadata + serv_metadata.hset('metadata_hash:'+hash, 'saved_path', local_filename_b64) + serv_metadata.hset('metadata_hash:'+hash, 'size', os.path.getsize(filename_b64)) + with open(filename_json, 'w') as f: f.write(json_data) @@ -103,6 +138,12 @@ if __name__ == '__main__': p = Process(config_section) max_execution_time = p.config.getint("Base64", "max_execution_time") + serv_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.getint("ARDB_Metadata", "port"), + db=p.config.getint("ARDB_Metadata", "db"), + decode_responses=True) + # Sent to the logging a description of the module publisher.info("Base64 started") @@ -127,13 +168,12 @@ if __name__ == '__main__': # Do something with the message from the queue #print(filename) content = paste.get_p_content() - search_base64(content,message) - - # (Optional) Send that thing to the next queue - #p.populate_set_out(something_has_been_done) + date = str(paste._get_p_date()) + search_base64(content,message, date) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) diff --git a/bin/Helper.py b/bin/Helper.py index 289dd956..a6ca9b49 100755 --- a/bin/Helper.py +++ b/bin/Helper.py @@ -135,6 +135,12 @@ class Process(object): db=self.config.get('RedisPubSub', 'db'), decode_responses=True) + self.serv_statistics = redis.StrictRedis( + host=self.config.get('ARDB_Statistics', 'host'), + port=self.config.get('ARDB_Statistics', 'port'), + db=self.config.get('ARDB_Statistics', 'db'), + 
decode_responses=True) + self.moduleNum = os.getpid() def populate_set_in(self): @@ -181,6 +187,9 @@ class Process(object): self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum), value) self.r_temp.set("MODULE_"+self.subscriber_name + "_" + str(self.moduleNum) + "_PATH", complete_path) self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum)) + + curr_date = datetime.date.today() + self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_in:'+self.subscriber_name, 1) return message #except: @@ -217,3 +226,7 @@ class Process(object): time.sleep(1) continue self.pubsub.publish(message) + + def incr_module_timeout_statistic(self): + curr_date = datetime.date.today() + self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_timeout:'+self.subscriber_name, 1) diff --git a/bin/MISP_The_Hive_feeder.py b/bin/MISP_The_Hive_feeder.py index b11c44cb..299c995e 100755 --- a/bin/MISP_The_Hive_feeder.py +++ b/bin/MISP_The_Hive_feeder.py @@ -155,6 +155,8 @@ if __name__ == "__main__": flag_the_hive = False r_serv_db.set('ail:thehive', False) print('Not connected to The HIVE') + else: + HiveApi = False if HiveApi != False and flag_the_hive: try: diff --git a/bin/Release.py b/bin/Release.py index 6e7a8277..dbe57122 100755 --- a/bin/Release.py +++ b/bin/Release.py @@ -61,7 +61,8 @@ if __name__ == "__main__": publisher.info(to_print) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) diff --git a/bin/SentimentAnalysis.py b/bin/SentimentAnalysis.py index 34beea3f..8442befa 100755 --- a/bin/SentimentAnalysis.py +++ b/bin/SentimentAnalysis.py @@ -167,6 +167,7 @@ if __name__ == '__main__': try: Analyse(message, server) except TimeoutException: + p.incr_module_timeout_statistic() print ("{0} processing timeout".format(message)) continue else: diff --git a/bin/Tags.py b/bin/Tags.py index 15f8f837..0a178fef 100755 --- a/bin/Tags.py +++ b/bin/Tags.py @@ -11,6 +11,7 @@ This module create tags. 
import redis import time +import datetime from pubsublogger import publisher from Helper import Process @@ -41,6 +42,12 @@ if __name__ == '__main__': db=p.config.get("ARDB_Metadata", "db"), decode_responses=True) + serv_statistics = redis.StrictRedis( + host=p.config.get('ARDB_Statistics', 'host'), + port=p.config.get('ARDB_Statistics', 'port'), + db=p.config.get('ARDB_Statistics', 'db'), + decode_responses=True) + # Sent to the logging a description of the module publisher.info("Tags module started") @@ -67,4 +74,6 @@ if __name__ == '__main__': print(" tagged: {}".format(tag)) server_metadata.sadd('tag:'+path, tag) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_tagged:'+tag, 1) p.populate_set_out(message, 'MISP_The_Hive_feeder') diff --git a/bin/Tokenize.py b/bin/Tokenize.py index fdefeb6a..698b4fbc 100755 --- a/bin/Tokenize.py +++ b/bin/Tokenize.py @@ -60,8 +60,9 @@ if __name__ == "__main__": msg = '{} {} {}'.format(paste.p_path, word, score) p.populate_set_out(msg) except TimeoutException: - print ("{0} processing timeout".format(paste.p_path)) - continue + p.incr_module_timeout_statistic() + print ("{0} processing timeout".format(paste.p_path)) + continue else: signal.alarm(0) else: diff --git a/bin/packages/Paste.py b/bin/packages/Paste.py index d1e3f0d3..6942cb31 100755 --- a/bin/packages/Paste.py +++ b/bin/packages/Paste.py @@ -52,7 +52,7 @@ class Paste(object): :Example: - PST = Paste("/home/2013/ZEeGaez5.gz") + PST = Paste("/home/2013/01/12/ZEeGaez5.gz") """ diff --git a/bin/submit_paste.py b/bin/submit_paste.py index 49c8e1f0..db92d1db 100755 --- a/bin/submit_paste.py +++ b/bin/submit_paste.py @@ -48,6 +48,9 @@ def create_paste(uuid, paste_content, ltags, ltagsgalaxies, name): print(' {} send to Global'.format(save_path)) r_serv_log_submit.sadd(uuid + ':paste_submit_link', full_path) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'submit_paste', 1) + return 0 def addError(uuid, errorMessage): @@ -60,6 +63,8 @@ def addError(uuid, errorMessage): def abord_file_submission(uuid, errorMessage): addError(uuid, errorMessage) r_serv_log_submit.set(uuid + ':end', 1) + curr_date = datetime.date.today() + serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'submit_abord', 1) remove_submit_uuid(uuid) @@ -154,6 +159,12 @@ if __name__ == "__main__": db=cfg.getint("ARDB_Metadata", "db"), decode_responses=True) + serv_statistics = redis.StrictRedis( + host=cfg.get('ARDB_Statistics', 'host'), + port=cfg.getint('ARDB_Statistics', 'port'), + db=cfg.getint('ARDB_Statistics', 'db'), + decode_responses=True) + expire_time = 120 MAX_FILE_SIZE = 1000000000 ALLOWED_EXTENSIONS = ['txt', 'sh', 'pdf'] diff --git a/var/www/modules/Flask_config.py b/var/www/modules/Flask_config.py index 80ef9f18..43c65060 100644 --- a/var/www/modules/Flask_config.py +++ b/var/www/modules/Flask_config.py @@ -143,3 +143,18 @@ DiffMaxLineLength = int(cfg.get("Flask", "DiffMaxLineLength"))#Use to display t bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info'] UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted') + + # VT +try: + from virusTotalKEYS import vt_key + if vt_key != '': + vt_auth = vt_key + vt_enabled = True + print('VT submission is enabled') + else: + vt_enabled = False + print('VT submission is disabled') +except: + vt_auth = {'apikey': cfg.get("Flask", "max_preview_char")} + vt_enabled = False + print('VT submission is disabled') diff --git 
a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/base64Decoded/Flask_base64Decoded.py new file mode 100644 index 00000000..de91816c --- /dev/null +++ b/var/www/modules/base64Decoded/Flask_base64Decoded.py @@ -0,0 +1,248 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +''' + Flask functions and routes for the trending modules page +''' +import redis +import os +import datetime +import json +from Date import Date +import requests +from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for + +# ============ VARIABLES ============ +import Flask_config + +app = Flask_config.app +cfg = Flask_config.cfg +r_serv_metadata = Flask_config.r_serv_metadata +vt_enabled = Flask_config.vt_enabled +vt_auth = Flask_config.vt_auth + +base64Decoded = Blueprint('base64Decoded', __name__, template_folder='templates') + +# ============ FUNCTIONS ============ + +def get_date_range(num_day): + curr_date = datetime.date.today() + date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2)) + date_list = [] + + for i in range(0, num_day+1): + date_list.append(date.substract_day(i)) + + return list(reversed(date_list)) + +def substract_date(date_from, date_to): + date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8])) + date_to = datetime.date(int(date_to[0:4]), int(date_to[4:6]), int(date_to[6:8])) + delta = date_to - date_from # timedelta + l_date = [] + for i in range(delta.days + 1): + date = date_from + datetime.timedelta(i) + l_date.append( date.strftime('%Y%m%d') ) + return l_date + +def one(): + return 1 + +# ============= ROUTES ============== + +@base64Decoded.route("/base64Decoded/", methods=['GET']) +def base64Decoded_page(): + date_from = request.args.get('date_from') + date_to = request.args.get('date_to') + type = request.args.get('type') + + #date_from = '20180628' + #date_to = '20180628' + + if type not in r_serv_metadata.smembers('hash_all_type'): + type = None + + date_range = [] + if date_from is not None and date_to is not None: + #change format + if len(date_from) != 8: + date_from = date_from[0:4] + date_from[5:7] + date_from[8:10] + date_to = date_to[0:4] + date_to[5:7] + date_to[8:10] + date_range = substract_date(date_from, date_to) + + if not date_range: + date_range.append(datetime.date.today().strftime("%Y%m%d")) + + # display day type bar chart + if len(date_range) == 1 and type is None: + daily_type_chart = True + daily_date = date_range[0] + else: + daily_type_chart = False + daily_date = None + + l_64 = set() + for date in date_range: + l_hash = r_serv_metadata.zrange('base64_date:' +date, 0, -1) + if l_hash: + for hash in l_hash: + l_64.add(hash) + + num_day_sparkline = 6 + date_range_sparkline = get_date_range(num_day_sparkline) + + b64_metadata = [] + l_64 = list(l_64) + for hash in l_64: + # select requested base 64 type + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + if type is not None: + if estimated_type is not None: + if estimated_type != type: + continue + + first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen') + last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen') + nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes') + size = r_serv_metadata.hget('metadata_hash:'+hash, 'size') + + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + + if hash is not None and first_seen is not None and \ + last_seen is not None and \ + 
nb_seen_in_paste is not None and \ + size is not None: + + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon = 'fa-file-o ' + elif file_type == 'audio': + file_icon = 'fa-file-video-o ' + elif file_type == 'image': + file_icon = 'fa-file-image-o' + elif file_type == 'text': + file_icon = 'fa-file-text-o' + else: + file_icon = 'fa-file' + + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + else: + b64_vt = False + b64_vt_link = '' + + sparklines_value = [] + for date_day in date_range_sparkline: + nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date_day, hash) + if nb_seen_this_day is None: + nb_seen_this_day = 0 + sparklines_value.append(int(nb_seen_this_day)) + + b64_metadata.append( (file_icon, estimated_type, hash, nb_seen_in_paste, size, first_seen, last_seen, b64_vt, b64_vt_link, sparklines_value) ) + + return render_template("base64Decoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date) + +@base64Decoded.route('/base64Decoded/hash_by_type') +def hash_by_type(): + type = request.args.get('type') + type = 'text/plain' + return render_template('base64_type.html',type = type) + +@base64Decoded.route('/base64Decoded/hash_by_type_json') +def hash_by_type_json(): + type = request.args.get('type') + + num_day_type = 30 + date_range_sparkline = get_date_range(num_day_type) + + #verify input + if type in r_serv_metadata.smembers('hash_all_type'): + type_value = [] + for date in date_range_sparkline: + num_day_type = r_serv_metadata.zscore('base64_type:'+type, date) + if num_day_type is None: + num_day_type = 0 + date = date[0:4] + '-' + date[4:6] + '-' + date[6:8] + type_value.append({ 'date' : date, 'value' : int( num_day_type )}) + + return jsonify(type_value) + else: + return jsonify() + +@base64Decoded.route('/base64Decoded/daily_type_json') +def daily_type_json(): + date = request.args.get('date') + + daily_type = set() + l_b64 = r_serv_metadata.zrange('base64_date:' +date, 0, -1) + for hash in l_b64: + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + if estimated_type is not None: + daily_type.add(estimated_type) + + type_value = [] + for day_type in daily_type: + num_day_type = r_serv_metadata.zscore('base64_type:'+day_type, date) + type_value.append({ 'date' : day_type, 'value' : int( num_day_type )}) + return jsonify(type_value) + +@base64Decoded.route('/base64Decoded/send_file_to_vt', methods=['POST']) +def send_file_to_vt(): + paste = request.form['paste'] + hash = request.form['hash'] + + b64_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + b64_content = '' + with open(b64_full_path, 'rb') as f: + b64_content = f.read() + + files = {'file': (hash, b64_content)} + response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files=files, params= {'apikey': vt_auth}) + json_response = response.json() + print(json_response) + + vt_b64_link = json_response['permalink'].split('analysis')[0] + 'analysis/' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_b64_link) + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report', '') + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=paste)) + +@base64Decoded.route('/base64Decoded/update_vt_result') +def update_vt_result(): + hash = 
request.args.get('hash') + + params = {'apikey': vt_auth, 'resource': hash} + response = requests.get('https://www.virustotal.com/vtapi/v2/file/report',params=params) + if response.status_code == 200: + json_response = response.json() + response_code = json_response['response_code'] + # report exist + if response_code == 1: + total = json_response['total'] + positive = json_response['positives'] + + b64_vt_report = 'Detection {}/{}'.format(positive,total) + # no report found + elif response_code == 0: + b64_vt_report = 'No report found' + pass + # file in queue + elif response_code == -2: + b64_vt_report = 'File in queue' + pass + + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_report', b64_vt_report) + return jsonify(hash=hash, report_vt=b64_vt_report) + elif response.status_code == 403: + Flask_config.vt_enabled = False + print('VT is disabled') + return jsonify() + else: + # TODO FIXME make json response + return jsonify() + +# ========= REGISTRATION ========= +app.register_blueprint(base64Decoded) diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/base64Decoded/templates/base64Decoded.html new file mode 100644 index 00000000..66f7829a --- /dev/null +++ b/var/www/modules/base64Decoded/templates/base64Decoded.html @@ -0,0 +1,306 @@ + + + + + + + + Analysis Information Leak framework Dashboard + + + + + + + + + + + + + + + + + + + + {% include 'navbar.html' %} + +
+
+
+

Base64 Files

+
+ +
+
+ + + {% if type %} +
+
+ {% endif %} + {% if daily_type_chart %} +
+
+ {% endif %} + + + {% if l_64|length != 0 %} +

Today Base64 files:

+ + + + + + + + + + + + + + + {% for b64 in l_64 %} + + + + + + + + + + + {% endfor %} + +
+ <th>estimated type</th> <th>hash</th> <th>first seen</th> <th>last seen</th> <th>nb paste</th> <th>size</th> <th>Virus Total</th> <th>Test</th>
+ <td>{{ b64[1] }}</td> <td>{{ b64[2] }}</td> <td>{{ b64[5] }}</td> <td>{{ b64[6] }}</td> <td>{{ b64[3] }}</td> <td>{{ b64[4] }}</td> + {% if vt_enabled %} + {% if not b64[7] %} +
+ + +
+ {% else %} + VT scan reports + {% endif %} + + {% else %} + Virus Total submission is disabled + {% endif %} +
+
+ {% else %} +

{{daily_date[0:4]}}/{{daily_date[4:6]}}/{{daily_date[6:8]}}, No base64

+ {% endif %} + +
+ + + + + + + + + + + + + diff --git a/var/www/modules/base64Decoded/templates/base64_type.html b/var/www/modules/base64Decoded/templates/base64_type.html new file mode 100644 index 00000000..64c25b19 --- /dev/null +++ b/var/www/modules/base64Decoded/templates/base64_type.html @@ -0,0 +1,179 @@ + + + + + + + + Analysis Information Leak framework Dashboard + + + + + + + + + + + + + + + + + + + + {% include 'navbar.html' %} + +
+
+
+

Base64 Files

+
+ + +
+
+ +
+ + + + + + + + + diff --git a/var/www/modules/base64Decoded/templates/header_base64Decoded.html b/var/www/modules/base64Decoded/templates/header_base64Decoded.html new file mode 100644 index 00000000..86d529a2 --- /dev/null +++ b/var/www/modules/base64Decoded/templates/header_base64Decoded.html @@ -0,0 +1 @@ +
  • base64Decoded
  • diff --git a/var/www/modules/showpaste/Flask_showpaste.py b/var/www/modules/showpaste/Flask_showpaste.py index 13c2cc45..d7f5253f 100644 --- a/var/www/modules/showpaste/Flask_showpaste.py +++ b/var/www/modules/showpaste/Flask_showpaste.py @@ -6,12 +6,14 @@ ''' import redis import json +import os import flask -from flask import Flask, render_template, jsonify, request, Blueprint, make_response +from flask import Flask, render_template, jsonify, request, Blueprint, make_response, redirect, url_for import difflib import ssdeep import Paste +import requests # ============ VARIABLES ============ import Flask_config @@ -28,13 +30,15 @@ DiffMaxLineLength = Flask_config.DiffMaxLineLength bootstrap_label = Flask_config.bootstrap_label misp_event_url = Flask_config.misp_event_url hive_case_url = Flask_config.hive_case_url +vt_enabled = Flask_config.vt_enabled showsavedpastes = Blueprint('showsavedpastes', __name__, template_folder='templates') # ============ FUNCTIONS ============ -def showpaste(content_range): - requested_path = request.args.get('paste', '') +def showpaste(content_range, requested_path): + vt_enabled = Flask_config.vt_enabled + paste = Paste.Paste(requested_path) p_date = str(paste._get_p_date()) p_date = p_date[6:]+'/'+p_date[4:6]+'/'+p_date[0:4] @@ -118,7 +122,6 @@ def showpaste(content_range): else: automatic = False - tag_hash = ssdeep.hash(tag) if r_serv_statistics.sismember('tp:'+tag, requested_path): tag_status_tp = True else: @@ -130,6 +133,37 @@ def showpaste(content_range): list_tags.append( (tag, automatic, tag_status_tp, tag_status_fp) ) + l_64 = [] + # load base64 files + if r_serv_metadata.scard('base64_paste:'+requested_path) > 0: + set_b64 = r_serv_metadata.smembers('base64_paste:'+requested_path) + for hash in set_b64: + nb_in_file = int(r_serv_metadata.zscore('base64_hash:'+hash, requested_path)) + estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') + file_type = estimated_type.split('/')[0] + # set file icon + if file_type == 'application': + file_icon = 'fa-file-o ' + elif file_type == 'audio': + file_icon = 'fa-file-video-o ' + elif file_type == 'image': + file_icon = 'fa-file-image-o' + elif file_type == 'text': + file_icon = 'fa-file-text-o' + else: + file_icon = 'fa-file' + saved_path = r_serv_metadata.hget('metadata_hash:'+hash, 'saved_path') + if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'): + b64_vt = True + b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link') + print(b64_vt_report) + else: + b64_vt = False + b64_vt_link = '' + b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report') + + l_64.append( (file_icon, estimated_type, hash, saved_path, nb_in_file, b64_vt, b64_vt_link, b64_vt_report) ) + if Flask_config.pymisp is False: misp = False else: @@ -157,13 +191,15 @@ def showpaste(content_range): hive_url = hive_case_url.replace('id_here', hive_case) return render_template("show_saved_paste.html", date=p_date, bootstrap_label=bootstrap_label, active_taxonomies=active_taxonomies, active_galaxies=active_galaxies, list_tags=list_tags, source=p_source, encoding=p_encoding, language=p_language, size=p_size, mime=p_mime, lineinfo=p_lineinfo, content=p_content, initsize=len(p_content), duplicate_list = p_duplicate_list, simil_list = p_simil_list, hashtype_list = p_hashtype_list, date_list=p_date_list, - misp=misp, hive=hive, misp_eventid=misp_eventid, misp_url=misp_url, hive_caseid=hive_caseid, hive_url=hive_url) + l_64=l_64, vt_enabled=vt_enabled, misp=misp, hive=hive, 
misp_eventid=misp_eventid, misp_url=misp_url, hive_caseid=hive_caseid, hive_url=hive_url) # ============ ROUTES ============ @showsavedpastes.route("/showsavedpaste/") #completely shows the paste in a new tab def showsavedpaste(): - return showpaste(0) + requested_path = request.args.get('paste', '') + print(requested_path) + return showpaste(0, requested_path) @showsavedpastes.route("/showsavedrawpaste/") #shows raw def showsavedrawpaste(): @@ -175,7 +211,8 @@ def showsavedrawpaste(): @showsavedpastes.route("/showpreviewpaste/") def showpreviewpaste(): num = request.args.get('num', '') - return "|num|"+num+"|num|"+showpaste(max_preview_modal) + requested_path = request.args.get('paste', '') + return "|num|"+num+"|num|"+showpaste(max_preview_modal, requested_path) @showsavedpastes.route("/getmoredata/") @@ -202,5 +239,26 @@ def showDiff(): the_html = htmlD.make_file(lines1, lines2) return the_html +@showsavedpastes.route('/send_file_to_vt/', methods=['POST']) +def send_file_to_vt(): + b64_path = request.form['b64_path'] + paste = request.form['paste'] + hash = request.form['hash'] + + b64_full_path = os.path.join(os.environ['AIL_HOME'], b64_path) + b64_content = '' + with open(b64_full_path, 'rb') as f: + b64_content = f.read() + + files = {'file': (hash, b64_content)} + response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files=files, params=vt_auth) + json_response = response.json() + print(json_response) + + vt_b64_link = json_response['permalink'].split('analysis')[0] + 'analysis/' + r_serv_metadata.hset('metadata_hash:'+hash, 'vt_link', vt_b64_link) + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=paste)) + # ========= REGISTRATION ========= app.register_blueprint(showsavedpastes) diff --git a/var/www/modules/showpaste/templates/show_saved_paste.html b/var/www/modules/showpaste/templates/show_saved_paste.html index 1340f471..fec224d9 100644 --- a/var/www/modules/showpaste/templates/show_saved_paste.html +++ b/var/www/modules/showpaste/templates/show_saved_paste.html @@ -27,6 +27,11 @@ overflow-x: hidden; width:100%; } + + .red_table thead{ + background: #d91f2d; + color: #fff; + } @@ -372,6 +377,52 @@ {% endif %} + + {% if l_64|length != 0 %} +

    Base64 files:

    + + + + + + + + + + + {% for b64 in l_64 %} + + + + + + + {% endfor %} + +
+ <th>estimated type</th> <th>hash</th> <th>saved_path</th> <th>Virus Total</th>
+ <td>{{ b64[1] }}</td> <td>{{ b64[2] }} ({{ b64[4] }})</td> <td>{{ b64[3] }}</td> + {% if vt_enabled %} + {% if not b64[5] %} +
    + + + +
    + {% else %} + VT scan reports + {% endif %} + + + + {% else %} + Virus Total submission is disabled + {% endif %} +
    + {% endif %} +

    Content:

    [Raw content]

    {{ content }}

    @@ -406,9 +457,26 @@ }); $('#tableDup').DataTable(); + $('#tableb64').DataTable({ + "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]], + "iDisplayLength": 5, + "order": [[ 1, "asc" ]] + }); }); + +
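
For reviewers, a minimal sketch (not part of the patch) of how the ARDB keys introduced above can be read back, e.g. from a one-off maintenance script. It assumes the usual AIL config file location and the `ARDB_Metadata` / `ARDB_Statistics` sections used by the modules; the config path and the example date below are placeholders.

```python
#!/usr/bin/env python3
# Sketch only: read back the base64 metadata and daily statistics keys
# written by Base64.py, Helper.py, Tags.py and submit_paste.py.
import os
import configparser
import redis

# assumed config location; adjust to your AIL install
cfg = configparser.ConfigParser()
cfg.read(os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg'))

serv_metadata = redis.StrictRedis(
    host=cfg.get('ARDB_Metadata', 'host'),
    port=cfg.getint('ARDB_Metadata', 'port'),
    db=cfg.getint('ARDB_Metadata', 'db'),
    decode_responses=True)

serv_statistics = redis.StrictRedis(
    host=cfg.get('ARDB_Statistics', 'host'),
    port=cfg.getint('ARDB_Statistics', 'port'),
    db=cfg.getint('ARDB_Statistics', 'db'),
    decode_responses=True)

date = '20180629'  # example day, YYYYMMDD

# hashes decoded that day and how many times each one was seen
for b64_hash, nb_seen in serv_metadata.zrange('base64_date:' + date, 0, -1, withscores=True):
    meta = serv_metadata.hgetall('metadata_hash:' + b64_hash)
    print(b64_hash, int(nb_seen), meta.get('estimated_type'), meta.get('saved_path'))

# per-type daily counters maintained by Base64.py
for estimated_type in serv_metadata.smembers('hash_all_type'):
    nb = serv_metadata.zscore('base64_type:' + estimated_type, date)
    print(estimated_type, int(nb) if nb else 0)

# per-module / per-tag counters maintained by Helper.py, Tags.py and submit_paste.py
print(serv_statistics.hgetall(date))
```

All key names (`metadata_hash:`, `base64_date:`, `base64_type:`, `hash_all_type`, and the per-day statistics hash with fields such as `paste_by_modules_in:<module>` and `paste_tagged:<tag>`) come from this patch; only the config path and the example date are invented for the sketch.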