From bd5f83f0ebd8d8c060446c019d35e0c6ef5e492c Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Fri, 20 Jul 2018 10:32:52 +0200
Subject: [PATCH] chg: refactor base64 decoded to hash

---
 OVERVIEW.md                                   |   2 +
 bin/DbDump.py                                 | 133 ++++++++++
 bin/Decoder.py                                |   3 +-
 bin/Dox.py                                    |  96 ++++++++
 bin/LAUNCH.sh                                 |   4 -
 bin/packages/modules.cfg                      |   8 -
 .../base64Decoded/templates/base64_type.html  | 179 --------------
 .../base64Decoded/templates/base64_types.html | 229 ------------------
 .../templates/header_base64Decoded.html       |   1 -
 .../Flask_hashDecoded.py}                     |  84 +++----
 .../templates/hashDecoded.html}               |  40 +--
 .../templates/header_hashDecoded.html         |   1 +
 .../templates/showHash.html                   |  10 +-
 var/www/modules/showpaste/Flask_showpaste.py  |   8 +-
 .../showpaste/templates/show_saved_paste.html |   8 +-
 15 files changed, 309 insertions(+), 497 deletions(-)
 create mode 100755 bin/DbDump.py
 create mode 100755 bin/Dox.py
 delete mode 100644 var/www/modules/base64Decoded/templates/base64_type.html
 delete mode 100644 var/www/modules/base64Decoded/templates/base64_types.html
 delete mode 100644 var/www/modules/base64Decoded/templates/header_base64Decoded.html
 rename var/www/modules/{base64Decoded/Flask_base64Decoded.py => hashDecoded/Flask_hashDecoded.py} (86%)
 rename var/www/modules/{base64Decoded/templates/base64Decoded.html => hashDecoded/templates/hashDecoded.html} (89%)
 create mode 100644 var/www/modules/hashDecoded/templates/header_hashDecoded.html
 rename var/www/modules/{base64Decoded => hashDecoded}/templates/showHash.html (97%)

diff --git a/OVERVIEW.md b/OVERVIEW.md
index 64f9179e..335e1d95 100644
--- a/OVERVIEW.md
+++ b/OVERVIEW.md
@@ -35,6 +35,8 @@ ARDB overview
         'vt_link'                   vt_link
         'vt_report'                 vt_report
         'nb_seen_in_all_pastes'     nb_seen_in_all_pastes
+        'base64_decoder'            nb_encoded
+        'binary_decoder'            nb_encoded

     SET - 'all_decoder'      decoder*

diff --git a/bin/DbDump.py b/bin/DbDump.py
new file mode 100755
index 00000000..0eb46d16
--- /dev/null
+++ b/bin/DbDump.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+"""
+    DbDump
+
+    Detect likely database dumps by counting field separators per paste line.
+"""
+
+import time
+
+from pubsublogger import publisher
+
+from Helper import Process
+from packages import Paste
+
+def get_lines(content):
+
+    is_db_leak = False
+
+    list_lines = content.splitlines()
+    list_separators = []
+    if len(list_lines) > 0:
+        for line in list_lines:
+            list_separators.append(search_separator(line))
+
+    # Allowed variation of the separator count between consecutive lines
+    threshold_num_separator_line = 0
+    # Minimum number of separators per line
+    threshold_min_separator_line = 7
+    same_separator = 0
+    num_separator = 0
+    current_separator = ''
+
+    for separator in list_separators:
+        if separator[0] != '':
+            # same separator as on the previous line
+            if separator[0] == current_separator:
+                if abs(separator[1] - num_separator) <= threshold_num_separator_line:
+                    if num_separator > threshold_min_separator_line:
+                        same_separator += 1
+                    else:
+                        num_separator = separator[1]
+                        same_separator = 0
+                else:
+                    # FIXME: enhancement ?
+                    num_separator = separator[1]
+
+                if(same_separator >= 5):
+                    is_db_leak = True
+            # different separator
+            else:
+                # change the current separator
+                current_separator = separator[0]
+                same_separator = 0
+                num_separator = 0
+
+    return is_db_leak
+
+
+def search_separator(line):
+    list_separator = []
+    # count candidate separators
+    #list_separator.append( (';', line.count(';')) )
+    #list_separator.append( (',', line.count(',')) )
+    list_separator.append( (';', line.count(';')) )
+    list_separator.append( ('|', line.count('|')) )
+    #list_separator.append( (':', line.count(':')) )
+
+    separator = ''
+    separator_number = 0
+
+    # keep the most frequent separator on this line
+    for potential_separator in list_separator:
+        if potential_separator[1] > separator_number:
+            separator = potential_separator[0]
+            separator_number = potential_separator[1]
+
+    return (separator, separator_number)
+
+
+if __name__ == '__main__':
+    # If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh)
+    # Port of the redis instance used by pubsublogger
+    publisher.port = 6380
+    # Script is the default channel used for the modules.
+    publisher.channel = 'Script'
+
+    # Section name in bin/packages/modules.cfg
+    config_section = 'DbDump'
+
+    # Setup the I/O queues
+    p = Process(config_section)
+
+    # Send a description of the module to the logs
+    publisher.info("DbDump started")
+
+    # Endless loop getting messages from the input queue
+    while True:
+        # Get one message from the input queue
+        message = p.get_from_set()
+        if message is None:
+            publisher.debug("{} queue is empty, waiting".format(config_section))
+            time.sleep(1)
+            continue
+
+        filename = message
+        paste = Paste.Paste(filename)
+
+        # Do something with the message from the queue
+        print(filename)
+        content = paste.get_p_content()
+        is_db_leak = get_lines(content)
+
+        if is_db_leak:
+            to_print = 'DbDump;{};{};{};'.format(
+                paste.p_source, paste.p_date, paste.p_name)
+
+            print('found DbDump')
+            print(to_print)
+            publisher.warning('{}Database Dump found;{}'.format(
+                to_print, paste.p_path))
+
+            msg = 'dbdump;{}'.format(filename)
+            p.populate_set_out(msg, 'alertHandler')
+
+            msg = 'dbdump;{}'.format(filename)
+            p.populate_set_out(msg, 'Tags')
+
+            # Send to duplicate
+            p.populate_set_out(filename, 'Duplicate')
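For reference, the heuristic that get_lines()/search_separator() implement can be read as: flag a paste once roughly five consecutive lines share the same dominant separator (';' or '|'), each line carrying more than seven of them with no variation in count. A simplified standalone sketch of the same idea (the function name and sample data are illustrative, not part of the patch):

    # Standalone sketch of the DbDump separator heuristic (illustrative only).
    def looks_like_db_dump(content, separators=(';', '|'),
                           min_per_line=7, min_consecutive=5):
        run = 0
        previous = None
        for line in content.splitlines():
            # dominant separator on this line, with its count
            sep, count = max(((s, line.count(s)) for s in separators),
                             key=lambda x: x[1])
            if count > min_per_line and (sep, count) == previous:
                run += 1
                if run >= min_consecutive:
                    return True
            else:
                run = 0
            previous = (sep, count)
        return False

    sample = "\n".join("1;alice;alice@example.com;hash;salt;2018;07;20;US"
                       for _ in range(10))
    assert looks_like_db_dump(sample)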
diff --git a/bin/Decoder.py b/bin/Decoder.py
index 38975e67..cd37b9bf 100755
--- a/bin/Decoder.py
+++ b/bin/Decoder.py
@@ -59,6 +59,7 @@ def decode_string(content, message, date, encoded_list, decoder_name, encoded_mi

 # # TODO: FIXME check db
 def save_hash(decoder_name, message, date, decoded):
+    print(decoder_name)
     type = magic.from_buffer(decoded, mime=True)
     print(type)
     hash = sha1(decoded).hexdigest()
@@ -88,6 +89,7 @@ def save_hash(decoder_name, message, date, decoded):
     if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None:
         print('first '+decoder_name)
         serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1)
+        serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1)

         serv_metadata.sadd('hash_paste:'+message, hash) # paste - hash map
         serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map
@@ -206,7 +208,6 @@ if __name__ == '__main__':
             date = str(paste._get_p_date())
             for decoder in all_decoder: # add threshold and size limit
-                print(decoder['name'])

                 # max execution time on regex
                 signal.alarm(decoder['max_execution_time'])
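The new hincrby line is what backs the two fields added to OVERVIEW.md above ('base64_decoder', 'binary_decoder'): each decoder that sees a hash in a paste for the first time bumps its own counter inside the metadata_hash:<sha1> hash. A rough redis-py sketch of this write path (key and field names are from the diff; the connection parameters are an assumption):

    import redis

    # ARDB metadata instance; host/port here are assumptions, not from the patch
    r = redis.StrictRedis(host='localhost', port=6382, decode_responses=True)

    def record_first_sighting(decoder_name, paste, sha1_hash):
        # one metadata hash per decoded file, keyed by its sha1
        r.hincrby('metadata_hash:' + sha1_hash, 'nb_seen_in_all_pastes', 1)
        # NEW in this patch: per-decoder sighting counter, e.g. 'base64_decoder'
        r.hincrby('metadata_hash:' + sha1_hash, decoder_name + '_decoder', 1)
        # paste <-> hash maps, as in save_hash()
        r.sadd('hash_paste:' + paste, sha1_hash)
        r.sadd(decoder_name + '_paste:' + paste, sha1_hash)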
diff --git a/bin/Dox.py b/bin/Dox.py
new file mode 100755
index 00000000..cb762867
--- /dev/null
+++ b/bin/Dox.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+"""
+The Dox Module
+======================
+
+This module consumes the Redis list created by the Categ module.
+
+"""
+
+import pprint
+import time
+from packages import Paste
+from packages import lib_refine
+from pubsublogger import publisher
+import re
+import sys
+
+from Helper import Process
+
+if __name__ == "__main__":
+    publisher.port = 6380
+    publisher.channel = "Script"
+
+    config_section = 'Dox'
+
+    p = Process(config_section)
+
+    # FUNCTIONS #
+    publisher.info("Dox module")
+
+    channel = 'dox_categ'
+
+    regex = re.compile('name|age', re.IGNORECASE)
+
+    while True:
+        message = p.get_from_set()
+
+        if message is not None:
+            # Categ messages are 'filepath score'
+            filepath, score = message.split()
+            filename = filepath
+            paste = Paste.Paste(filename)
+            content = paste.get_p_content()
+
+            count = 0
+
+            # Count occurrences of dox-related keywords
+            for keyword in ('name', 'Name', 'NAME',
+                            'age', 'Age', 'AGE',
+                            'address', 'Address', 'ADDRESS'):
+                tmp = paste._get_word(keyword)
+                if (len(tmp) > 0):
+                    print(tmp)
+                    count += tmp[1]
+
+            #dox_list = re.findall(regex, content)
+            if(count > 0):
+
+                # Send to duplicate
+                p.populate_set_out(filepath, 'Duplicate')
+                # Send to alertHandler
+                msg = 'dox;{}'.format(filepath)
+                p.populate_set_out(msg, 'alertHandler')
+
+                print(filename)
+                print(content)
+                print('--------------------------------------------------------------------------------------')
+
+        else:
+            publisher.debug("Script Dox is idling 10s")
+            time.sleep(10)

diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh
index 64b4d552..5621287a 100755
--- a/bin/LAUNCH.sh
+++ b/bin/LAUNCH.sh
@@ -142,10 +142,6 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Keys" bash -c './Keys.py; read x'
     sleep 0.1
-    screen -S "Script_AIL" -X screen -t "Base64" bash -c './Base64.py; read x'
-    sleep 0.1
-    screen -S "Script_AIL" -X screen -t "Binary" bash -c './Binary.py; read x'
-    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Decoder" bash -c './Decoder.py; read x'
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c './Bitcoin.py; read x'

diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg
index 4dc7edd4..06217452 100644
--- a/bin/packages/modules.cfg
+++ b/bin/packages/modules.cfg
@@ -121,14 +121,6 @@
 publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags

 subscribe = Redis_Global
 publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags

-[Base64]
-subscribe = Redis_Global
-publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
-
-[Binary]
-subscribe = Redis_Global
-publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
-
 [Bitcoin]
 subscribe = Redis_Global
 publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
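One caveat worth flagging, with a sketch below: both new modules look up their queues by section name in bin/packages/modules.cfg (config_section = 'DbDump' / 'Dox'), yet the hunk above only deletes [Base64] and [Binary]; this patch adds no new sections. Wiring them in would presumably mirror the deleted sections, something like the following (the queue names are guesses, not from the patch — DbDump reads raw paste paths from Global, while Dox expects 'filepath score' messages from Categ):

    [DbDump]
    subscribe = Redis_Global
    publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags

    [Dox]
    subscribe = Redis_Dox
    publish = Redis_Duplicate,Redis_alertHandler

A matching entry in the Categ configuration (routing the 'dox_categ' channel into the hypothetical Redis_Dox queue) would also be needed.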
diff --git a/var/www/modules/base64Decoded/templates/base64_type.html b/var/www/modules/base64Decoded/templates/base64_type.html
deleted file mode 100644
index 64c25b19..00000000
--- a/var/www/modules/base64Decoded/templates/base64_type.html
+++ /dev/null
@@ -1,179 +0,0 @@
    [deleted template, 179 lines: the per-type "Base64 Files" page
    ("Analysis Information Leak framework Dashboard" title, 'navbar.html'
    include); the HTML markup did not survive this rendering and is omitted]

diff --git a/var/www/modules/base64Decoded/templates/base64_types.html b/var/www/modules/base64Decoded/templates/base64_types.html
deleted file mode 100644
index addb0c37..00000000
--- a/var/www/modules/base64Decoded/templates/base64_types.html
+++ /dev/null
@@ -1,229 +0,0 @@
    [deleted template, 229 lines: the "Base64 Files" types overview page,
    same dashboard title and layout; markup likewise omitted]

diff --git a/var/www/modules/base64Decoded/templates/header_base64Decoded.html b/var/www/modules/base64Decoded/templates/header_base64Decoded.html
deleted file mode 100644
index aa76406c..00000000
--- a/var/www/modules/base64Decoded/templates/header_base64Decoded.html
+++ /dev/null
@@ -1 +0,0 @@
    [deleted one-line navbar entry labelled "base64Decoded"; markup omitted]
diff --git a/var/www/modules/base64Decoded/Flask_base64Decoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py
similarity index 86%
rename from var/www/modules/base64Decoded/Flask_base64Decoded.py
rename to var/www/modules/hashDecoded/Flask_hashDecoded.py
index 0aef886e..16534e79 100644
--- a/var/www/modules/base64Decoded/Flask_base64Decoded.py
+++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py
@@ -25,7 +25,7 @@ r_serv_metadata = Flask_config.r_serv_metadata
 vt_enabled = Flask_config.vt_enabled
 vt_auth = Flask_config.vt_auth

-base64Decoded = Blueprint('base64Decoded', __name__, template_folder='templates')
+hashDecoded = Blueprint('hashDecoded', __name__, template_folder='templates')

 # ============ FUNCTIONS ============

@@ -52,7 +52,7 @@ def substract_date(date_from, date_to):
 def list_sparkline_values(date_range_sparkline, hash):
     sparklines_value = []
     for date_day in date_range_sparkline:
-        nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date_day, hash)
+        nb_seen_this_day = r_serv_metadata.zscore('hash_date:'+date_day, hash)
         if nb_seen_this_day is None:
             nb_seen_this_day = 0
         sparklines_value.append(int(nb_seen_this_day))
@@ -94,16 +94,16 @@ def one():
     return 1

 # ============= ROUTES ==============

-@base64Decoded.route("/base64Decoded/all_base64_search", methods=['POST'])
-def all_base64_search():
+@hashDecoded.route("/hashDecoded/all_hash_search", methods=['POST'])
+def all_hash_search():
     date_from = request.form.get('date_from')
     date_to = request.form.get('date_to')
     type = request.form.get('type')
     print(type)
-    return redirect(url_for('base64Decoded.base64Decoded_page', date_from=date_from, date_to=date_to, type=type))
+    return redirect(url_for('hashDecoded.hashDecoded_page', date_from=date_from, date_to=date_to, type=type))

-@base64Decoded.route("/base64Decoded/", methods=['GET'])
-def base64Decoded_page():
+@hashDecoded.route("/hashDecoded/", methods=['GET'])
+def hashDecoded_page():
     date_from = request.args.get('date_from')
     date_to = request.args.get('date_to')
     type = request.args.get('type')
@@ -150,7 +150,7 @@ def base64Decoded_page():
     l_64 = set()
     for date in date_range:
-        l_hash = r_serv_metadata.zrange('base64_date:' +date, 0, -1)
+        l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1)
         if l_hash:
             for hash in l_hash:
                 l_64.add(hash)
@@ -198,34 +198,34 @@ def base64Decoded_page():
     l_type = r_serv_metadata.smembers('hash_all_type')

-    return render_template("base64Decoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, l_type=l_type, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date,
+    return render_template("hashDecoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, l_type=l_type, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date,
                             date_from=date_from, date_to=date_to)

-@base64Decoded.route('/base64Decoded/hash_by_type')
+@hashDecoded.route('/hashDecoded/hash_by_type')
 def hash_by_type():
     type = request.args.get('type')
     type = 'text/plain'
-    return render_template('base64_type.html',type = type)
+    return render_template('hash_type.html',type = type)

-@base64Decoded.route('/base64Decoded/base64_hash')
-def base64_hash():
+@hashDecoded.route('/hashDecoded/hash_hash')
+def hash_hash():
     hash = request.args.get('hash')
-    return render_template('base64_hash.html')
+    return render_template('hash_hash.html')

-@base64Decoded.route('/base64Decoded/showHash')
+@hashDecoded.route('/hashDecoded/showHash')
 def showHash():
     hash = request.args.get('hash')
     #hash = 'e02055d3efaad5d656345f6a8b1b6be4fe8cb5ea'

     # TODO FIXME show error
     if hash is None:
-        return base64Decoded_page()
+        return hashDecoded_page()

     estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type')
     # hash not found
     # TODO FIXME show error
     if estimated_type is None:
-        return base64Decoded_page()
+        return hashDecoded_page()
     else:
         file_icon = get_file_icon(estimated_type)
@@ -256,7 +256,7 @@ def showHash():
                             first_seen=first_seen, last_seen=last_seen, nb_seen_in_all_pastes=nb_seen_in_all_pastes, sparkline_values=sparkline_values)

-@app.route('/base64Decoded/downloadHash')
+@app.route('/hashDecoded/downloadHash')
 def downloadHash():
     hash = request.args.get('hash')
     # sanitize hash
@@ -291,7 +291,7 @@ def downloadHash():
     else:
         return 'hash: ' + hash + " don't exist"

-@base64Decoded.route('/base64Decoded/hash_by_type_json')
+@hashDecoded.route('/hashDecoded/hash_by_type_json')
 def hash_by_type_json():
     type = request.args.get('type')
@@ -305,7 +305,7 @@
     if type in r_serv_metadata.smembers('hash_all_type'):
         type_value = []
         for date in date_range_sparkline:
-            num_day_type = r_serv_metadata.zscore('base64_type:'+type, date)
+            num_day_type = r_serv_metadata.zscore('hash_type:'+type, date)
             if num_day_type is None:
                 num_day_type = 0
             date = date[0:4] + '-' + date[4:6] + '-' + date[6:8]
@@ -315,12 +315,12 @@
     else:
         return jsonify()

-@base64Decoded.route('/base64Decoded/daily_type_json')
+@hashDecoded.route('/hashDecoded/daily_type_json')
 def daily_type_json():
     date = request.args.get('date')

     daily_type = set()
-    l_b64 = r_serv_metadata.zrange('base64_date:' +date, 0, -1)
+    l_b64 = r_serv_metadata.zrange('hash_date:' +date, 0, -1)
     for hash in l_b64:
         estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type')
         if estimated_type is not None:
@@ -328,12 +328,12 @@
     type_value = []
     for day_type in daily_type:
-        num_day_type = r_serv_metadata.zscore('base64_type:'+day_type, date)
+        num_day_type = r_serv_metadata.zscore('hash_type:'+day_type, date)
         type_value.append({ 'date' : day_type, 'value' : int( num_day_type )})

     return jsonify(type_value)

-@base64Decoded.route('/base64Decoded/range_type_json')
+@hashDecoded.route('/hashDecoded/range_type_json')
 def range_type_json():
     date_from = request.args.get('date_from')
     date_to = request.args.get('date_to')
@@ -351,7 +351,7 @@ def range_type_json():
     all_type = set()
     for date in date_range:
-        l_hash = r_serv_metadata.zrange('base64_date:' +date, 0, -1)
+        l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1)
         if l_hash:
             for hash in l_hash:
                 estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type')
@@ -362,7 +362,7 @@ def range_type_json():
         day_type = {}
         day_type['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8]
         for type in all_type:
-            num_day_type = r_serv_metadata.zscore('base64_type:'+type, date)
+            num_day_type = r_serv_metadata.zscore('hash_type:'+type, date)
             if num_day_type is None:
                 num_day_type = 0
             day_type[type]= num_day_type

     return jsonify(range_type)

-@base64Decoded.route('/base64Decoded/hash_graph_line_json')
+@hashDecoded.route('/hashDecoded/hash_graph_line_json')
 def hash_graph_line_json():
     hash = request.args.get('hash')
     date_from = request.args.get('date_from')
@@ -390,7 +390,7 @@ def hash_graph_line_json():
     if r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') is not None:
         json_seen_in_paste = []
         for date in date_range_seen_in_pastes:
-            nb_seen_this_day = r_serv_metadata.zscore('base64_date:'+date, hash)
+            nb_seen_this_day = r_serv_metadata.zscore('hash_date:'+date, hash)
             if nb_seen_this_day is None:
                 nb_seen_this_day = 0
             date = date[0:4] + '-' + date[4:6] + '-' + date[6:8]
@@ -401,7 +401,7 @@

     return jsonify()

-@base64Decoded.route('/base64Decoded/hash_graph_node_json')
+@hashDecoded.route('/hashDecoded/hash_graph_node_json')
 def hash_graph_node_json():
     hash = request.args.get('hash')
@@ -422,16 +422,16 @@ def hash_graph_node_json():
         nodes_set_hash.add((hash, 1, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url))

         #get related paste
-        l_pastes = r_serv_metadata.zrange('base64_hash:'+hash, 0, -1)
+        l_pastes = r_serv_metadata.zrange('nb_seen_hash:'+hash, 0, -1)
         for paste in l_pastes:
             url = paste
-            #nb_seen_in_this_paste = nb_in_file = int(r_serv_metadata.zscore('base64_hash:'+hash, paste))
-            nb_base64_in_paste = r_serv_metadata.scard('base64_paste:'+paste)
+            #nb_seen_in_this_paste = nb_in_file = int(r_serv_metadata.zscore('nb_seen_hash:'+hash, paste))
+            nb_hash_in_paste = r_serv_metadata.scard('hash_paste:'+paste)

-            nodes_set_paste.add((paste, 2,nb_base64_in_paste,url))
+            nodes_set_paste.add((paste, 2,nb_hash_in_paste,url))
             links_set.add((hash, paste))

-            l_hash = r_serv_metadata.smembers('base64_paste:'+paste)
+            l_hash = r_serv_metadata.smembers('hash_paste:'+paste)
             for child_hash in l_hash:
                 if child_hash != hash:
                     url = child_hash
@@ -444,12 +444,12 @@ def hash_graph_node_json():
                     nodes_set_hash.add((child_hash, 3, first_seen, last_seen, estimated_type, nb_seen_in_paste, size, url))
                     links_set.add((child_hash, paste))

-                    #l_pastes_child = r_serv_metadata.zrange('base64_hash:'+child_hash, 0, -1)
+                    #l_pastes_child = r_serv_metadata.zrange('nb_seen_hash:'+child_hash, 0, -1)
                     #for child_paste in l_pastes_child:

     nodes = []
     for node in nodes_set_hash:
-        nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('base64Decoded.showHash', hash=node[7]), 'hash': True})
+        nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('hashDecoded.showHash', hash=node[7]), 'hash': True})
     for node in nodes_set_paste:
         nodes.append({"id": node[0], "group": node[1], "nb_seen_in_paste": node[2],"url": url_for('showsavedpastes.showsavedpaste', paste=node[3]), 'hash': False})
     links = []
@@ -461,13 +461,13 @@ def hash_graph_node_json():
     else:
         return jsonify({})

-@base64Decoded.route('/base64Decoded/base64_types')
-def base64_types():
+@hashDecoded.route('/hashDecoded/hash_types')
+def hash_types():
     date_from = 20180701
     date_to = 20180706
-    return render_template('base64_types.html', date_from=date_from, date_to=date_to)
+    return render_template('hash_types.html', date_from=date_from, date_to=date_to)

-@base64Decoded.route('/base64Decoded/send_file_to_vt_js')
+@hashDecoded.route('/hashDecoded/send_file_to_vt_js')
 def send_file_to_vt_js():
     hash = request.args.get('hash')
@@ -490,7 +490,7 @@

     return jsonify({'vt_link': vt_link, 'vt_report': vt_report})

-@base64Decoded.route('/base64Decoded/update_vt_result')
+@hashDecoded.route('/hashDecoded/update_vt_result')
 def update_vt_result():
     hash = request.args.get('hash')
@@ -525,4 +525,4 @@
     return jsonify()

 # ========= REGISTRATION =========
-app.register_blueprint(base64Decoded)
+app.register_blueprint(hashDecoded)
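All the per-day statistics in this blueprint share one layout: a sorted set per day, hash_date:YYYYMMDD, mapping each sha1 to its sighting count for that day, plus hash_type:<mime> keyed the other way around. A minimal sketch equivalent to list_sparkline_values() above, over the trailing week (the connection and date handling are illustrative assumptions):

    import datetime
    import redis

    # ARDB metadata instance; host/port are assumptions
    r = redis.StrictRedis(host='localhost', port=6382, decode_responses=True)

    def sparkline_values(sha1_hash, num_day=6):
        # one value per day: the hash's score in that day's 'hash_date:' sorted set
        values = []
        today = datetime.date.today()
        for offset in range(num_day, -1, -1):
            day = (today - datetime.timedelta(days=offset)).strftime('%Y%m%d')
            score = r.zscore('hash_date:' + day, sha1_hash)
            values.append(int(score) if score is not None else 0)
        return values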
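Also worth noting: the blueprint now reads hash_date:, hash_type:, hash_paste: and nb_seen_hash:, but nothing in this patch renames the existing base64_* keys, so data decoded before this commit would vanish from the new pages. A hedged one-off migration sketch (not part of the patch; key prefixes are from the diff, everything else is assumed, and it presumes the new-style keys do not exist yet since RENAME overwrites):

    import redis

    # assumed ARDB metadata instance
    r = redis.StrictRedis(host='localhost', port=6382, decode_responses=True)

    RENAMES = {
        'base64_date:':  'hash_date:',
        'base64_type:':  'hash_type:',
        'base64_hash:':  'nb_seen_hash:',
        'base64_paste:': 'hash_paste:',
    }

    for old_prefix, new_prefix in RENAMES.items():
        # materialize the key list first so renames don't disturb the scan
        for key in list(r.scan_iter(match=old_prefix + '*')):
            r.rename(key, new_prefix + key[len(old_prefix):])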
diff --git a/var/www/modules/base64Decoded/templates/base64Decoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html
similarity index 89%
rename from var/www/modules/base64Decoded/templates/base64Decoded.html
rename to var/www/modules/hashDecoded/templates/hashDecoded.html
index bc71ed44..a82d90a5 100644
--- a/var/www/modules/base64Decoded/templates/base64Decoded.html
+++ b/var/www/modules/hashDecoded/templates/hashDecoded.html
    [renamed template, 89% similar; the HTML markup did not survive this
    rendering, so the hunks at @@ -59 @@, @@ -76 @@, @@ -110 @@, @@ -131 @@
    and @@ -163 @@ are summarized. They carry user-visible label renames and
    the matching endpoint updates:
      "Base64 Files"                                   -> "Hash Files"
      "{{ date_from }} Base64 files:"                  -> "{{ date_from }} Hash files:"
      "{{ date_from }} to {{ date_to }} Base64 files:" -> "... Hash files:"
      "{{ date_from }}, No base64"                     -> "{{ date_from }}, No Hash"
      "{{ date_from }} to {{ date_to }}, No base64"    -> "... No Hash"
    plus changes to the "Select a date range" form and to the per-hash table
    row ({{ b64[1] }}, {{ b64[2] }}, {{ b64[5] }}, {{ b64[6] }}, {{ b64[3] }})
    whose markup is not recoverable here]
@@ -176,7 +176,7 @@