From 03a37af5cfe8e34c5cf43acc0288f08ed0cdf352 Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Wed, 25 Jul 2018 10:47:36 +0200
Subject: [PATCH] chg: [Hash] add reprocess regex order

---
 bin/Decoder.py                                | 46 +++++++++++--------
 .../modules/hashDecoded/Flask_hashDecoded.py  |  4 +-
 .../hashDecoded/templates/hashDecoded.html    | 11 +++--
 3 files changed, 35 insertions(+), 26 deletions(-)

diff --git a/bin/Decoder.py b/bin/Decoder.py
index 7c9abb78..399cd4a8 100755
--- a/bin/Decoder.py
+++ b/bin/Decoder.py
@@ -85,29 +85,33 @@ def save_hash(decoder_name, message, date, decoded):
     else:
         serv_metadata.hset('metadata_hash:'+hash, 'last_seen', date_paste)
 
-    # first time we see this file encoding on this paste
-    if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None:
-        print('first '+decoder_name)
+    # first time we see this hash (any encoding) on this paste
+    if serv_metadata.zscore('nb_seen_hash:'+hash, message) is None:
         serv_metadata.hincrby('metadata_hash:'+hash, 'nb_seen_in_all_pastes', 1)
-        serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1)
-
         serv_metadata.sadd('hash_paste:'+message, hash) # paste - hash map
-        serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map
-        serv_metadata.zincrby('nb_seen_hash:'+hash, message, 1)# hash - paste map
-        serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1)# hash - paste map
-
         # create hash metadata
         serv_metadata.hset('metadata_hash:'+hash, 'estimated_type', type)
         serv_metadata.sadd('hash_all_type', type)
+
+    # first time we see this hash with this encoding on this paste
+    if serv_metadata.zscore(decoder_name+'_hash:'+hash, message) is None:
+        print('first '+decoder_name)
+
+        serv_metadata.sadd(decoder_name+'_paste:'+message, hash) # paste - hash map
+
+        # create hash metadata
         serv_metadata.sadd('hash_'+ decoder_name +'_all_type', type)
-        serv_metadata.zincrby('hash_type:'+type, date_key, 1)
-        serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1)
 
         save_hash_on_disk(decoded, type, hash, json_data)
         print('found {} '.format(type))
-    # duplicate
-    else:
-        serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1) # number of b64 on this paste
+
+    serv_metadata.hincrby('metadata_hash:'+hash, decoder_name+'_decoder', 1)
+
+    serv_metadata.zincrby('hash_type:'+type, date_key, 1)
+    serv_metadata.zincrby(decoder_name+'_type:'+type, date_key, 1)
+
+    serv_metadata.zincrby('nb_seen_hash:'+hash, message, 1)# hash - paste map
+    serv_metadata.zincrby(decoder_name+'_hash:'+hash, message, 1) # number of b64 on this paste
 
 
 def save_hash_on_disk(decode, type, hash, json_data):
@@ -182,12 +186,14 @@ if __name__ == '__main__':
     binary_max_execution_time = p.config.getint("Binary", "max_execution_time")
     base64_max_execution_time = p.config.getint("Base64", "max_execution_time")
 
-    # list all decoder yith regex, the order is use to search content by order
-    all_decoder = [ {'name': 'binary', 'regex': regex_binary, 'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time},
-                    {'name': 'hexadecimal', 'regex': regex_hex, 'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time},
-                    {'name': 'base64', 'regex': regex_base64, 'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time}]
+    # list all decoders with their regex
+    decoder_binary = {'name': 'binary', 'regex': regex_binary, 'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time}
+    decoder_hexadecimal = {'name': 'hexadecimal', 'regex': regex_hex, 'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time}
+    decoder_base64 = {'name': 'base64', 'regex': regex_base64, 'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time}
 
-    for decoder in all_decoder:
+    decoder_order = [ decoder_base64, decoder_binary, decoder_hexadecimal, decoder_base64]
+
+    for decoder in decoder_order:
         serv_metadata.sadd('all_decoder', decoder['name'])
 
     # Endless loop getting messages from the input queue
@@ -207,7 +213,7 @@ if __name__ == '__main__':
             content = paste.get_p_content()
             date = str(paste._get_p_date())
 
-            for decoder in all_decoder: # add threshold and size limit
+            for decoder in decoder_order: # add threshold and size limit
 
                 # max execution time on regex
                 signal.alarm(decoder['max_execution_time'])
diff --git a/var/www/modules/hashDecoded/Flask_hashDecoded.py b/var/www/modules/hashDecoded/Flask_hashDecoded.py
index 29f15275..174b5e10 100644
--- a/var/www/modules/hashDecoded/Flask_hashDecoded.py
+++ b/var/www/modules/hashDecoded/Flask_hashDecoded.py
@@ -364,9 +364,7 @@ def decoder_type_json():
     all_decoder = r_serv_metadata.smembers('all_decoder')
 
     # sort DESC decoder for color
-    all_decoder = sorted(all_decoder, reverse=True)
-    print(all_decoder)
-    print(type(all_decoder))
+    all_decoder = sorted(all_decoder)
 
     date_range = []
     if date_from is not None and date_to is not None:
diff --git a/var/www/modules/hashDecoded/templates/hashDecoded.html b/var/www/modules/hashDecoded/templates/hashDecoded.html
index 6cffa9cc..84927981 100644
--- a/var/www/modules/hashDecoded/templates/hashDecoded.html
+++ b/var/www/modules/hashDecoded/templates/hashDecoded.html
@@ -47,6 +47,9 @@
     .bar_stack:hover{
       cursor: pointer;
     }
+    .pie_path:hover{
+      cursor: pointer;
+    }
     .svgText {
       pointer-events: none;
    }
@@ -318,7 +321,7 @@ function sparklines(id, points) {
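
Note on the save_hash() change: the single "first time we see this" test, keyed on the
per-decoder sorted set, is split into two tests, and the statistics counters move out of
the conditional so that duplicates are counted too. The sketch below is illustrative
Python, not AIL code: save_hash_sketch and the plain-dict containers are hypothetical
stand-ins for the Redis keys, whose names mirror the patch.

from collections import defaultdict

# hash -> {paste: times seen}, across every encoding ('nb_seen_hash:')
nb_seen_hash = defaultdict(dict)
# (decoder, hash) -> {paste: times seen}, per encoding ('<decoder>_hash:')
decoder_hash = defaultdict(dict)
# hash -> counter name -> value ('metadata_hash:')
metadata_hash = defaultdict(lambda: defaultdict(int))

def save_hash_sketch(decoder_name, paste, sha1_hash):
    # 1. first sighting of this hash on this paste, whatever the encoding:
    #    the global metadata is created exactly once
    if paste not in nb_seen_hash[sha1_hash]:
        metadata_hash[sha1_hash]['nb_seen_in_all_pastes'] += 1

    # 2. first sighting with *this* encoding: this is where the patch tags
    #    the per-decoder type and calls save_hash_on_disk(), once per decoder
    if paste not in decoder_hash[(decoder_name, sha1_hash)]:
        print('first ' + decoder_name)

    # 3. always runs, duplicates included; before the patch most of these
    #    counters were only updated on the first sighting
    metadata_hash[sha1_hash][decoder_name + '_decoder'] += 1
    nb_seen_hash[sha1_hash][paste] = nb_seen_hash[sha1_hash].get(paste, 0) + 1
    per_decoder = decoder_hash[(decoder_name, sha1_hash)]
    per_decoder[paste] = per_decoder.get(paste, 0) + 1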
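
Note on the reprocess order: decoder_order deliberately lists decoder_base64 both first
and last. The binary and hexadecimal passes decode blobs whose content can itself contain
base64, which only a second base64 pass can match. The decode loop is not part of this
patch, so the following is a minimal self-contained sketch rather than AIL code:
search_all, b64_pass, hex_pass, the simplified regexes and the step that rewrites the
content are all assumptions, and the binary pass is omitted for brevity.

import base64
import re

# simplified stand-ins for regex_base64 / regex_hex, using the patch's
# encoded_min_size thresholds (40 for base64, 300 for hexadecimal)
RE_BASE64 = re.compile(r'[A-Za-z0-9+/]{40,}={1,2}')
RE_HEX = re.compile(r'(?:[0-9a-fA-F]{2}){150,}')

def b64_pass(content, found):
    for match in RE_BASE64.findall(content):
        if len(match) % 4 != 0:
            continue  # b64decode would reject the padding
        found.append(('base64', base64.b64decode(match)))
        content = content.replace(match, '', 1)  # consume the match
    return content

def hex_pass(content, found):
    for match in RE_HEX.findall(content):
        decoded = bytes.fromhex(match)
        found.append(('hexadecimal', decoded))
        # assumption: expose the decoded bytes to the passes that follow
        content = content.replace(match, decoded.decode('ascii', 'ignore'), 1)
    return content

def search_all(content):
    found = []
    content = b64_pass(content, found)  # 1st base64 pass, raw content
    content = hex_pass(content, found)  # hex pass may expose new base64...
    content = b64_pass(content, found)  # ...caught by the reprocess pass
    return found

# a base64 payload hidden inside a hex blob is only found on the 2nd pass
payload = base64.b64encode(b'S' * 190).decode()  # 256 chars, ends with '=='
paste = 'AA ' + payload.encode().hex() + ' BB'
print([name for name, _ in search_all(paste)])   # ['hexadecimal', 'base64']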