ail-project/ail-framework (mirror of https://github.com/ail-project/ail-framework.git)

commit 6165987ec3 (parent 0519b4a437)
chg: [DB Migration] add hash dynamic update

5 changed files with 37 additions and 6 deletions
@@ -10,8 +10,7 @@ import configparser
 
 def update_hash_item(has_type):
     #get all hash items:
-    #all_base64 = r_serv_tag.smembers('infoleak:automatic-detection=\"{}\"'.format(has_type))
-    all_hash_items = r_serv_tag.smembers('infoleak:automatic-detection=\"{}\":20190307'.format(has_type))
+    all_base64 = r_serv_tag.smembers('infoleak:automatic-detection=\"{}\"'.format(has_type))
     for item_path in all_hash_items:
         if PASTES_FOLDER in item_path:
             base64_key = '{}_paste:{}'.format(has_type, item_path)
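This hunk moves update_hash_item() off the date-suffixed snapshot tag (infoleak:automatic-detection=\"{}\":20190307) and back onto the live tag; note that, as extracted, the loop header still iterates all_hash_items. A minimal self-contained sketch of what the loop does, assuming redis-py connections like the script's r_serv_tag and r_serv_metadata; the helper name update_hash_items, the consistent loop variable, and the exists() guard are illustrative, not part of the commit:

def update_hash_items(r_serv_tag, r_serv_metadata, has_type, pastes_folder):
    # fetch every item carrying the detection tag for this hash type
    tag = 'infoleak:automatic-detection="{}"'.format(has_type)
    for item_path in r_serv_tag.smembers(tag):
        if pastes_folder in item_path:
            # move the per-item key to its folder-stripped name
            old_key = '{}_paste:{}'.format(has_type, item_path)
            new_key = '{}_paste:{}'.format(has_type, item_path.replace(pastes_folder, '', 1))
            if r_serv_metadata.exists(old_key):
                r_serv_metadata.rename(old_key, new_key)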
@@ -76,13 +75,30 @@ if __name__ == '__main__':
     update_hash_item('hexadecimal')
 
     # Update onion metadata
-    #all_crawled_items = r_serv_tag.smembers('infoleak:submission=\"crawler\"')
-    all_crawled_items = r_serv_tag.smembers('infoleak:submission=\"crawler\":20190227')
+    all_crawled_items = r_serv_tag.smembers('infoleak:submission=\"crawler\"')
     for item_path in all_crawled_items:
+        domain = None
         if PASTES_FOLDER in item_path:
-            item_metadata = 'paste_metadata:{}'.format(item_path)
+            old_item_metadata = 'paste_metadata:{}'.format(item_path)
+            item_path = item_path.replace(PASTES_FOLDER, '', 1)
+            new_item_metadata = 'paste_metadata:{}'.format(item_path)
             ## TODO: catch error
-            r_serv_metadata.rename(item_metadata, item_metadata.replace(PASTES_FOLDER, '', 1))
+            r_serv_metadata.rename(old_item_metadata, new_item_metadata)
+            # update domain port
+            domain = r_serv_metadata.hget('paste_metadata:{}'.format(item_path), 'domain')
+            if domain:
+                r_serv_metadata.hset('paste_metadata:{}'.format(item_path), 'domain', '{}:80'.format(domain))
+            super_father = r_serv_metadata.hget('paste_metadata:{}'.format(item_path), 'super_father')
+            if super_father:
+                if PASTES_FOLDER in super_father:
+                    r_serv_metadata.hset('paste_metadata:{}'.format(item_path), 'super_father', super_father.replace(PASTES_FOLDER, '', 1))
+            father = r_serv_metadata.hget('paste_metadata:{}'.format(item_path), 'father')
+            if father:
+                if PASTES_FOLDER in father:
+                    r_serv_metadata.hset('paste_metadata:{}'.format(item_path), 'father', father.replace(PASTES_FOLDER, '', 1))
+
+
+
 
 ######################################################################################################################
 ######################################################################################################################
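The ## TODO: catch error comment survives the rewrite: redis-py's rename() raises redis.exceptions.ResponseError ("no such key") when the source key is absent, for example when an item was already migrated. A sketch of the guarded rename; rename_paste_metadata is a hypothetical helper name:

import redis

def rename_paste_metadata(r_serv_metadata, item_path, pastes_folder):
    old_key = 'paste_metadata:{}'.format(item_path)
    new_key = 'paste_metadata:{}'.format(item_path.replace(pastes_folder, '', 1))
    try:
        r_serv_metadata.rename(old_key, new_key)
    except redis.exceptions.ResponseError:
        # source key missing: already migrated or never created
        pass

The remaining additions backfill a default :80 port on the stored domain and strip the folder prefix from the father and super_father fields of the renamed hash.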
@@ -121,11 +121,17 @@ if __name__ == '__main__':
 
 
     '''
+    # update crawler queue
     for elem in r_serv_onion.smembers('onion_crawler_queue'):
         if PASTES_FOLDER in elem:
             r_serv_onion.srem('onion_crawler_queue', elem)
             r_serv_onion.sadd('onion_crawler_queue', elem.replace(PASTES_FOLDER, '', 1))
             index = index +1
+    for elem in r_serv_onion.smembers('onion_crawler_priority_queue'):
+        if PASTES_FOLDER in elem:
+            r_serv_onion.srem('onion_crawler_queue', elem)
+            r_serv_onion.sadd('onion_crawler_queue', elem.replace(PASTES_FOLDER, '', 1))
+            index = index +1
     '''
 
 
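The newly added (still commented-out) priority-queue loop calls srem() and sadd() on onion_crawler_queue rather than onion_crawler_priority_queue, which looks like a copy-paste slip. If so, one parameterised helper would cover both queues; fix_crawler_queue is a hypothetical name:

def fix_crawler_queue(r_serv_onion, queue_name, pastes_folder):
    # rewrite queued paths so they no longer embed the on-disk pastes folder
    fixed = 0
    for elem in r_serv_onion.smembers(queue_name):
        if pastes_folder in elem:
            r_serv_onion.srem(queue_name, elem)
            r_serv_onion.sadd(queue_name, elem.replace(pastes_folder, '', 1))
            fixed += 1
    return fixed

# fix_crawler_queue(r_serv_onion, 'onion_crawler_queue', PASTES_FOLDER)
# fix_crawler_queue(r_serv_onion, 'onion_crawler_priority_queue', PASTES_FOLDER)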
@@ -163,6 +163,7 @@ bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info']
 UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted')
 
 PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes"))
+PASTES_FOLDERS = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes")) + '/'
 SCREENSHOT_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "crawled_screenshot"))
 
 max_dashboard_logs = int(cfg.get("Flask", "max_dashboard_logs"))
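PASTES_FOLDERS duplicates PASTES_FOLDER with a trailing slash. The slash matters for the replace(..., '', 1) calls used throughout this commit: stripping the prefix without it leaves a leading '/' on the item path. A quick illustration, with made-up example paths:

PASTES_FOLDER = '/opt/AIL/PASTES'        # assumed example value
PASTES_FOLDERS = PASTES_FOLDER + '/'

item = '/opt/AIL/PASTES/archive/2019/03/07/foo.gz'
print(item.replace(PASTES_FOLDER, '', 1))   # '/archive/2019/03/07/foo.gz'
print(item.replace(PASTES_FOLDERS, '', 1))  # 'archive/2019/03/07/foo.gz'

The hashDecoded blueprint below deliberately binds its PASTES_FOLDER to this slash-terminated variant.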
@@ -25,6 +25,7 @@ baseUrl = Flask_config.baseUrl
 r_serv_metadata = Flask_config.r_serv_metadata
 vt_enabled = Flask_config.vt_enabled
 vt_auth = Flask_config.vt_auth
+PASTES_FOLDER = Flask_config.PASTES_FOLDERS
 
 hashDecoded = Blueprint('hashDecoded', __name__, template_folder='templates')
 
@@ -589,6 +590,12 @@ def hash_graph_node_json():
     #get related paste
     l_pastes = r_serv_metadata.zrange('nb_seen_hash:'+hash, 0, -1)
     for paste in l_pastes:
+        # dynamic update
+        if PASTES_FOLDER in paste:
+            score = r_serv_metadata.zscore('nb_seen_hash:{}'.format(hash), paste)
+            r_serv_metadata.zrem('nb_seen_hash:{}'.format(hash), paste)
+            paste = paste.replace(PASTES_FOLDER, '', 1)
+            r_serv_metadata.zadd('nb_seen_hash:{}'.format(hash), score, paste)
         url = paste
         #nb_seen_in_this_paste = nb_in_file = int(r_serv_metadata.zscore('nb_seen_hash:'+hash, paste))
         nb_hash_in_paste = r_serv_metadata.scard('hash_paste:'+paste)
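The dynamic update re-keys a sorted-set member in place: read the score, zrem() the folder-prefixed member, zadd() the stripped one. The zadd(key, score, member) call uses the positional signature of redis-py before 3.0; from redis-py 3.0 on, zadd() takes a mapping. The same step in the newer form, as a sketch with a hypothetical helper name:

def migrate_zset_member(r_serv_metadata, hash_id, paste, pastes_folder):
    key = 'nb_seen_hash:{}'.format(hash_id)
    score = r_serv_metadata.zscore(key, paste)
    if score is None or pastes_folder not in paste:
        return paste                              # nothing to migrate
    r_serv_metadata.zrem(key, paste)
    paste = paste.replace(pastes_folder, '', 1)
    r_serv_metadata.zadd(key, {paste: score})     # redis-py >= 3.0 mapping form
    return paste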
@@ -154,6 +154,7 @@ def showpaste(content_range, requested_path):
     if r_serv_metadata.scard('hash_paste:'+requested_path) > 0:
         set_b64 = r_serv_metadata.smembers('hash_paste:'+requested_path)
         for hash in set_b64:
+            print(requested_path)
             nb_in_file = int(r_serv_metadata.zscore('nb_seen_hash:'+hash, requested_path))
             estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type')
             file_type = estimated_type.split('/')[0]
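The added print(requested_path) reads like leftover debugging, and the next line hints at why it was needed: zscore() returns None when the member is absent, so int(...) raises TypeError for paths that still embed PASTES_FOLDER, exactly the entries the dynamic update above rewrites. A defensive variant of that line, as a sketch:

raw_score = r_serv_metadata.zscore('nb_seen_hash:' + hash, requested_path)
nb_in_file = int(raw_score) if raw_score is not None else 0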