fix: [Domain tag] avoid crawled tags

Terrtia 2019-11-11 14:54:12 +01:00
parent c5510acb73
commit 35b63f7bf4
2 changed files with 3 additions and 2 deletions


@@ -87,7 +87,7 @@ if __name__ == '__main__':
         server_metadata.sadd('tag:{}'.format(path), tag)
 
         # Domain Object
-        if Item.is_crawled(path):
+        if Item.is_crawled(path) and tag!='infoleak:submission="crawler"':
             domain = Item.get_item_domain(path)
             server_metadata.sadd('tag:{}'.format(domain), tag)
             server.sadd('domain:{}:{}'.format(tag, item_date), domain)
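
For context, a minimal sketch of the behaviour this first hunk produces. It is not the AIL source: the connection object r and the is_crawled / get_item_domain callables are stand-ins for the server_metadata/server connections and the Item helpers used above. The tag is still always attached to the item, but it is only propagated to the item's domain when the item was crawled and the tag is not the generic crawler-submission tag.

    # Sketch only; names other than the key layout are assumptions.
    CRAWLER_SUBMISSION_TAG = 'infoleak:submission="crawler"'

    def tag_item(r, path, tag, item_date, is_crawled, get_item_domain):
        # The item itself is always tagged.
        r.sadd('tag:{}'.format(path), tag)
        # Domain-level tagging now skips the crawler-submission tag, so
        # crawled domains are no longer blanket-tagged as crawler submissions.
        if is_crawled(path) and tag != CRAWLER_SUBMISSION_TAG:
            domain = get_item_domain(path)
            r.sadd('tag:{}'.format(domain), tag)
            r.sadd('domain:{}:{}'.format(tag, item_date), domain)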


@@ -22,6 +22,7 @@ if __name__ == "__main__":
 
     config_loader = ConfigLoader.ConfigLoader()
     r_serv = config_loader.get_redis_conn("ARDB_DB")
+    r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
    config_loader = None
 
    if r_serv.scard('ail:update_v1.5') != 5:
@@ -64,7 +65,7 @@ if __name__ == "__main__":
             process = subprocess.run(['python' ,update_file])
 
-            if int(r_serv.get('ail:current_background_script_stat')) != 100:
+            if int(r_serv_onion.scard('domain_update_v2.4')) != 0:
                 r_serv.set('ail:update_error', 'Update v2.4 Failed, please relaunch the bin/update-background.py script')
             else:
                 r_serv.delete('ail:update_in_progress')
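
The two hunks in the second file belong together: the updater opens an extra ARDB_Onion connection and then decides whether the v2.4 background update succeeded by checking the 'domain_update_v2.4' set instead of the percentage counter in 'ail:current_background_script_stat'. A minimal sketch of that check, with an assumed function name and the reading that a non-empty set means domains are still pending:

    # Sketch only; v24_update_finished is a hypothetical helper name.
    def v24_update_finished(r_serv_onion):
        # A non-empty 'domain_update_v2.4' set triggers the
        # 'Update v2.4 Failed' error above, so an empty set is taken to
        # mean every domain has been migrated.
        return r_serv_onion.scard('domain_update_v2.4') == 0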