Commit a900d5f08d (parent c52caebe7c)
Repository: https://github.com/ail-project/ail-framework.git

    add python 2 to 3 bash + fix Duplicate + clean

9 changed files with 9 additions and 69 deletions

@@ -3,7 +3,7 @@
 """
 Base64 module
 
-Dectect Base64, decode it and send to XXX for reprocess
+Dectect Base64 and decode it
 """
 import time
 import os

@@ -77,7 +77,6 @@ if __name__ == "__main__":
         tmp_dict[bname] = re.compile('|'.join(patterns), re.IGNORECASE)
 
     prec_filename = None
-    print(tmp_dict)
 
     while True:
         filename = p.get_from_set()

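This hunk (likely the Categ module, given the per-category pattern dictionary; the original file header was lost) drops a leftover debug print from the worker setup. For orientation, a minimal sketch of the consumer-loop pattern AIL modules follow, assuming the framework's Helper Process wrapper whose get_from_set call appears in the hunk; the section name and forwarding channel are illustrative:

    import time
    from Helper import Process

    # Bind this worker to its queues, as declared in modules.cfg.
    p = Process('Categ')  # section name is an assumption

    while True:
        filename = p.get_from_set()  # pop the next paste path from the subscribed queue
        if filename is None:
            time.sleep(1)            # nothing queued, idle briefly
            continue
        # ... process the paste behind `filename`, then forward it:
        # p.populate_set_out(filename, 'SomeChannel')  # channel name hypothetical
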
@@ -54,7 +54,6 @@ if __name__ == "__main__":
         dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
             host=p.config.get("Redis_Level_DB", "host"), port=year,
             db=month)
-        #print("dup: "+str(year)+str(month).zfill(2)+"\n")
 
     # FUNCTIONS #
     publisher.info("Script duplicate started")

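The Duplicates setup above shards its Redis handles by date: the port encodes the year and the db index the month, keyed as 'YYYYMM'. A minimal sketch of that keying scheme in isolation; the host and year range are illustrative:

    import redis

    dico_redis = {}
    for year in range(2013, 2019):      # illustrative range
        for month in range(1, 13):
            # e.g. dico_redis['201801'] -> handle on port 2018, db 1
            dico_redis[str(year) + str(month).zfill(2)] = redis.StrictRedis(
                host='localhost', port=year, db=month)
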
@@ -142,11 +141,12 @@ if __name__ == "__main__":
                 paste_date = paste_date.decode('utf8')
                 paste_date = paste_date if paste_date != None else "No date available"
                 if paste_path != None:
-                    hash_dico[dico_hash] = (hash_type, paste_path, percent, paste_date)
+                    if paste_path != PST.p_path:
+                        hash_dico[dico_hash] = (hash_type, paste_path, percent, paste_date)
+
+                        print('['+hash_type+'] '+'comparing: ' + str(PST.p_path[44:]) + ' and ' + str(paste_path[44:]) + ' percentage: ' + str(percent))
 
-                    print('['+hash_type+'] '+'comparing: ' + str(PST.p_path[44:]) + ' and ' + str(paste_path[44:]) + ' percentage: ' + str(percent))
             except Exception:
-                #print(str(e))
                 print('hash not comparable, bad hash: '+dico_hash+' , current_hash: '+paste_hash)
 
             # Add paste in DB after checking to prevent its analysis twice

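This is the "fix Duplicate" part of the commit: a paste always matches its own hashes, so without the new guard it would be recorded as a duplicate of itself. A minimal sketch of the guard, wrapped in a hypothetical standalone helper around the names used in the hunk:

    def record_duplicate(hash_dico, current_path, dico_hash,
                         hash_type, paste_path, percent, paste_date):
        # Skip self-comparison: only a *different* paste counts as a duplicate.
        if paste_path is not None and paste_path != current_path:
            hash_dico[dico_hash] = (hash_type, paste_path, percent, paste_date)
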
@@ -181,7 +181,6 @@ if __name__ == "__main__":
                 y = time.time()
 
                 publisher.debug('{}Processed in {} sec'.format(to_print, y-x))
-                #print '{}Processed in {} sec'.format(to_print, y-x)
 
         except IOError:
             to_print = 'Duplicate;{};{};{};'.format(

@@ -29,7 +29,6 @@ def get_date_range(num_day):
 
 
 def compute_most_posted(server, message):
-    print(message)
     module, num, keyword, paste_date = message.split(';')
 
     redis_progression_name_set = 'top_'+ module +'_set_' + paste_date

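compute_most_posted consumes 'module;num;keyword;paste_date' messages and tallies keywords in a per-module, per-day Redis key. A sketch of the parsing and key scheme visible in this hunk; the zincrby tally is an assumption about the step that follows:

    def compute_most_posted(server, message):
        # message format: 'module;num;keyword;paste_date'
        module, num, keyword, paste_date = message.split(';')

        # One ranking per module and day, e.g. 'top_credential_set_20180103'
        redis_progression_name_set = 'top_' + module + '_set_' + paste_date
        server.zincrby(redis_progression_name_set, keyword, int(num))  # assumed tally step
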
@@ -112,9 +112,6 @@ class Paste(object):
         paste = self.cache.get(self.p_path)
         if paste is None:
             try:
-                #print('----------------------------------------------------------------')
-                #print(self.p_name)
-                #print('----------------------------------------------------------------')
                 with gzip.open(self.p_path, 'rb') as f:
                     paste = f.read()
                     self.cache.set(self.p_path, paste)

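The surrounding method is a read-through cache: the gzipped paste is decompressed from disk only on a cache miss, then stored for subsequent calls. The same pattern in isolation, assuming a cache object with Redis-style get/set:

    import gzip

    def get_paste_content(cache, p_path):
        paste = cache.get(p_path)        # cache hit: skip the disk entirely
        if paste is None:
            with gzip.open(p_path, 'rb') as f:
                paste = f.read()         # miss: decompress once...
            cache.set(p_path, paste)     # ...and remember the result
        return paste
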
@@ -334,7 +331,6 @@ class Paste(object):
         Save a new duplicate on others pastes
         """
         for hash_type, path, percent, date in list_value:
-            print(hash_type, path, percent, date)
             #get json
             json_duplicate = self.store.hget(path, attr_name)
             #json save on redis

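Each paste keeps its duplicate list as a JSON blob in a Redis hash field, so saving a new duplicate is a read-modify-write on that blob. A hedged sketch of the full cycle; only the hget appears in the hunk, the append and hset are assumptions about the lines that follow:

    import json

    def save_duplicate(store, path, attr_name, hash_type, other_path, percent, date):
        json_duplicate = store.hget(path, attr_name)  # current JSON list, may be None
        duplicates = json.loads(json_duplicate) if json_duplicate else []
        duplicates.append([hash_type, other_path, percent, date])
        store.hset(path, attr_name, json.dumps(duplicates))  # write the list back
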
@@ -54,6 +54,9 @@ criticalNumberToAlert=8
 #Will be considered as false positive if less that X matches from the top password list
 minTopPassList=5
 
+[Curve]
+max_execution_time = 90
+
 [Base64]
 path = Base64/
 max_execution_time = 60

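The new [Curve] entry gives that module its own 90-second execution budget, mirroring the existing [Base64] one. A sketch of how a module would read such a value with the standard library parser; the config path is hypothetical:

    import configparser

    config = configparser.ConfigParser()
    config.read('bin/packages/config.cfg')  # path is an assumption

    # Per-module timeout in seconds, from the section added above.
    max_execution_time = config.getint('Curve', 'max_execution_time')
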
@@ -74,10 +74,6 @@ subscribe = Redis_Url
 subscribe = Redis_Url
 publish = Redis_alertHandler,Redis_Duplicate
 
-[Dox]
-subscribe = Redis_Dox
-publish = Redis_Duplicate,Redis_alertHandler
-
 [ModuleStats]
 subscribe = Redis_ModuleStats
 

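Removing the [Dox] section unplugs that module from the message queues; each remaining section still declares, per module, which channel it consumes and which it feeds. A sketch of how a module declared here publishes downstream, assuming the Helper Process API; the message fields are illustrative:

    from Helper import Process

    p = Process('ModuleStats')  # picks up subscribe/publish from modules.cfg

    # Forward a message on one of this module's publish channels.
    # Fields follow the 'module;num;keyword;date' convention seen above;
    # the concrete values here are illustrative only.
    p.populate_set_out('credential;3;password;20180103', 'ModuleStats')
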
@@ -1,51 +0,0 @@
-#Essential
-redis
-pyzmq
-dnspython
-logbook
-pubsublogger
-textblob
-
-#Graph
-numpy
-matplotlib
-networkx
-terminaltables
-colorama
-asciimatics
-
-#Tokeniser
-nltk
-
-# Hashlib
-crcmod
-mmh3
-ssdeep
-python-Levenshtein
-
-#Others
-python-magic
-pybloomfiltermmap
-psutil
-phonenumbers
-
-ipython
-flask
-texttable
-
-#DomainClassifier
-DomainClassifier
-#Indexer requirements
-whoosh
-
-ipaddress
-pycountry
-
-# To fetch Onion urls
-PySocks
-
-#ASN lookup requirements
-https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/adns-python/adns-python-1.2.1.tar.gz
-https://github.com/trolldbois/python-cymru-services/archive/master.zip
-
-https://github.com/saffsd/langid.py/archive/master.zip

@@ -48,6 +48,7 @@ def getPastebyType(server, module_name):
     all_path = []
     for path in server.smembers('WARNING_'+module_name):
         all_path.append(path)
+
     return all_path
 
 

@@ -95,8 +96,6 @@ def importantPasteByModule():
     for path in allPastes[0:10]:
         path = path.decode('utf8')
         all_path.append(path)
-        #print(path)
-        #print(type(path))
         paste = Paste.Paste(path)
         content = paste.get_p_content()
         content_range = max_preview_char if len(content)>max_preview_char else len(content)-1

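importantPasteByModule truncates each paste body to max_preview_char characters before handing it to the template. The truncation expression in isolation, as a hypothetical helper; note that the else branch drops the final character, exactly as in the source:

    def preview(content, max_preview_char):
        content_range = (max_preview_char if len(content) > max_preview_char
                         else len(content)-1)
        return content[0:content_range]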