add python 2 to 3 bash + fix Duplicate + clean

Terrtia 2018-05-03 16:21:33 +02:00
parent c52caebe7c
commit a900d5f08d
No known key found for this signature in database
GPG key ID: 1E1B1F50D84613D0
9 changed files with 9 additions and 69 deletions


@@ -3,7 +3,7 @@
"""
Base64 module
Detect Base64, decode it and send to XXX for reprocess
Detect Base64 and decode it
"""
import time
import os
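
The hunk above only tightens the Base64 module's docstring, but a minimal sketch of what "detect Base64 and decode it" can look like follows. The regex, length threshold, and function name are illustrative assumptions, not the module's actual implementation.

import base64
import binascii
import re

# Illustrative only: find long Base64-looking substrings in a paste and
# decode the ones that parse cleanly.
BASE64_RE = re.compile(r'[A-Za-z0-9+/]{40,}={0,2}')

def decode_base64_candidates(text):
    """Yield (candidate, decoded_bytes) for substrings that decode cleanly."""
    for candidate in BASE64_RE.findall(text):
        if len(candidate) % 4 != 0:   # valid Base64 length is a multiple of 4
            continue
        try:
            yield candidate, base64.b64decode(candidate, validate=True)
        except binascii.Error:
            continue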


@@ -77,7 +77,6 @@ if __name__ == "__main__":
tmp_dict[bname] = re.compile('|'.join(patterns), re.IGNORECASE)
prec_filename = None
print(tmp_dict)
while True:
filename = p.get_from_set()
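
The context lines above show the usual worker pattern in this codebase: compile one case-insensitive regex per category, then loop pulling filenames from the process queue via p.get_from_set(). A rough, self-contained sketch of that loop follows; the stand-in queue class and the example categories are assumptions, not the project's Helper API.

import re

class QueueStub:
    """Stand-in for the Redis-backed process helper behind p.get_from_set()."""
    def __init__(self, items):
        self._items = list(items)
    def get_from_set(self):
        return self._items.pop(0) if self._items else None

# Example categories and patterns, not the project's real lists.
patterns_by_category = {'Mail': ['@gmail', '@protonmail'],
                        'CreditCards': ['visa', 'mastercard']}
tmp_dict = {name: re.compile('|'.join(pats), re.IGNORECASE)
            for name, pats in patterns_by_category.items()}

p = QueueStub(['paste_2018-05-03_a.gz'])
while True:
    filename = p.get_from_set()
    if filename is None:
        break          # the real module waits and retries instead of exiting
    print('would scan', filename, 'against', sorted(tmp_dict))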


@@ -54,7 +54,6 @@ if __name__ == "__main__":
dico_redis[str(year)+str(month).zfill(2)] = redis.StrictRedis(
host=p.config.get("Redis_Level_DB", "host"), port=year,
db=month)
#print("dup: "+str(year)+str(month).zfill(2)+"\n")
# FUNCTIONS #
publisher.info("Script duplicate started")
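
The deleted line in this hunk is only a debug print; the surrounding context shows the pattern of keeping one Redis connection per "YYYYMM" bucket, with the port and database derived from the year and month. A small sketch, with a hard-coded host and year range standing in for the values the script reads from its configuration:

import redis   # assumes the redis-py client the project already depends on

# Host and year range are placeholders; the real values come from the config.
dico_redis = {}
for year in (2017, 2018):
    for month in range(1, 13):
        dico_redis[str(year) + str(month).zfill(2)] = redis.StrictRedis(
            host='localhost', port=year, db=month)   # port=year mirrors the diff

print(sorted(dico_redis)[:3])   # ['201701', '201702', '201703']
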
@@ -142,11 +141,12 @@ if __name__ == "__main__":
paste_date = paste_date.decode('utf8')
paste_date = paste_date if paste_date != None else "No date available"
if paste_path != None:
hash_dico[dico_hash] = (hash_type, paste_path, percent, paste_date)
if paste_path != PST.p_path:
hash_dico[dico_hash] = (hash_type, paste_path, percent, paste_date)
print('['+hash_type+'] '+'comparing: ' + str(PST.p_path[44:]) + ' and ' + str(paste_path[44:]) + ' percentage: ' + str(percent))
print('['+hash_type+'] '+'comparing: ' + str(PST.p_path[44:]) + ' and ' + str(paste_path[44:]) + ' percentage: ' + str(percent))
except Exception:
#print(str(e))
print('hash not comparable, bad hash: '+dico_hash+' , current_hash: '+paste_hash)
# Add paste in DB after checking to prevent its analysis twice
@@ -181,7 +181,6 @@ if __name__ == "__main__":
y = time.time()
publisher.debug('{}Processed in {} sec'.format(to_print, y-x))
#print '{}Processed in {} sec'.format(to_print, y-x)
except IOError:
to_print = 'Duplicate;{};{};{};'.format(
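
The core of the "fix Duplicate" change is visible above: a near-duplicate is only recorded when the candidate path differs from the paste currently being processed, so a paste can no longer be reported as a duplicate of itself. A condensed sketch of that logic; the function name and the shape of the candidates argument are assumptions:

def collect_duplicates(current_path, candidates):
    """Keep (hash_type, path, percent, date) tuples, skipping the paste itself.

    candidates is assumed to map a hash string to a
    (hash_type, paste_path, percent, paste_date) tuple.
    """
    hash_dico = {}
    for dico_hash, (hash_type, paste_path, percent, paste_date) in candidates.items():
        if paste_path is None:
            continue
        if paste_path == current_path:   # the self-comparison the commit removes
            continue
        hash_dico[dico_hash] = (hash_type, paste_path, percent, paste_date)
    return hash_dico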


@@ -29,7 +29,6 @@ def get_date_range(num_day):
def compute_most_posted(server, message):
print(message)
module, num, keyword, paste_date = message.split(';')
redis_progression_name_set = 'top_'+ module +'_set_' + paste_date
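
compute_most_posted splits its message on ';' into module, count, keyword and date, and derives a per-module, per-day key of the form 'top_<module>_set_<date>'. One plausible way to keep such a "most posted" ranking is a Redis sorted set, sketched below; the zincrby call (redis-py 3.x argument order) and the sample message are assumptions, not necessarily what the module does internally.

import redis   # assumes the redis-py client

def compute_most_posted(server, message):
    # Message format as shown in the diff: "module;num;keyword;paste_date"
    module, num, keyword, paste_date = message.split(';')
    redis_progression_name_set = 'top_' + module + '_set_' + paste_date
    # Illustrative storage choice: increment the keyword's score so the
    # top-N can later be read back with zrevrange.
    server.zincrby(redis_progression_name_set, int(num), keyword)

# Usage sketch:
# r = redis.StrictRedis(host='localhost', port=6379, db=0)
# compute_most_posted(r, 'credential;3;example.com;20180503')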


@@ -112,9 +112,6 @@ class Paste(object):
paste = self.cache.get(self.p_path)
if paste is None:
try:
#print('----------------------------------------------------------------')
#print(self.p_name)
#print('----------------------------------------------------------------')
with gzip.open(self.p_path, 'rb') as f:
paste = f.read()
self.cache.set(self.p_path, paste)
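
Apart from the deleted debug prints, the hunk above shows how Paste.get_p_content reads the gzipped paste from disk and memoizes it in a cache keyed by path. A cut-down sketch, with a plain dict standing in for the Redis cache the project uses:

import gzip

class PasteSketch:
    """Illustrative reduction of the caching read in Paste.get_p_content."""
    def __init__(self, p_path):
        self.p_path = p_path
        self.cache = {}          # stand-in for the project's Redis cache

    def get_p_content(self):
        paste = self.cache.get(self.p_path)
        if paste is None:
            try:
                with gzip.open(self.p_path, 'rb') as f:
                    paste = f.read()        # bytes under Python 3
                self.cache[self.p_path] = paste
            except OSError:                 # missing or corrupt .gz file
                paste = b''
        return paste
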
@@ -334,7 +331,6 @@ class Paste(object):
Save a new duplicate on other pastes
"""
for hash_type, path, percent, date in list_value:
print(hash_type, path, percent, date)
#get json
json_duplicate = self.store.hget(path, attr_name)
#json save on redis
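
This second hunk only drops a debug print around the duplicate-saving loop, which stores the duplicate list as JSON inside a Redis hash field (hget/hset on the paste's path). A sketch of that read-modify-write step, assuming store is a redis-py client and the argument names are placeholders:

import json

def save_duplicate(store, path, attr_name, list_value):
    """Append (hash_type, other_path, percent, date) entries to a JSON list
    kept in the Redis hash field attr_name of the key path."""
    for hash_type, other_path, percent, date in list_value:
        raw = store.hget(path, attr_name)           # bytes or None
        duplicates = json.loads(raw) if raw else []
        duplicates.append([hash_type, other_path, percent, date])
        store.hset(path, attr_name, json.dumps(duplicates))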


@@ -54,6 +54,9 @@ criticalNumberToAlert=8
#Will be considered as false positive if less than X matches from the top password list
minTopPassList=5
[Curve]
max_execution_time = 90
[Base64]
path = Base64/
max_execution_time = 60
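
The configuration gains a [Base64] section with a working path and a per-message time budget. Reading those options with the standard configparser module looks roughly like this; the inline config text stands in for the project's real configuration file so the example is self-contained:

import configparser

cfg = configparser.ConfigParser()
cfg.read_string("""
[Base64]
path = Base64/
max_execution_time = 60
""")

base64_path = cfg.get('Base64', 'path')                          # "Base64/"
max_execution_time = cfg.getint('Base64', 'max_execution_time')  # 60 (seconds)
print(base64_path, max_execution_time)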


@@ -74,10 +74,6 @@ subscribe = Redis_Url
subscribe = Redis_Url
publish = Redis_alertHandler,Redis_Duplicate
[Dox]
subscribe = Redis_Dox
publish = Redis_Duplicate,Redis_alertHandler
[ModuleStats]
subscribe = Redis_ModuleStats


@@ -1,51 +0,0 @@
#Essential
redis
pyzmq
dnspython
logbook
pubsublogger
textblob
#Graph
numpy
matplotlib
networkx
terminaltables
colorama
asciimatics
#Tokeniser
nltk
# Hashlib
crcmod
mmh3
ssdeep
python-Levenshtein
#Others
python-magic
pybloomfiltermmap
psutil
phonenumbers
ipython
flask
texttable
#DomainClassifier
DomainClassifier
#Indexer requirements
whoosh
ipaddress
pycountry
# To fetch Onion urls
PySocks
#ASN lookup requirements
https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/adns-python/adns-python-1.2.1.tar.gz
https://github.com/trolldbois/python-cymru-services/archive/master.zip
https://github.com/saffsd/langid.py/archive/master.zip


@@ -48,6 +48,7 @@ def getPastebyType(server, module_name):
all_path = []
for path in server.smembers('WARNING_'+module_name):
all_path.append(path)
return all_path
@@ -95,8 +96,6 @@ def importantPasteByModule():
for path in allPastes[0:10]:
path = path.decode('utf8')
all_path.append(path)
#print(path)
#print(type(path))
paste = Paste.Paste(path)
content = paste.get_p_content()
content_range = max_preview_char if len(content)>max_preview_char else len(content)-1
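
The .decode('utf8') calls seen in this view are the recurring Python 3 adjustment across the commit: redis-py returns bytes by default, so values read back from Redis must be decoded before being used as strings. Two equivalent ways to handle it, sketched with a placeholder set name and assuming a local Redis server:

import redis   # assumes the redis-py client

# Requires a Redis server on localhost:6379; 'WARNING_Credential' is a placeholder.
r = redis.StrictRedis(host='localhost', port=6379, db=0)

# Option 1: decode each value manually, as the diff does.
paths = [p.decode('utf8') for p in r.smembers('WARNING_Credential')]

# Option 2: ask the client to decode every response up front.
r_str = redis.StrictRedis(host='localhost', port=6379, db=0,
                          decode_responses=True)
paths = list(r_str.smembers('WARNING_Credential'))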