fix: [faup] fix new return types (bytes to str)

Terrtia 2019-05-06 13:38:13 +02:00
parent a4c03b4ba4
commit 2606220c2b
6 changed files with 87 additions and 16 deletions
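
Background: pyfaup changed the type of the fields returned by faup.get() -- values such as 'domain', 'tld' and 'port' that used to arrive as bytes may now arrive as str. Rather than pinning a library version, the commit adds a defensive conversion at every call site, each marked "# FIXME: remove me" so it can be dropped once the faup API settles. The repeated inline pattern is equivalent to a small helper; this is only an illustrative sketch (the commit deliberately inlines the logic), and the name force_str is hypothetical:

    def force_str(value):
        # bytes from an older pyfaup -> decode to str
        if isinstance(value, bytes):
            return value.decode()
        # str (or None) from a newer pyfaup -> unchanged
        return value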

View file

@@ -124,6 +124,11 @@ if __name__ == "__main__":
 for url in sites:
     faup.decode(url)
     domain = faup.get()['domain']
+    ## TODO: # FIXME: remove me
+    try:
+        domain = domain.decode()
+    except:
+        pass
     if domain in creds_sites.keys():
         creds_sites[domain] += 1
     else:
@@ -143,6 +148,11 @@ if __name__ == "__main__":
     maildomains = re.findall("@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
     faup.decode(maildomains)
     tld = faup.get()['tld']
+    ## TODO: # FIXME: remove me
+    try:
+        tld = tld.decode()
+    except:
+        pass
     server_statistics.hincrby('credential_by_tld:'+date, tld, 1)
 else:
     publisher.info(to_print)
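
Why the bare try/except works: under Python 3, str has no .decode() method, so when faup already returns str the call raises AttributeError and the except clause keeps the original value; only genuine bytes are decoded. The same holds when the field is None. A quick interpreter check (assuming Python 3):

    >>> b'example.com'.decode()
    'example.com'
    >>> 'example.com'.decode()
    Traceback (most recent call last):
      ...
    AttributeError: 'str' object has no attribute 'decode'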

View file

@@ -29,7 +29,16 @@ def analyse(url, path):
     faup.decode(url)
     url_parsed = faup.get()
     pprint.pprint(url_parsed)
+    ## TODO: # FIXME: remove me
+    try:
+        resource_path = url_parsed['resource_path'].encode()
+    except:
+        resource_path = url_parsed['resource_path']
+    ## TODO: # FIXME: remove me
+    try:
+        query_string = url_parsed['query_string'].encode()
+    except:
+        query_string = url_parsed['query_string']
     result_path = {'sqli' : False}
@@ -56,6 +65,10 @@ def analyse(url, path):
     p.populate_set_out(msg, 'Tags')
     #statistics
+    ## TODO: # FIXME: remove me
+    try:
+        tld = url_parsed['tld'].decode()
+    except:
+        tld = url_parsed['tld']
     if tld is not None:
         date = datetime.datetime.now().strftime("%Y%m")
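
Note the two directions in this file: resource_path and query_string are coerced toward bytes (the downstream injection check apparently expects bytes), while tld is coerced toward str for use as a Redis hash field. The bytes direction mirrors the str pattern above; the helper name force_bytes is again hypothetical:

    def force_bytes(value):
        # str from a newer pyfaup -> encode to bytes
        if isinstance(value, str):
            return value.encode()
        # bytes (or None) from an older pyfaup -> unchanged
        return value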

View file

@@ -95,6 +95,10 @@ if __name__ == "__main__":
     faup.decode(mail)
     tld = faup.get()['tld']
+    try:
+        tld = tld.decode()
+    except:
+        pass
     server_statistics.hincrby('mail_by_tld:'+date, tld, MX_values[1][mail])
 else:

View file

@@ -68,10 +68,20 @@ def analyse(url, path):
     result_query = 0
     if resource_path is not None:
-        result_path = is_sql_injection(resource_path.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            resource_path = resource_path.decode()
+        except:
+            pass
+        result_path = is_sql_injection(resource_path)
     if query_string is not None:
-        result_query = is_sql_injection(query_string.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            query_string = query_string.decode()
+        except:
+            pass
+        result_query = is_sql_injection(query_string)
     if (result_path > 0) or (result_query > 0):
         paste = Paste.Paste(path)
@@ -89,6 +99,11 @@ def analyse(url, path):
     #statistics
     tld = url_parsed['tld']
     if tld is not None:
+        ## TODO: # FIXME: remove me
+        try:
+            tld = tld.decode()
+        except:
+            pass
         date = datetime.datetime.now().strftime("%Y%m")
         server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)
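
The statistics calls in this commit share one idiom: one Redis hash per month ('<name>_by_tld:YYYYMM') whose fields are TLDs and whose values are counters. A standalone sketch of that idiom, assuming a local Redis reachable via redis-py (names are illustrative, not part of the commit):

    import datetime
    import redis

    server_statistics = redis.StrictRedis(host='localhost', port=6379, db=0)

    def count_by_tld(prefix, tld):
        # e.g. prefix='SQLInjection_by_tld' -> key 'SQLInjection_by_tld:201905'
        date = datetime.datetime.now().strftime("%Y%m")
        server_statistics.hincrby(prefix + ':' + date, tld, 1)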

View file

@@ -94,18 +94,22 @@ if __name__ == "__main__":
 faup.decode(url)
 domain = faup.get_domain()
 subdomain = faup.get_subdomain()
 f1 = None
 publisher.debug('{} Published'.format(url))
 if f1 == "onion":
     print(domain)
 if subdomain is not None:
-    subdomain = subdomain.decode('utf8')
+    ## TODO: # FIXME: remove me
+    try:
+        subdomain = subdomain.decode()
+    except:
+        pass
 if domain is not None:
-    domain = domain.decode('utf8')
+    ## TODO: # FIXME: remove me
+    try:
+        domain = domain.decode()
+    except:
+        pass
 domains_list.append(domain)
 hostl = avoidNone(subdomain) + avoidNone(domain)
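
hostl concatenates subdomain and domain through avoidNone(). That helper is not shown in this diff; from its use here it presumably maps None to an empty string so the concatenation cannot fail. A sketch under that assumption:

    def avoidNone(value):
        # assumed behaviour: None -> '' so string concatenation never fails
        return '' if value is None else value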

View file

@@ -115,7 +115,12 @@ def get_type_domain(domain):
 def get_domain_from_url(url):
     faup.decode(url)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
+    domain = unpack_url['domain']
+    ## TODO: FIXME remove me
+    try:
+        domain = domain.decode()
+    except:
+        pass
     return domain

 def get_last_domains_crawled(type):
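
A usage sketch for get_domain_from_url() as patched above (the dangling def line is just trailing diff context); the exact output depends on faup, which for a URL like the one below is expected to yield the registrable domain:

    domain = get_domain_from_url('http://www.example.com/index.html')
    # expected: 'example.com' (str, whether faup returned bytes or str)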
@@ -418,8 +423,19 @@ def create_spider_splash():
     # get service_type
     faup.decode(url)
     unpack_url = faup.get()
+    ## TODO: # FIXME: remove me
+    try:
+        domain = unpack_url['domain'].decode()
+    except:
+        domain = unpack_url['domain']
+    ## TODO: # FIXME: remove me
+    try:
+        tld = unpack_url['tld'].decode()
+    except:
+        tld = unpack_url['tld']
-    if unpack_url['tld'] == b'onion':
+    if tld == 'onion':
         service_type = 'onion'
     else:
         service_type = 'regular'
@@ -694,10 +710,19 @@ def show_domain():
     port = request.args.get('port')
     faup.decode(domain)
     unpack_url = faup.get()
+    ## TODO: # FIXME: remove me
+    try:
+        domain = unpack_url['domain'].decode()
+    except:
+        domain = unpack_url['domain']
     if not port:
         if unpack_url['port']:
+            try:
+                port = unpack_url['port'].decode()
+            except:
+                port = unpack_url['port']
         else:
             port = 80
     try: