From 2606220c2bbc74a6c351c8713f84d6d4a111b02e Mon Sep 17 00:00:00 2001
From: Terrtia
Date: Mon, 6 May 2019 13:38:13 +0200
Subject: [PATCH] fix: [faup] fix new return types (bytes to str)

---
 bin/Credential.py                            | 10 ++++++
 bin/LibInjection.py                          | 19 ++++++++--
 bin/Mail.py                                  |  4 +++
 bin/SQLInjectionDetection.py                 | 19 ++++++++--
 bin/Web.py                                   | 16 +++++----
 .../hiddenServices/Flask_hiddenServices.py   | 35 ++++++++++++++++---
 6 files changed, 87 insertions(+), 16 deletions(-)

diff --git a/bin/Credential.py b/bin/Credential.py
index 8da84883..2eeb3d55 100755
--- a/bin/Credential.py
+++ b/bin/Credential.py
@@ -124,6 +124,11 @@ if __name__ == "__main__":
             for url in sites:
                 faup.decode(url)
                 domain = faup.get()['domain']
+                ## TODO: # FIXME: remove me
+                try:
+                    domain = domain.decode()
+                except:
+                    pass
                 if domain in creds_sites.keys():
                     creds_sites[domain] += 1
                 else:
@@ -143,6 +148,11 @@ if __name__ == "__main__":
                 maildomains = re.findall("@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
                 faup.decode(maildomains)
                 tld = faup.get()['tld']
+                ## TODO: # FIXME: remove me
+                try:
+                    tld = tld.decode()
+                except:
+                    pass
                 server_statistics.hincrby('credential_by_tld:'+date, tld, 1)
         else:
             publisher.info(to_print)
diff --git a/bin/LibInjection.py b/bin/LibInjection.py
index 4b851f21..65789e62 100755
--- a/bin/LibInjection.py
+++ b/bin/LibInjection.py
@@ -29,8 +29,17 @@ def analyse(url, path):
     faup.decode(url)
     url_parsed = faup.get()
     pprint.pprint(url_parsed)
-    resource_path = url_parsed['resource_path']
-    query_string = url_parsed['query_string']
+    ## TODO: # FIXME: remove me
+    try:
+        resource_path = url_parsed['resource_path'].encode()
+    except:
+        resource_path = url_parsed['resource_path']
+
+    ## TODO: # FIXME: remove me
+    try:
+        query_string = url_parsed['query_string'].encode()
+    except:
+        query_string = url_parsed['query_string']
 
     result_path = {'sqli' : False}
     result_query = {'sqli' : False}
@@ -56,7 +65,11 @@ def analyse(url, path):
             p.populate_set_out(msg, 'Tags')
 
         #statistics
-        tld = url_parsed['tld']
+        ## TODO: # FIXME: remove me
+        try:
+            tld = url_parsed['tld'].decode()
+        except:
+            tld = url_parsed['tld']
         if tld is not None:
             date = datetime.datetime.now().strftime("%Y%m")
             server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)
diff --git a/bin/Mail.py b/bin/Mail.py
index b8311d3c..b90ee4ab 100755
--- a/bin/Mail.py
+++ b/bin/Mail.py
@@ -95,6 +95,10 @@ if __name__ == "__main__":
 
                     faup.decode(mail)
                     tld = faup.get()['tld']
+                    try:
+                        tld = tld.decode()
+                    except:
+                        pass
                     server_statistics.hincrby('mail_by_tld:'+date, tld, MX_values[1][mail])
 
             else:
diff --git a/bin/SQLInjectionDetection.py b/bin/SQLInjectionDetection.py
index a5595dc7..ffb21003 100755
--- a/bin/SQLInjectionDetection.py
+++ b/bin/SQLInjectionDetection.py
@@ -68,10 +68,20 @@ def analyse(url, path):
     result_query = 0
 
     if resource_path is not None:
-        result_path = is_sql_injection(resource_path.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            resource_path = resource_path.decode()
+        except:
+            pass
+        result_path = is_sql_injection(resource_path)
 
     if query_string is not None:
-        result_query = is_sql_injection(query_string.decode('utf8'))
+        ## TODO: # FIXME: remove me
+        try:
+            query_string = query_string.decode()
+        except:
+            pass
+        result_query = is_sql_injection(query_string)
 
     if (result_path > 0) or (result_query > 0):
         paste = Paste.Paste(path)
@@ -89,6 +99,11 @@ def analyse(url, path):
         #statistics
         tld = url_parsed['tld']
         if tld is not None:
+            ## TODO: # FIXME: remove me
+            try:
+                tld = tld.decode()
+            except:
+                pass
             date = datetime.datetime.now().strftime("%Y%m")
             server_statistics.hincrby('SQLInjection_by_tld:'+date, tld, 1)
 
diff --git a/bin/Web.py b/bin/Web.py
index 7cc96822..ca4366e8 100755
--- a/bin/Web.py
+++ b/bin/Web.py
@@ -94,18 +94,22 @@ if __name__ == "__main__":
             faup.decode(url)
             domain = faup.get_domain()
             subdomain = faup.get_subdomain()
-            f1 = None
 
             publisher.debug('{} Published'.format(url))
 
-            if f1 == "onion":
-                print(domain)
-
             if subdomain is not None:
-                subdomain = subdomain.decode('utf8')
+                ## TODO: # FIXME: remove me
+                try:
+                    subdomain = subdomain.decode()
+                except:
+                    pass
 
             if domain is not None:
-                domain = domain.decode('utf8')
+                ## TODO: # FIXME: remove me
+                try:
+                    domain = domain.decode()
+                except:
+                    pass
                 domains_list.append(domain)
 
             hostl = avoidNone(subdomain) + avoidNone(domain)
diff --git a/var/www/modules/hiddenServices/Flask_hiddenServices.py b/var/www/modules/hiddenServices/Flask_hiddenServices.py
index 2d45b9c6..0ee44974 100644
--- a/var/www/modules/hiddenServices/Flask_hiddenServices.py
+++ b/var/www/modules/hiddenServices/Flask_hiddenServices.py
@@ -115,7 +115,12 @@ def get_type_domain(domain):
 def get_domain_from_url(url):
     faup.decode(url)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
+    domain = unpack_url['domain']
+    ## TODO: FIXME remove me
+    try:
+        domain = domain.decode()
+    except:
+        pass
     return domain
 
 def get_last_domains_crawled(type):
@@ -418,8 +423,19 @@ def create_spider_splash():
         # get service_type
         faup.decode(url)
         unpack_url = faup.get()
-        domain = unpack_url['domain'].decode()
-        if unpack_url['tld'] == b'onion':
+        ## TODO: # FIXME: remove me
+        try:
+            domain = unpack_url['domain'].decode()
+        except:
+            domain = unpack_url['domain']
+
+        ## TODO: # FIXME: remove me
+        try:
+            tld = unpack_url['tld'].decode()
+        except:
+            tld = unpack_url['tld']
+
+        if tld == 'onion':
             service_type = 'onion'
         else:
             service_type = 'regular'
@@ -694,10 +710,19 @@ def show_domain():
     port = request.args.get('port')
     faup.decode(domain)
     unpack_url = faup.get()
-    domain = unpack_url['domain'].decode()
+
+    ## TODO: # FIXME: remove me
+    try:
+        domain = unpack_url['domain'].decode()
+    except:
+        domain = unpack_url['domain']
+
    if not port:
         if unpack_url['port']:
-            port = unpack_url['port'].decode()
+            try:
+                port = unpack_url['port'].decode()
+            except:
+                port = unpack_url['port']
         else:
             port = 80
     try:
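
Note: every try/except block tagged "## TODO: # FIXME: remove me" above performs the same
bytes-vs-str normalisation on values returned by faup. A minimal sketch of how that repeated
pattern could be centralised once the faup return types settle, assuming pyfaup's Faup API
(faup.decode(url) followed by faup.get() returning a dict of fields); the unpack_url_fields
helper name is illustrative only and is not part of this patch:

    from pyfaup.faup import Faup

    faup = Faup()

    def unpack_url_fields(url):
        """Parse url with faup and return its fields with bytes decoded to str."""
        faup.decode(url)
        fields = faup.get()
        # Depending on the faup version, values may come back as bytes or str;
        # normalise everything to str and leave None values untouched.
        return {k: v.decode() if isinstance(v, bytes) else v
                for k, v in fields.items()}

    # Usage mirroring the patched call sites:
    #   fields = unpack_url_fields(url)
    #   domain, tld = fields['domain'], fields['tld']
    #   service_type = 'onion' if tld == 'onion' else 'regular'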