mirror of https://github.com/ail-project/ail-framework.git
synced 2024-11-10 08:38:28 +00:00

fix Onion module

This commit is contained in:
parent f93fe9aeb2
commit 4e55f6ee90

3 changed files with 15 additions and 10 deletions
bin/Bitcoin.py

@@ -25,7 +25,7 @@ import time
 from hashlib import sha256
 
 
-# thank http://rosettacode.org/wiki/Bitcoin/address_validation#Python for this 2 functions
+#### thank http://rosettacode.org/wiki/Bitcoin/address_validation#Python for this 2 functions
 
 def decode_base58(bc, length):
     n = 0
@@ -38,7 +38,7 @@ def check_bc(bc):
         return bcbytes[-4:] == sha256(sha256(bcbytes[:-4]).digest()).digest()[:4]
     except Exception:
         return False
-########################################################3
+########################################################
 
 def search_key(content, message):
     bitcoin_address = re.findall(regex_bitcoin_public_address, content)
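For context, the two functions touched here implement the classic base58 decode plus double-SHA256 checksum test for Bitcoin addresses, per the rosettacode page cited in the comment. A self-contained sketch of that pattern (the digits58 alphabet is the standard base58 one; the module's regex and other imports are not shown):

from hashlib import sha256

# standard base58 alphabet (no 0, O, I, l)
digits58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def decode_base58(bc, length):
    # interpret the address as a base58 number, then dump it as big-endian bytes
    n = 0
    for char in bc:
        n = n * 58 + digits58.index(char)
    return n.to_bytes(length, 'big')

def check_bc(bc):
    # a valid address decodes to 25 bytes: payload + 4-byte double-SHA256 checksum
    try:
        bcbytes = decode_base58(bc, 25)
        return bcbytes[-4:] == sha256(sha256(bcbytes[:-4]).digest()).digest()[:4]
    except Exception:
        return False

Calling check_bc('1AGNa15ZQXAZUgFiqJ2i7Z2DPU2J6hW62i'), the example address from the rosettacode page, should return True.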
bin/LAUNCH.sh

@@ -142,7 +142,7 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Mail" bash -c './Mail.py; read x'
     sleep 0.1
-    #screen -S "Script_AIL" -X screen -t "Dox" bash -c './Dox.py; read x'
+    screen -S "Script_AIL" -X screen -t "Dox" bash -c './Dox.py; read x'
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Web" bash -c './Web.py; read x'
     sleep 0.1
@@ -162,6 +162,8 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Base64" bash -c './Base64.py; read x'
     sleep 0.1
+    screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c './Bitcoin.py; read x'
+    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Phone" bash -c './Phone.py; read x'
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Release" bash -c './Release.py; read x'
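A note on the launcher idiom used above: screen -S "Script_AIL" -X sends a command to the already-running Script_AIL session, and the inner screen -t "Bitcoin" bash -c './Bitcoin.py; read x' opens a new titled window running the module; the trailing read x keeps the window open after the script exits, so any traceback stays visible instead of the window closing immediately.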
15 bin/Onion.py

@@ -37,12 +37,12 @@ from Helper import Process
 def fetch(p, r_cache, urls, domains, path):
     failed = []
     downloaded = []
-    print(len(urls), 'Urls to fetch.')
+    print('{} Urls to fetch'.format(len(urls)))
     for url, domain in zip(urls, domains):
         if r_cache.exists(url) or url in failed:
             continue
         to_fetch = base64.standard_b64encode(url.encode('utf8'))
-        print(to_fetch)
+        print('fetching url: {}'.format(to_fetch))
         process = subprocess.Popen(["python", './tor_fetcher.py', to_fetch],
                                    stdout=subprocess.PIPE)
         while process.poll() is None:
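Why the URL is base64-encoded before being handed to the subprocess: an onion URL can contain characters that are awkward to pass as a shell/argv token, so tor_fetcher.py receives one opaque base64 string and decodes it on its side. A minimal round-trip sketch (the example URL is made up):

import base64

url = 'http://example2abcdef.onion/page?id=1'          # hypothetical onion URL
token = base64.standard_b64encode(url.encode('utf8'))  # bytes, usable as a single argv token
assert base64.standard_b64decode(token).decode('utf8') == url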
@@ -52,9 +52,10 @@ def fetch(p, r_cache, urls, domains, path):
             r_cache.setbit(url, 0, 1)
             r_cache.expire(url, 360000)
             downloaded.append(url)
-            tempfile = process.stdout.read().strip()
+            print('downloaded : {}'.format(downloaded))
+            '''tempfile = process.stdout.read().strip()
             tempfile = tempfile.decode('utf8')
-            with open(tempfile, 'r') as f:
+            #with open(tempfile, 'r') as f:
                 filename = path + domain + '.gz'
                 fetched = f.read()
                 content = base64.standard_b64decode(fetched)
@@ -68,9 +69,9 @@ def fetch(p, r_cache, urls, domains, path):
                 ff.write(content)
             p.populate_set_out(save_path, 'Global')
             p.populate_set_out(url, 'ValidOnion')
-            p.populate_set_out(fetched, 'FetchedOnion')
+            p.populate_set_out(fetched, 'FetchedOnion')'''
             yield url
-            os.unlink(tempfile)
+            #os.unlink(tempfile)
         else:
             r_cache.setbit(url, 0, 0)
             r_cache.expire(url, 3600)
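The r_cache calls above implement a simple retry policy with a one-bit Redis flag per URL: on success the key is set to 1 and kept for 360000 seconds (100 hours) so the onion is not re-fetched; on failure it is set to 0 and expires after 3600 seconds, allowing a retry within the hour. A minimal sketch with redis-py, assuming a local Redis instance (connection settings are placeholders):

import redis

r_cache = redis.StrictRedis(host='localhost', port=6379, db=0)  # assumed settings

def mark_fetch_result(url, success):
    # one bit per URL key: 1 = fetched, 0 = failed; the TTL decides when a retry is allowed
    if success:
        r_cache.setbit(url, 0, 1)
        r_cache.expire(url, 360000)  # ~100 hours: treat as done
    else:
        r_cache.setbit(url, 0, 0)
        r_cache.expire(url, 3600)    # 1 hour: eligible for retry after that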
@@ -133,6 +134,8 @@ if __name__ == "__main__":
             PST.save_attribute_redis(channel, domains_list)
             to_print = 'Onion;{};{};{};'.format(PST.p_source, PST.p_date,
                                                 PST.p_name)
+
+            print(len(domains_list))
             if len(domains_list) > 0:
 
                 publisher.warning('{}Detected {} .onion(s);{}'.format(
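Since fetch() contains yield url, it is a generator: the caller receives each successfully fetched onion URL as soon as it completes rather than waiting for the whole batch. A hypothetical consumer, reusing the names visible in the diff:

# hypothetical driver loop; p, r_cache, urls, domains and path are the
# objects passed to fetch() in the diff above
for url in fetch(p, r_cache, urls, domains, path):
    print('onion available: {}'.format(url))  # illustrative only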