mirror of https://github.com/ail-project/ail-framework.git
synced 2024-11-29 17:27:17 +00:00

Merge branch 'master' into i2p

This commit is contained in commit 51a48a4c1a.
38 changed files with 746 additions and 137 deletions.
@@ -19,15 +19,6 @@ if [ -e "${DIR}/AILENV/bin/python" ]; then
     ENV_PY="${DIR}/AILENV/bin/python"
     export AIL_VENV=${AIL_HOME}/AILENV/
     . ./AILENV/bin/activate
-elif [ ! -z "$TRAVIS" ]; then
-    echo "Travis detected"
-    ENV_PY="~/virtualenv/python3.6/bin/python"
-    export AIL_VENV="~/virtualenv/python3.6/"
-
-    export AIL_BIN=${AIL_HOME}/bin/
-    export AIL_FLASK=${AIL_HOME}/var/www/
-    export AIL_REDIS=${AIL_HOME}/redis/src/
-    export AIL_ARDB=${AIL_HOME}/ardb/src/
 else
     echo "Please make sure you have a AIL-framework environment, au revoir"
     exit 1
@@ -209,8 +200,6 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Decoder" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Decoder.py; read x"
     sleep 0.1
-    screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./DomClassifier.py; read x"
-    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Keys" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Keys.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Onion" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Onion.py; read x"
@@ -220,17 +209,25 @@ function launching_scripts {
     screen -S "Script_AIL" -X screen -t "Telegram" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Telegram.py; read x"
     sleep 0.1
 
+    screen -S "Script_AIL" -X screen -t "Hosts" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Hosts.py; read x"
+    sleep 0.1
+    screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./DomClassifier.py; read x"
+    sleep 0.1
 
     screen -S "Script_AIL" -X screen -t "Urls" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Urls.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "SQLInjectionDetection" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./SQLInjectionDetection.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "LibInjection" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./LibInjection.py; read x"
     sleep 0.1
+    screen -S "Script_AIL" -X screen -t "Zerobins" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Zerobins.py; read x"
+    sleep 0.1
 
     ##################################
     #       TRACKERS MODULES         #
     ##################################
+    screen -S "Script_AIL" -X screen -t "Tracker_Typo_Squatting" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Typo_Squatting.py; read x"
+    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Tracker_Term" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Term.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Tracker_Regex" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Regex.py; read x"
@@ -439,12 +436,12 @@ function launch_feeder {
 }
 
 function killscript {
-    if [[ $islogged || $isqueued || $is_ail_core || $isscripted || $isflasked || $isfeeded || $iscrawler ]]; then
+    if [[ $islogged || $isqueued || $is_ail_core || $isscripted || $isflasked || $isfeeded || $iscrawler || $is_ail_2_ail ]]; then
         echo -e $GREEN"Killing Script"$DEFAULT
-        kill $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler
+        kill $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler $is_ail_2_ail
         sleep 0.2
         echo -e $ROSE`screen -ls`$DEFAULT
-        echo -e $GREEN"\t* $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler killed."$DEFAULT
+        echo -e $GREEN"\t* $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler $is_ail_2_ail killed."$DEFAULT
     else
        echo -e $RED"\t* No script to kill"$DEFAULT
    fi
@@ -3,7 +3,6 @@
 
 import os
 import sys
-import cld3
 import time
 
 from packages import Item
@@ -79,7 +79,7 @@ class Sync_importer(AbstractModule):
         b64_gzip_content = ail_stream['payload']['raw']
 
         # # TODO: create default id
-        item_id = ail_stream['meta']['ail:id'] + 'test'
+        item_id = ail_stream['meta']['ail:id']
 
         message = f'{item_id} {b64_gzip_content}'
         print(item_id)
@@ -48,8 +48,6 @@ class Sync_module(AbstractModule):
 
     def compute(self, message):
 
-        print(message)
-
         ### REFRESH DICT
         if self.last_refresh < ail_2_ail.get_last_updated_sync_config():
             self.last_refresh = time.time()
@@ -71,17 +69,16 @@ class Sync_module(AbstractModule):
         tags = obj.get_tags(r_set=True)
 
         # check filter + tags
+        #print(message)
         for queue_uuid in self.dict_sync_queues:
             filter_tags = self.dict_sync_queues[queue_uuid]['filter']
-            print(tags)
-            print(filter_tags)
-            print(tags.issubset(filter_tags))
             if filter_tags and tags:
-                if tags.issubset(filter_tags):
+                #print(f'tags: {tags} filter: {filter_tags}')
+                if filter_tags.issubset(tags):
                     obj_dict = obj.get_default_meta()
                     # send to queue push and/or pull
                     for dict_ail in self.dict_sync_queues[queue_uuid]['ail_instances']:
+                        print(f'ail_uuid: {dict_ail["ail_uuid"]} obj: {message}')
                         ail_2_ail.add_object_to_sync_queue(queue_uuid, dict_ail['ail_uuid'], obj_dict,
                                                            push=dict_ail['push'], pull=dict_ail['pull'])
 
@@ -1028,8 +1028,9 @@ def api_create_sync_queue(json_dict):
     tags = json_dict.get('tags')
     if not tags:
         return {"status": "error", "reason": "no tags provided"}, 400
-    if not Tag.are_enabled_tags(tags):
-        return {"status": "error", "reason": "Invalid/Disabled tags"}, 400
+    # FIXME: add custom tags
+    # if not Tag.are_enabled_tags(tags):
+    #     return {"status": "error", "reason": "Invalid/Disabled tags"}, 400
 
     max_size = json_dict.get('max_size')
     if not max_size:
@@ -1064,8 +1065,9 @@ def api_edit_sync_queue(json_dict):
 
     tags = json_dict.get('tags')
     if tags:
-        if not Tag.are_enabled_tags(tags):
-            return {"status": "error", "reason": "Invalid/Disabled tags"}, 400
+        # FIXME: add custom tags
+        # if not Tag.are_enabled_tags(tags):
+        #     return {"status": "error", "reason": "Invalid/Disabled tags"}, 400
         edit_sync_queue_filter_tags(queue_uuid, tags)
 
     max_size = json_dict.get('max_size')
@@ -1203,11 +1205,13 @@ def create_ail_stream(Object):
                   'type': Object.get_type()}
 
     # OBJECT META
-    ail_stream['meta'] = {'ail_mime-type': 'text/plain'}
+    ail_stream['meta'] = {'ail:mime-type': 'text/plain'}
+    ail_stream['meta']['compress'] = 'gzip'
+    ail_stream['meta']['encoding'] = 'base64'
     ail_stream['meta']['ail:id'] = Object.get_id()
-    ail_stream['meta']['ail:tags'] = Object.get_tags()
-    # GLOBAL PAYLOAD
-    ail_stream['meta']['ail:uuid'] = get_ail_uuid()
+    ail_stream['meta']['tags'] = Object.get_tags()
+    # GLOBAL META
+    ail_stream['meta']['uuid_org'] = get_ail_uuid()
 
     # OBJECT PAYLOAD
     ail_stream['payload'] = Object.get_ail_2_ail_payload()
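Taken together, these changes move the compression and encoding hints into `meta` and rename two keys. For orientation, a sketch of the resulting stream; the values are invented, and any envelope key not shown in the hunk is an assumption:

# Illustrative shape of an ail_stream after this hunk; values are made up.
ail_stream_example = {
    'type': 'item',                                   # from Object.get_type()
    'meta': {
        'ail:mime-type': 'text/plain',
        'compress': 'gzip',                           # now advertised in meta
        'encoding': 'base64',                         # now advertised in meta
        'ail:id': 'submitted/2022/02/01/example.gz',  # hypothetical item id
        'tags': ['infoleak:automatic-detection="credential"'],
        'uuid_org': 'c3a0c2f5-...',                   # uuid of the origin AIL
    },
    'payload': {'raw': '<base64(gzip(content))>'},
}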
@@ -20,6 +20,16 @@ sys.path.append(os.environ['AIL_BIN'])
 # Import Project packages
 ##################################
 from core import ail_2_ail
+from lib.ConfigLoader import ConfigLoader
+
+config_loader = ConfigLoader()
+local_addr = config_loader.get_config_str('AIL_2_AIL', 'local_addr')
+if not local_addr or local_addr == None:
+    local_addr = None
+else:
+    local_addr = (local_addr, 0)
+config_loader = None
+
 
 #### LOGS ####
 redis_logger = publisher
@@ -68,10 +78,9 @@ async def push(websocket, ail_uuid):
         Obj, queue_uuid = ail_2_ail.get_sync_queue_object_and_queue_uuid(ail_uuid)
         if Obj:
             obj_ail_stream = ail_2_ail.create_ail_stream(Obj)
+            print(obj_ail_stream['meta'])
             obj_ail_stream = json.dumps(obj_ail_stream)
 
-            sys.stdout.write(obj_ail_stream)
-
             # send objects
             await websocket.send(obj_ail_stream)
             await asyncio.sleep(0.1)
@@ -112,6 +121,7 @@ async def ail_to_ail_client(ail_uuid, sync_mode, api, ail_key=None, client_id=None):
     async with websockets.connect(
                     uri,
                     ssl=ssl_context,
+                    local_addr=local_addr,
                     #open_timeout=10, websockers 10.0 /!\ python>=3.7
                     extra_headers={"Authorization": f"{ail_key}"}
                 ) as websocket:
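`websockets.connect` forwards extra keyword arguments such as `local_addr` to asyncio's `create_connection`, which binds the outgoing socket before connecting; `(addr, 0)` therefore pins the source interface while letting the OS pick an ephemeral port, useful on multi-homed hosts. A minimal standalone sketch, with an illustrative URI and address:

import asyncio
import websockets

async def connect_from(source_ip, uri):
    # Bind the client socket to a given interface; port 0 = ephemeral.
    async with websockets.connect(uri, local_addr=(source_ip, 0)) as ws:
        await ws.send('ping')

# asyncio.run(connect_from('10.0.0.2', 'wss://ail.example:4443'))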
@@ -17,6 +17,12 @@ sys.path.append(os.environ['AIL_BIN'])
 ##################################
 from pubsublogger import publisher
 from core import ail_2_ail
+from lib.ConfigLoader import ConfigLoader
+
+config_loader = ConfigLoader()
+host = config_loader.get_config_str('AIL_2_AIL', 'server_host')
+port = config_loader.get_config_int('AIL_2_AIL', 'server_port')
+config_loader = None
 
 # # TODO: refactor logging
 #### LOGS ####
@@ -303,9 +309,6 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):
 
 if __name__ == '__main__':
 
-    host = '0.0.0.0'
-    port = 4443
-
     print('Launching Server...')
     redis_logger.info('Launching Server...')
 
@@ -315,7 +318,7 @@ if __name__ == '__main__':
     cert_dir = os.environ['AIL_FLASK']
     ssl_context.load_cert_chain(certfile=os.path.join(cert_dir, 'server.crt'), keyfile=os.path.join(cert_dir, 'server.key'))
 
-    start_server = websockets.serve(ail_to_ail_serv, host, port, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol)
+    start_server = websockets.serve(ail_to_ail_serv, host, port, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol, max_size=None)
 
     print(f'Server Launched: wss://{host}:{port}')
     redis_logger.info(f'Server Launched: wss://{host}:{port}')
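A note on `max_size=None`: the websockets library caps inbound messages at 1 MiB by default, which gzipped, base64-encoded items could exceed; passing `None` removes the cap, at the cost of letting a peer send arbitrarily large frames.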
bin/import/ail_json_importer/Ail_feeder_telegram.py (new executable file, 59 lines)
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+"""
+The JSON Receiver Module
+================
+
+Recieve Json Items (example: Twitter feeder)
+
+"""
+import os
+import json
+import sys
+import datetime
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
+import item_basic
+import Username
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'import', 'ail_json_importer'))
+from Default_json import Default_json
+
+class Ail_feeder_telegram(Default_json):
+    """Twitter Feeder functions"""
+
+    def __init__(self, name, json_item):
+        super().__init__(name, json_item)
+
+    def get_feeder_name(self):
+        return 'telegram'
+
+    # define item id
+    def get_item_id(self):
+        # use twitter timestamp ?
+        item_date = datetime.date.today().strftime("%Y/%m/%d")
+        channel_id = str(self.json_item['meta']['channel_id'])
+        message_id = str(self.json_item['meta']['message_id'])
+        item_id = f'{channel_id}_{message_id}'
+        return os.path.join('telegram', item_date, item_id) + '.gz'
+
+    def process_json_meta(self, process, item_id):
+        '''
+        Process JSON meta filed.
+        '''
+        channel_id = str(self.json_item['meta']['channel_id'])
+        message_id = str(self.json_item['meta']['message_id'])
+        telegram_id = f'{channel_id}_{message_id}'
+        item_basic.add_map_obj_id_item_id(telegram_id, item_id, 'telegram_id')
+        #print(self.json_item['meta'])
+        username = None
+        if self.json_item['meta'].get('user'):
+            username = str(self.json_item['meta']['user'])
+        else:
+            if self.json_item['meta'].get('channel'):
+                username = str(self.json_item['meta']['channel']['username'])
+        if username:
+            #print(username)
+            item_date = item_basic.get_item_date(item_id)
+            Username.save_item_correlation('telegram', username, item_id, item_date)
+        return None
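For orientation, a hypothetical input for this feeder; only the `meta` keys read by the class above come from the code, the envelope around them is an assumption:

# Hypothetical Telegram feeder JSON; 'meta' keys match the accessors above,
# the envelope keys are assumptions.
json_item = {
    'source': 'ail_feeder_telegram',             # assumed
    'meta': {
        'channel_id': 1384799,
        'message_id': 5721,
        'channel': {'username': 'example_channel'},
    },
    'data': '<base64-encoded message content>',  # assumed
}
# get_item_id() would yield e.g. 'telegram/2022/02/01/1384799_5721.gz'
# (the date component is the day of import, not the Telegram timestamp).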
@@ -270,7 +270,7 @@ def exists_investigation(investigation_uuid):
 
 # created by user
 def get_user_all_investigations(user_id):
-    return r_tracking.smembers('investigations:user:{user_id}')
+    return r_tracking.smembers(f'investigations:user:{user_id}')
 
 def is_object_investigated(obj_id, obj_type, subtype=''):
     return r_tracking.exists(f'obj:investigations:{obj_type}:{subtype}:{obj_id}')

@@ -278,6 +278,15 @@ def is_object_investigated(obj_id, obj_type, subtype=''):
 def get_obj_investigations(obj_id, obj_type, subtype=''):
     return r_tracking.smembers(f'obj:investigations:{obj_type}:{subtype}:{obj_id}')
 
+def delete_obj_investigations(obj_id, obj_type, subtype=''):
+    unregistred = False
+    for investigation_uuid in get_obj_investigations(obj_id, obj_type, subtype=subtype):
+        investigation = Investigation(investigation_uuid)
+        investigation.unregister_object(obj_id, obj_type, subtype)
+        unregistred = True
+    return unregistred
+
+
 # # TODO: fix default threat_level analysis
 # # TODO: limit description + name
 # # TODO: sanityze tags

@@ -286,7 +295,7 @@ def create_investigation(user_id, date, name, threat_level, analysis, info, tags
     investigation_uuid = generate_uuid()
     r_tracking.sadd('investigations:all', investigation_uuid)
     # user map
-    r_tracking.sadd('investigations:user:{user_id}', investigation_uuid)
+    r_tracking.sadd(f'investigations:user:{user_id}', investigation_uuid)
     # metadata
     r_tracking.hset(f'investigations:data:{investigation_uuid}', 'creator_user', user_id)
 
@@ -11,11 +11,15 @@ import yara
 import datetime
 import base64
 
+from ail_typo_squatting import runAll
+import math
+
 
 from flask import escape
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
 import Date
+import Tag
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 import ConfigLoader

@@ -26,6 +30,10 @@ r_cache = config_loader.get_redis_conn("Redis_Cache")
 
 r_serv_db = config_loader.get_redis_conn("ARDB_DB")
 r_serv_tracker = config_loader.get_redis_conn("ARDB_Tracker")
+
+items_dir = config_loader.get_config_str("Directories", "pastes")
+if items_dir[-1] == '/':
+    items_dir = items_dir[:-1]
 config_loader = None
 
 email_regex = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}'
@@ -75,8 +83,8 @@ def get_all_tracker_type():
 def get_all_tracker_uuid():
     return r_serv_tracker.smembers(f'trackers:all')
 
-def get_all_tracker_by_type(tracker_type):
-    r_serv_tracker.smembers(f'trackers:all:{tracker_type}')
+def get_all_tracker_uuid_by_type(tracker_type):
+    return r_serv_tracker.smembers(f'trackers:all:{tracker_type}')
 
 # def get_all_tracker():
 #     l_keys_name = []
@@ -211,6 +219,20 @@ def get_tracker_items_by_daterange(tracker_uuid, date_from, date_to):
         all_item_id |= r_serv_tracker.smembers(f'tracker:item:{tracker_uuid}:{date_day}')
     return all_item_id
 
+def get_tracker_typosquatting_domains(tracker_uuid):
+    return r_serv_tracker.smembers(f'tracker:typosquatting:{tracker_uuid}')
+
+def get_typosquatting_tracked_words_list():
+    all_typo = dict()
+    typos_uuid = get_all_tracker_uuid_by_type("typosquatting")
+
+    for typo_uuid in typos_uuid:
+        tracker = get_tracker_by_uuid(typo_uuid)
+        all_typo[tracker] = get_tracker_typosquatting_domains(typo_uuid)
+
+    return all_typo
+
+
 def add_tracked_item(tracker_uuid, item_id):
     item_date = item_basic.get_item_date(item_id)
     # track item
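The mapping returned by `get_typosquatting_tracked_words_list` is what lets the tracker test each extracted host with a constant-time set lookup; a sketch of its shape, with invented values:

# Illustrative shape: tracked domain -> set of generated typo variants.
tracked = {
    'circl.lu': {'cirl.lu', 'circll.lu', 'c1rcl.lu'},
    'misp-project.org': {'m1sp-project.org', 'misp-proiect.org'},
}
host = 'c1rcl.lu'
hits = [domain for domain, typos in tracked.items() if host in typos]
print(hits)  # ['circl.lu']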
@@ -248,7 +270,6 @@ def update_tracker_daterange(tracker_uuid, date, op='add'):
     if op == 'del':
         pass
 
-
 def remove_tracked_item(item_id):
     item_date = item_basic.get_item_date(item_id)
     for tracker_uuid in get_item_all_trackers_uuid(item_id):

@@ -267,6 +288,11 @@ def is_obj_tracked(obj_type, subtype, id):
 def get_obj_all_trackers(obj_type, subtype, id):
     return r_serv_tracker.smembers(f'obj:trackers:{obj_type}:{obj_id}')
 
+# # TODO: ADD all Objects + Subtypes
+def delete_obj_trackers(obj_type, subtype, id):
+    if obj_type == 'item':
+        remove_tracked_item(id)
+
 def get_email_subject(tracker_uuid):
     tracker_description = get_tracker_description(tracker_uuid)
     if not tracker_description:
@@ -400,6 +426,15 @@ def api_validate_tracker_to_add(tracker , tracker_type, nb_words=1):
 
         tracker = ",".join(words_set)
         tracker = "{};{}".format(tracker, nb_words)
+
+    elif tracker_type == 'typosquatting':
+        tracker = tracker.lower()
+        # Take only the first term
+        domain = tracker.split(" ")
+        if len(domain) > 1:
+            return {"status": "error", "reason": "Only one domain is accepted at a time"}, 400
+        if not "." in tracker:
+            return {"status": "error", "reason": "Invalid domain name"}, 400
 
     elif tracker_type=='yara_custom':
         if not is_valid_yara_rule(tracker):
@@ -440,6 +475,12 @@ def create_tracker(tracker, tracker_type, user_id, level, tags, mails, descripti
         tracker = save_yara_rule(tracker_type, tracker, tracker_uuid=tracker_uuid)
         tracker_type = 'yara'
 
+    elif tracker_type == 'typosquatting':
+        domain = tracker.split(" ")[0]
+        typo_generation = runAll(domain=domain, limit=math.inf, formatoutput="text", pathOutput="-", verbose=False)
+        for typo in typo_generation:
+            r_serv_tracker.sadd(f'tracker:typosquatting:{tracker_uuid}', typo)
+
     # create metadata
     r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'tracked', tracker)
     r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'type', tracker_type)
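The generation step can be tried standalone; the `runAll` signature below is taken verbatim from the hunk, the domain is an example:

import math
from ail_typo_squatting import runAll

# Generate typo variants for one domain, as create_tracker() does above.
variations = runAll(domain='circl.lu', limit=math.inf,
                    formatoutput='text', pathOutput='-', verbose=False)
for typo in variations:
    print(typo)  # e.g. 'cirl.lu', 'circll.lu', ...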
@@ -1039,7 +1080,10 @@ def get_retro_hunt_dir_day_to_analyze(task_uuid, date, filter_last=False, source
 
 # # TODO: move me
 def get_items_to_analyze(dir, last=None):
-    full_dir = os.path.join(os.environ['AIL_HOME'], 'PASTES', dir) # # TODO: # FIXME: use item config dir
+    if items_dir == 'PASTES':
+        full_dir = os.path.join(os.environ['AIL_HOME'], 'PASTES', dir)
+    else:
+        full_dir = os.path.join(items_dir, dir)
     if os.path.isdir(full_dir):
         all_items = sorted([os.path.join(dir, f) for f in os.listdir(full_dir) if os.path.isfile(os.path.join(full_dir, f))])
         # remove processed items
@@ -1265,7 +1309,21 @@ def api_delete_retro_hunt_task(task_uuid):
     else:
         return (delete_retro_hunt_task(task_uuid), 200)
 
-# if __name__ == '__main__':
+#### DB FIX ####
+def get_trackers_custom_tags():
+    tags = set()
+    for tracker_uuid in get_all_tracker_uuid():
+        for tag in get_tracker_tags(tracker_uuid):
+            tags.add(tag)
+    for task_uuid in get_all_retro_hunt_tasks():
+        for tag in get_retro_hunt_task_tags(task_uuid):
+            tags.add(tag)
+    return tags
+
+#### -- ####
+
+if __name__ == '__main__':
+    print(get_trackers_custom_tags())
     # fix_all_tracker_uuid_list()
     # res = get_all_tracker_uuid()
     # print(len(res))
@@ -116,6 +116,9 @@ def is_domain_root(item_id):
     else:
         return True
 
+def get_item_url(item_id):
+    return r_serv_metadata.hget(f'paste_metadata:{item_id}', 'real_link')
+
 def get_nb_children(item_id):
     return r_serv_metadata.scard('paste_children:{}'.format(item_id))
 
@@ -166,7 +169,7 @@ def add_item_parent(parent_item_id, item_id):
 #### UNKNOW SECTION ####
 
 def get_obj_id_item_id(parent_type, parent_id):
-    all_parents_type = ['twitter_id', 'jabber_id']
+    all_parents_type = ['twitter_id', 'jabber_id', 'telegram_id']
     if parent_type in all_parents_type:
         return r_serv_metadata.hget('map:{}:item_id'.format(parent_type), parent_id)
     else:
@@ -177,6 +180,8 @@ def add_map_obj_id_item_id(obj_id, item_id, obj_type):
         r_serv_metadata.hset('map:twitter_id:item_id', obj_id, item_id)
     if obj_type == 'jabber_id':
         r_serv_metadata.hset('map:jabber_id:item_id', obj_id, item_id)
+    if obj_type == 'telegram_id':
+        r_serv_metadata.hset('map:telegram_id:item_id', obj_id, item_id)
 
 # delete twitter id
 
@@ -11,25 +11,33 @@ import html2text
 
 from io import BytesIO
 
+from pymisp import MISPObject
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from export.Export import get_ail_uuid # # TODO: REPLACE
+from lib.objects.abstract_object import AbstractObject
+from lib.ConfigLoader import ConfigLoader
+from lib import item_basic
+from lib import domain_basic
+
+from packages import Tag
+
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
-import Tag
 import Cryptocurrency
 import Pgp
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
-import item_basic
-import domain_basic
-import ConfigLoader
 import Correlate_object
 import Decoded
 import Screenshot
 import Username
 
-from abstract_object import AbstractObject
-from item_basic import *
 from flask import url_for
 
-config_loader = ConfigLoader.ConfigLoader()
+config_loader = ConfigLoader()
 # get and sanityze PASTE DIRECTORY
 # # TODO: rename PASTES_FOLDER
 PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/'
@@ -89,6 +97,12 @@ class Item(AbstractObject):
         """
         return item_basic.get_item_content(self.id)
 
+    def get_raw_content(self):
+        filepath = self.get_filename()
+        with open(filepath, 'rb') as f:
+            raw_content = BytesIO(f.read())
+        return raw_content
+
     def get_gzip_content(self, b64=False):
         with open(self.get_filename(), 'rb') as f:
             content = f.read()
@@ -97,8 +111,7 @@
             return content.decode()
 
     def get_ail_2_ail_payload(self):
-        payload = {'raw': self.get_gzip_content(b64=True),
-                    'compress': 'gzip'}
+        payload = {'raw': self.get_gzip_content(b64=True)}
         return payload
 
     # # TODO:

@@ -108,6 +121,7 @@
     # # WARNING: UNCLEAN DELETE /!\ TEST ONLY /!\
     # TODO: DELETE ITEM CORRELATION + TAGS + METADATA + ...
     def delete(self):
+        self._delete()
         try:
             os.remove(self.get_filename())
             return True
@@ -128,9 +142,19 @@
             color = '#332288'
         return {'style': '', 'icon': '', 'color': color, 'radius':5}
 
-    ############################################################################
-    ############################################################################
-    ############################################################################
+    def get_misp_object(self):
+        obj_date = self.get_date()
+        obj = MISPObject('ail-leak', standalone=True)
+        obj.first_seen = obj_date
+
+        obj_attrs = []
+        obj_attrs.append( obj.add_attribute('first-seen', value=obj_date) )
+        obj_attrs.append( obj.add_attribute('raw-data', value=self.id, data=self.get_raw_content()) )
+        obj_attrs.append( obj.add_attribute('sensor', value=get_ail_uuid()) )
+        for obj_attr in obj_attrs:
+            for tag in self.get_tags():
+                obj_attr.add_tag(tag)
+        return obj
 
     def exist_correlation(self):
         pass

@@ -689,3 +713,6 @@ def delete_domain_node(item_id):
 
 
 # if __name__ == '__main__':
+#
+#     item = Item('')
+#     print(item.get_misp_object().to_json())

@@ -53,21 +53,22 @@ class DomClassifier(AbstractModule):
 
 
     def compute(self, message, r_result=False):
-        item = Item(message)
-
-        item_content = item.get_content()
+        host, id = message.split()
+
+        item = Item(id)
         item_basename = item.get_basename()
         item_date = item.get_date()
         item_source = item.get_source()
         try:
-            mimetype = item_basic.get_item_mimetype(item.get_id())
-
-            if mimetype.split('/')[0] == "text":
-                self.c.text(rawtext=item_content)
-                self.c.potentialdomain()
+            self.c.text(rawtext=host)
+            print(self.c.domain)
             self.c.validdomain(passive_dns=True, extended=False)
             #self.redis_logger.debug(self.c.vdomain)
 
+            print(self.c.vdomain)
+            print()
+
             if self.c.vdomain and d4.is_passive_dns_enabled():
                 for dns_record in self.c.vdomain:
                     self.send_message_to_queue(dns_record)
bin/modules/Hosts.py (new executable file, 77 lines)
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+"""
+The Hosts Module
+======================
+
+This module is consuming the Redis-list created by the Global module.
+
+It is looking for Hosts
+
+"""
+
+##################################
+# Import External packages
+##################################
+import os
+import re
+import sys
+import time
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from modules.abstract_module import AbstractModule
+from lib.ConfigLoader import ConfigLoader
+from lib import regex_helper
+#from lib.objects.Items import Item
+from packages.Item import Item
+
+class Hosts(AbstractModule):
+    """
+    Hosts module for AIL framework
+    """
+
+    def __init__(self):
+        super(Hosts, self).__init__()
+
+        config_loader = ConfigLoader()
+        self.r_cache = config_loader.get_redis_conn("Redis_Cache")
+
+        self.redis_cache_key = regex_helper.generate_redis_cache_key(self.module_name)
+
+        # regex timeout
+        self.regex_timeout = 30
+
+        # Waiting time in secondes between to message proccessed
+        self.pending_seconds = 1
+
+        self.host_regex = r'\b([a-zA-Z\d-]{,63}(?:\.[a-zA-Z\d-]{,63})+)\b'
+        re.compile(self.host_regex)
+
+        self.redis_logger.info(f"Module: {self.module_name} Launched")
+
+
+    def compute(self, message):
+        item = Item(message)
+
+        # mimetype = item_basic.get_item_mimetype(item.get_id())
+        # if mimetype.split('/')[0] == "text":
+
+        content = item.get_content()
+
+        hosts = regex_helper.regex_findall(self.module_name, self.redis_cache_key, self.host_regex, item.get_id(), content)
+        for host in hosts:
+            #print(host)
+
+            msg = f'{host} {item.get_id()}'
+            self.send_message_to_queue(msg, 'Host')
+
+
+if __name__ == '__main__':
+
+    module = Hosts()
+    module.run()
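A quick standalone check of the host regex used above, with an invented example text:

import re

host_regex = r'\b([a-zA-Z\d-]{,63}(?:\.[a-zA-Z\d-]{,63})+)\b'
text = 'see https://sub.example.com/login and mail admin@circl.lu'
print(re.findall(host_regex, text))
# ['sub.example.com', 'circl.lu'] - each host is then queued as '<host> <item_id>'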
@@ -54,7 +54,7 @@ class Tags(AbstractModule):
             self.send_message_to_queue(message, 'MISP_The_Hive_feeder')
 
             message = f'{item.get_type()};{item.get_subtype(r_str=True)};{item.get_id()}'
-            self.send_message_to_queue(message, 'Sync_module')
+            self.send_message_to_queue(message, 'SyncModule')
 
         else:
             # Malformed message
bin/modules/Zerobins.py (new executable file, 74 lines)
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+"""
+The Zerobins Module
+======================
+This module spots zerobins-like services for further processing
+"""
+
+##################################
+# Import External packages
+##################################
+import os
+import sys
+import time
+import pdb
+import re
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from modules.abstract_module import AbstractModule
+from lib import crawlers
+
+
+class Zerobins(AbstractModule):
+    """
+    Zerobins module for AIL framework
+    """
+
+    def __init__(self):
+        super(Zerobins, self).__init__()
+
+        binz = [
+            r'^https:\/\/(zerobin||privatebin)\..*$', # historical ones
+        ]
+
+        self.regex = re.compile('|'.join(binz))
+
+        # Pending time between two computation (computeNone) in seconds
+        self.pending_seconds = 10
+
+        # Send module state to logs
+        self.redis_logger.info(f'Module {self.module_name} initialized')
+
+
+    def computeNone(self):
+        """
+        Compute when no message in queue
+        """
+        self.redis_logger.debug("No message in queue")
+
+
+    def compute(self, message):
+        """regex_helper.regex_findall(self.module_name, self.redis_cache_key
+        Compute a message in queue
+        """
+        print(message)
+        url, id = message.split()
+
+        # Extract zerobins addresses
+        matching_binz = self.regex_findall(self.regex, id, url)
+
+        if len(matching_binz) > 0:
+            for bin in matching_binz:
+                print("send {} to crawler".format(bin))
+                crawlers.create_crawler_task(bin, screenshot=False, har=False, depth_limit=1, max_pages=1, auto_crawler=False, crawler_delta=3600, crawler_type=None, cookiejar_uuid=None, user_agent=None)
+
+        self.redis_logger.debug("Compute message in queue")
+
+
+if __name__ == '__main__':
+
+    module = Zerobins()
+    module.run()
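One reading note on the pattern above, not part of the commit: the doubled pipe in `(zerobin||privatebin)` adds an empty alternative, so any URL of the form `https://.<something>` also matches. A minimal check:

import re

regex = re.compile(r'^https:\/\/(zerobin||privatebin)\..*$')
print(bool(regex.match('https://zerobin.net/paste')))  # True (intended)
print(bool(regex.match('https://.weird-host/x')))      # True (empty alternative)
print(bool(regex.match('https://example.com')))        # False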
@@ -46,6 +46,9 @@ class AbstractModule(ABC):
         # If provided could be a namespaced channel like script:<ModuleName>
         self.redis_logger.channel = logger_channel
 
+        #Cache key
+        self.redis_cache_key = regex_helper.generate_redis_cache_key(self.module_name)
+        self.max_execution_time = 30
 
         # Run module endlessly
         self.proceed = True

@@ -102,6 +105,7 @@ class AbstractModule(ABC):
                 self.compute(message)
             except Exception as err:
                 trace = traceback.format_tb(err.__traceback__)
+                trace = ''.join(trace)
                 self.redis_logger.critical(f"Error in module {self.module_name}: {err}")
                 self.redis_logger.critical(f"Module {self.module_name} input message: {message}")
                 self.redis_logger.critical(trace)

@@ -109,8 +113,7 @@ class AbstractModule(ABC):
                 print(f"ERROR: {err}")
                 print(f'MESSAGE: {message}')
                 print('TRACEBACK:')
-                for line in trace:
-                    print(line)
+                print(trace)
 
         # remove from set_module
         ## check if item process == completed
@@ -637,8 +637,7 @@ class Item(AbstractObject):
             return content.decode()
 
     def get_ail_2_ail_payload(self):
-        payload = {'raw': self.get_gzip_content(b64=True),
-                    'compress': 'gzip'}
+        payload = {'raw': self.get_gzip_content(b64=True)}
         return payload
 
     # # TODO:

@@ -667,6 +666,9 @@ class Item(AbstractObject):
     def get_svg_icon(self):
         pass
 
+    def get_misp_object(self):
+        pass
+
     ############################################################################
     ############################################################################
     ############################################################################
@@ -22,8 +22,12 @@ subscribe = Redis_Duplicate
 [Indexer]
 subscribe = Redis_Global
 
-[DomClassifier]
+[Hosts]
 subscribe = Redis_Global
+publish = Redis_Host
+
+[DomClassifier]
+subscribe = Redis_Host
 publish = Redis_D4_client
 
 [D4_client]

@@ -33,6 +37,10 @@ subscribe = Redis_D4_client
 subscribe = Redis
 publish = Redis_Tags
 
+[Tracker_Typo_Squatting]
+subscribe = Redis_Host
+publish = Redis_Tags
+
 [Tracker_Term]
 subscribe = Redis_Global
 publish = Redis_Tags

@@ -161,3 +169,6 @@ publish = Redis_Mixer,Redis_Tags
 [IP]
 subscribe = Redis_Global
 publish = Redis_Duplicate,Redis_Tags
+
+[Zerobins]
+subscribe = Redis_Url
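Read together, these configuration hunks rewire the pipeline: the new Hosts module consumes Redis_Global and publishes extracted hostnames on Redis_Host as `<host> <item_id>` messages; DomClassifier and the new Tracker_Typo_Squatting subscribe to Redis_Host instead of re-scanning full item content themselves, and Zerobins picks up URLs from Redis_Url.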
@@ -58,13 +58,16 @@ class Retro_Hunt(AbstractModule):
     # end_time
 
     def compute(self, task_uuid):
-        print(task_uuid)
+        self.redis_logger.warning(f'{self.module_name}, starting Retro hunt task {task_uuid}')
+        print(f'starting Retro hunt task {task_uuid}')
         self.task_uuid = task_uuid
         self.progress = 0
         # First launch
         # restart
         rule = Tracker.get_retro_hunt_task_rule(task_uuid, r_compile=True)
 
         timeout = Tracker.get_retro_hunt_task_timeout(task_uuid)
+        self.redis_logger.debug(f'{self.module_name}, Retro Hunt rule {task_uuid} timeout {timeout}')
        sources = Tracker.get_retro_hunt_task_sources(task_uuid, r_sort=True)
 
        self.date_from = Tracker.get_retro_hunt_task_date_from(task_uuid)

@@ -85,6 +88,7 @@ class Retro_Hunt(AbstractModule):
         # # TODO: Filter previous item
         for dir in dirs_date:
             print(dir)
+            self.redis_logger.debug(f'{self.module_name}, Retro Hunt searching in directory {dir}')
             l_obj = Tracker.get_items_to_analyze(dir)
             for id in l_obj:
                 #print(f'{dir} / {id}')

@@ -92,6 +96,8 @@ class Retro_Hunt(AbstractModule):
                 # save current item in cache
                 Tracker.set_cache_retro_hunt_task_id(task_uuid, id)
 
+                self.redis_logger.debug(f'{self.module_name}, Retro Hunt rule {task_uuid}, searching item {id}')
+
                 yara_match = rule.match(data=self.item.get_content(), callback=self.yara_rules_match, which_callbacks=yara.CALLBACK_MATCHES, timeout=timeout)
 
                 # save last item

@@ -120,6 +126,8 @@ class Retro_Hunt(AbstractModule):
         Tracker.set_retro_hunt_nb_match(task_uuid)
         Tracker.clear_retro_hunt_task_cache(task_uuid)
 
+        print(f'Retro Hunt {task_uuid} completed')
+        self.redis_logger.warning(f'{self.module_name}, Retro Hunt {task_uuid} completed')
 
         # # TODO: stop
 

@@ -133,10 +141,12 @@ class Retro_Hunt(AbstractModule):
     # Tracker.set_retro_hunt_task_progress(task_uuid, progress)
 
     def yara_rules_match(self, data):
-        #print(data)
-        task_uuid = data['namespace']
         id = self.item.get_id()
+        #print(data)
+        task_uuid = data['namespace']
+
+        self.redis_logger.info(f'{self.module_name}, Retro hunt {task_uuid} match found: {id}')
+        print(f'Retro hunt {task_uuid} match found: {id}')
 
         Tracker.save_retro_hunt_match(task_uuid, id)
 
bin/trackers/Tracker_Typo_Squatting.py (new file, 116 lines)
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+"""
+The Tracker_Typo_Squatting Module
+===================
+
+"""
+
+##################################
+# Import External packages
+##################################
+import os
+import sys
+import time
+import requests
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from modules.abstract_module import AbstractModule
+import NotificationHelper
+from packages.Item import Item
+from packages import Term
+from lib import Tracker
+
+class Tracker_Typo_Squatting(AbstractModule):
+    mail_body_template = "AIL Framework,\nNew occurrence for tracked Typo: {}\nitem id: {}\nurl: {}{}"
+
+    """
+    Tracker_Typo_Squatting module for AIL framework
+    """
+
+    def __init__(self):
+        super(Tracker_Typo_Squatting, self).__init__()
+
+        self.pending_seconds = 5
+
+        self.full_item_url = self.process.config.get("Notifications", "ail_domain") + "/object/item?id="
+
+        # loads typosquatting
+        self.typosquat_tracked_words_list = Tracker.get_typosquatting_tracked_words_list()
+        self.last_refresh_typosquat = time.time()
+
+        self.redis_logger.info(f"Module: {self.module_name} Launched")
+
+    def compute(self, message):
+        # refresh Tracked typo
+        if self.last_refresh_typosquat < Term.get_tracked_term_last_updated_by_type('typosquatting'):
+            self.typosquat_tracked_words_list = Tracker.get_typosquatting_tracked_words_list()
+            self.last_refresh_typosquat = time.time()
+            self.redis_logger.debug('Tracked typosquatting refreshed')
+            print('Tracked typosquatting refreshed')
+
+        host, id = message.split()
+        item = Item(id)
+
+        # Cast message as Item
+        for key in self.typosquat_tracked_words_list.keys():
+            #print(key)
+            if host in self.typosquat_tracked_words_list[key]:
+                self.new_term_found(key, 'typosquatting', item)
+
+    def new_term_found(self, term, term_type, item):
+        uuid_list = Term.get_term_uuid_list(term, term_type)
+
+        item_id = item.get_id()
+        item_date = item.get_date()
+        item_source = item.get_source()
+        self.redis_logger.info(f'new tracked typo found: {term} in {item_id}')
+        print(f'new tracked typo found: {term} in {item_id}')
+        for term_uuid in uuid_list:
+            tracker_sources = Tracker.get_tracker_uuid_sources(term_uuid)
+            if not tracker_sources or item_source in tracker_sources:
+                Tracker.add_tracked_item(term_uuid, item_id)
+
+                tags_to_add = Term.get_term_tags(term_uuid)
+                for tag in tags_to_add:
+                    msg = '{};{}'.format(tag, item_id)
+                    self.send_message_to_queue(msg, 'Tags')
+
+                mail_to_notify = Term.get_term_mails(term_uuid)
+                if mail_to_notify:
+                    mail_subject = Tracker.get_email_subject(term_uuid)
+                    mail_body = Tracker_Typo_Squatting.mail_body_template.format(term, item_id, self.full_item_url, item_id)
+                    for mail in mail_to_notify:
+                        self.redis_logger.debug(f'Send Mail {mail_subject}')
+                        print(f'Send Mail {mail_subject}')
+                        NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body)
+
+                # Webhook
+                webhook_to_post = Term.get_term_webhook(term_uuid)
+                if webhook_to_post:
+                    json_request = {"trackerId": term_uuid,
+                                    "itemId": item_id,
+                                    "itemURL": self.full_item_url + item_id,
+                                    "term": term,
+                                    "itemSource": item_source,
+                                    "itemDate": item_date,
+                                    "tags": tags_to_add,
+                                    "emailNotification": f'{mail_to_notify}',
+                                    "trackerType": term_type
+                                    }
+                    try:
+                        response = requests.post(webhook_to_post, json=json_request)
+                        if response.status_code >= 400:
+                            self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: {response.reason}")
+                    except:
+                        self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: Something went wrong")
+
+
+if __name__ == '__main__':
+    module = Tracker_Typo_Squatting()
+    module.run()
@@ -57,6 +57,11 @@ minute_processed_paste = 10
 #Maximum line length authorized to make a diff between duplicates
 DiffMaxLineLength = 10000
 
+[AIL_2_AIL]
+server_host = 0.0.0.0
+server_port = 4443
+local_addr =
+
 #### Modules ####
 [BankAccount]
 max_execution_time = 60
@@ -18,22 +18,10 @@ if [ -z "$VIRTUAL_ENV" ]; then
 
 fi
 
-if [ ! -z "$TRAVIS" ]; then
-    echo "Travis detected"
-    ENV_PY="~/virtualenv/python3.6/bin/python"
-    export AIL_VENV="~/virtualenv/python3.6/"
-
-    DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd |sed 's/bin//' )"
-    export AIL_HOME="${DIR}"
-
-    export AIL_BIN=${AIL_HOME}/bin/
-    export AIL_FLASK=${AIL_HOME}/var/www/
-    export AIL_REDIS=${AIL_HOME}/redis/src/
-    export AIL_ARDB=${AIL_HOME}/ardb/src/
-else
-    # activate virtual environment
-    . ./AILENV/bin/activate
-fi
+# activate virtual environment
+. ./AILENV/bin/activate
 
 pip3 install -U pip
 pip3 install 'git+https://github.com/D4-project/BGP-Ranking.git/@7e698f87366e6f99b4d0d11852737db28e3ddc62#egg=pybgpranking&subdirectory=client'
@@ -14,9 +14,6 @@ sudo apt-get install python3-pip virtualenv python3-dev python3-tk libfreetype6-
 #Needed for downloading jemalloc
 sudo apt-get install wget -qq
 
-#optional tor install
-sudo apt-get install tor -qq
-
 #Needed for bloom filters
 sudo apt-get install libssl-dev libfreetype6-dev python3-numpy -qq
 
@ -70,6 +70,8 @@ flask>=1.1.4
flask-login
bcrypt>3.1.6

# Ail typo squatting
ail_typo_squatting

# Tests
nose>=1.3.7
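The new ail_typo_squatting dependency is the library that generates the domain variations used by the typo-squatting tracker. A rough sketch of driving it directly, based on the runAll() entry point from the library's README; exact parameter names may differ between versions, so treat them as assumptions:

import math
from ail_typo_squatting import runAll  # entry point per the library's README

# Generate typo variations for a single domain (parameter names are assumptions)
variations = runAll(domain='circl.lu', limit=math.inf,
                    formatoutput='text', pathOutput=None)
for domain in variations:
    print(domain)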
@ -59,9 +59,10 @@ class Test_Module_Categ(unittest.TestCase):

    def test_module(self):
        item_id = 'tests/2021/01/01/categ.gz'
        test_categ = ['CreditCards', 'Mail', 'Onion', 'Web', 'Credential', 'Cve']
        test_categ = ['CreditCards', 'Mail', 'Onion', 'Urls', 'Credential', 'Cve']

        result = self.module_obj.compute(item_id, r_result=True)
        print(result)
        self.assertCountEqual(result, test_categ)


class Test_Module_CreditCards(unittest.TestCase):
@ -87,8 +88,10 @@ class Test_Module_DomClassifier(unittest.TestCase):
        self.module_obj = DomClassifier()

    def test_module(self):
        test_host = 'foo.be'
        item_id = 'tests/2021/01/01/domain_classifier.gz'
        result = self.module_obj.compute(item_id, r_result=True)
        msg = f'{test_host} {item_id}'
        result = self.module_obj.compute(msg, r_result=True)
        self.assertTrue(len(result))


class Test_Module_Global(unittest.TestCase):
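The DomClassifier test change above reflects a new message format: the module now receives a host and an item id in one space-separated string instead of the item id alone. A one-line illustration using the names from the test:

test_host = 'foo.be'
item_id = 'tests/2021/01/01/domain_classifier.gz'
msg = f'{test_host} {item_id}'     # new message format: '<host> <item_id>'
host, item_id = msg.split(' ', 1)  # how a consumer splits it back apart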
26
update/v4.2/Update.py
Executable file
@ -0,0 +1,26 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*

import os
import re
import sys
import time
import redis
import datetime

sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader

sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater

class Updater(AIL_Updater):
    """default Updater."""

    def __init__(self, version):
        super(Updater, self).__init__(version)

if __name__ == '__main__':

    updater = Updater('v4.2')
    updater.run_update()
33
update/v4.2/Update.sh
Executable file
@ -0,0 +1,33 @@
#!/bin/bash

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_BIN. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;

export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH

GREEN="\\033[1;32m"
DEFAULT="\\033[0;39m"

echo -e $GREEN"Shutting down AIL ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait

# SUBMODULES #
git submodule update

echo ""
echo -e $GREEN"Installing typo-squatting ..."$DEFAULT
pip3 install -U ail_typo_squatting

echo ""
echo -e $GREEN"Updating d4-client ..."$DEFAULT
pip3 install -U d4-pyclient

exit 0
|
@ -261,3 +261,12 @@ def graph_node_json():
|
||||||
|
|
||||||
res = Correlate_object.get_graph_node_object_correlation(object_type, correlation_id, mode, correlation_names, correlation_objects, requested_correl_type=type_id, max_nodes=max_nodes)
|
res = Correlate_object.get_graph_node_object_correlation(object_type, correlation_id, mode, correlation_names, correlation_objects, requested_correl_type=type_id, max_nodes=max_nodes)
|
||||||
return jsonify(res)
|
return jsonify(res)
|
||||||
|
|
||||||
|
@correlation.route('/correlation/subtype_search', methods=['POST'])
|
||||||
|
@login_required
|
||||||
|
@login_read_only
|
||||||
|
def subtype_search():
|
||||||
|
obj_type = request.form.get('object_type')
|
||||||
|
obj_subtype = request.form.get('object_subtype')
|
||||||
|
obj_id = request.form.get('object_id')
|
||||||
|
return redirect(url_for('correlation.show_correlation', object_type=obj_type, type_id=obj_subtype, correlation_id=obj_id))
|
||||||
|
|
|
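Because subtype_search only reads three form fields before redirecting to the correlation view, it can also be exercised outside the web UI. A sketch with requests; the host, port, session cookie and example field values are all assumptions:

import requests

resp = requests.post('https://127.0.0.1:7000/correlation/subtype_search',
                     data={'object_type': 'pgp',               # example values
                           'object_subtype': 'mail',
                           'object_id': 'john.doe@example.com'},
                     cookies={'session': '<session cookie>'},  # authenticated session assumed
                     verify=False, allow_redirects=False)
print(resp.status_code, resp.headers.get('Location'))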
@ -75,8 +75,34 @@
<div class="col-xl-10">
    <div class="mt-1" id="barchart_type">
    </div>

    <div class="card border-secondary my-2">
        <div class="card-body text-dark">
            <h5 class="card-title">Search {{correlation_type}} by name:</h5>

            <form action="{{ url_for('correlation.subtype_search') }}" id="search_subtype_onj" method='post'>

                <div class="input-group mb-1">
                    <input type="text" class="form-control" name="object_type" value="{{correlation_type}}" hidden>
                    <select class="custom-select col-2" name="object_subtype" value="{{obj_type}}" required>
                        <option value="">{{correlation_type}} Type...</option>
                        {% for typ in l_type %}
                            <option value="{{typ}}">{{typ}}</option>
                        {% endfor %}
                    </select>

                    <input type="text" class="form-control col-8" name="object_id" value="" placeholder="{{correlation_type}} ID" required>
                    <button class="btn btn-primary input-group-addon search-obj col-2"><i class="fas fa-search"></i></button>
                </div>

            </form>
        </div>
    </div>

</div>


<div class="col-xl-2">

    <div class="card mb-3 mt-2" style="background-color:#d9edf7;">
@ -85,6 +85,16 @@ def tracked_menu_yara():
    global_term = Term.get_all_global_tracked_terms(filter_type=filter_type)
    return render_template("trackersManagement.html", user_term=user_term, global_term=global_term, bootstrap_label=bootstrap_label, filter_type=filter_type)

@hunter.route("/trackers/typosquatting")
@login_required
@login_read_only
def tracked_menu_typosquatting():
    filter_type = 'typosquatting'
    user_id = current_user.get_id()
    user_term = Term.get_all_user_tracked_terms(user_id, filter_type=filter_type)
    global_term = Term.get_all_global_tracked_terms(filter_type=filter_type)
    return render_template("trackersManagement.html", user_term=user_term, global_term=global_term, bootstrap_label=bootstrap_label, filter_type=filter_type)


@hunter.route("/tracker/add", methods=['GET', 'POST'])
@login_required
@ -208,6 +218,13 @@ def show_tracker():
    else:
        yara_rule_content = None

    if tracker_metadata['type'] == 'typosquatting':
        typo_squatting = list(Tracker.get_tracker_typosquatting_domains(tracker_uuid))
        typo_squatting.sort()
    else:
        typo_squatting = None


    if date_from:
        res = Term.parse_get_tracker_term_item({'uuid': tracker_uuid, 'date_from': date_from, 'date_to': date_to}, user_id)
        if res[1] != 200:
@ -224,6 +241,7 @@ def show_tracker():

    return render_template("showTracker.html", tracker_metadata=tracker_metadata,
                                yara_rule_content=yara_rule_content,
                                typo_squatting=typo_squatting,
                                bootstrap_label=bootstrap_label)

@hunter.route("/tracker/update_tracker_description", methods=['POST'])
@ -94,6 +94,7 @@
    <option value="set">Set</option>
    <option value="regex">Regex</option>
    <option value="yara">YARA rule</option>
    <option value="typosquatting">Typo-squatting</option>
</select>

<p id="tracker_desc">Terms to track (space separated)</p>
@ -199,6 +200,12 @@ $(document).ready(function(){
        $("#tracker").hide();
        $("#nb_word").hide();
        $("#yara_rule").show();
    } else if (tracker_type=="typosquatting") {
        $("#tracker_desc").text("Generates variations of a domain name. Only one domain name at a time.");
        $("#tracker_desc").show();
        $("#tracker").show();
        $("#nb_word").hide();
        $("#yara_rule").hide();
    }
});
@ -91,7 +91,25 @@
<tbody>
    <tr>
        <td>{{ tracker_metadata['type'] }}</td>
        {% if tracker_metadata['type'] == 'typosquatting' %}
        <td>
            <a class="btn btn-primary" data-toggle="collapse" href="#collapseTypo" role="button" aria-expanded="false" aria-controls="collapseTypo">
                {{ tracker_metadata['tracker'].split(",")[0] }}
            </a>
            <div class="collapse" id="collapseTypo">
                <div class="card card-body">
                    {% if typo_squatting %}
                        {% for typo in typo_squatting %}
                            {{typo}}
                            <br/>
                        {% endfor %}
                    {% endif %}
                </div>
            </div>
        </td>
        {% else %}
        <td>{{ tracker_metadata['tracker'] }}</td>
        {% endif %}
        <td>{{ tracker_metadata['date'][0:4] }}/{{ tracker_metadata['date'][4:6] }}/{{ tracker_metadata['date'][6:8] }}</td>
        <td>
            {% if tracker_metadata['level'] == 0 %}
@ -70,12 +70,16 @@
<span>
    <a target="_blank" href="{{ url_for('hunter.show_tracker') }}?uuid={{ dict_uuid['uuid'] }}">
        {% if dict_uuid['term'] %}
            {% if dict_uuid['term']|length > 256 %}
            {% if dict_uuid['term']|length > 2000 %}
                {{ dict_uuid['term'][0:256] }}...
                {{ dict_uuid['term'][0:50] }}...
            {% else %}
                {% if dict_uuid['term']|length > 100 %}
                    {{ dict_uuid['term'][0:100] }}...
                {% else %}
                    {{ dict_uuid['term'] }}
                {% endif %}
            {% endif %}
        {% endif %}
    </a>
</span>
<div>
@ -135,12 +139,16 @@
<span>
    <a target="_blank" href="{{ url_for('hunter.show_tracker') }}?uuid={{ dict_uuid['uuid'] }}">
        {% if dict_uuid['term'] %}
            {% if dict_uuid['term']|length > 256 %}
            {% if dict_uuid['term']|length > 2000 %}
                {{ dict_uuid['term'][0:256] }}...
                {{ dict_uuid['term'][0:50] }}...
            {% else %}
                {% if dict_uuid['term']|length > 100 %}
                    {{ dict_uuid['term'][0:100] }}...
                {% else %}
                    {{ dict_uuid['term'] }}
                {% endif %}
            {% endif %}
        {% endif %}
    </a>
</span>
<div>
@ -42,6 +42,12 @@
            <span class="bg-danger text-white font-weight-bold" style="font-size: 120%"> { </span>
            <span> YARA</span>
        </a>
    </li>
    <li class="nav-item">
        <a class="nav-link" href="{{url_for('hunter.tracked_menu_typosquatting')}}" id="nav_tracker_typosquatting">
            <i class="fa fa-clone"></i>
            <span>Typo-squatting</span>
        </a>
    </li>
</ul>
<h5 class="d-flex text-muted w-100" id="nav_title_retro_hunt">
@ -35,6 +35,7 @@
</div>


<!--
<button class="btn btn-primary" onclick="tagsSelector()">
    <i class="fas fa-plus"></i>
@ -47,7 +48,7 @@
var ltags;
var ltagsgalaxies;

$.getJSON("{{ url_for('tags_ui.tag_taxonomies_tags_enabled_json') }}",
$.getJSON("{{ url_for('Tags.get_all_tags') }}",
    function(data) {
    {% if 'taxonomies_tags' in tags_selector_data %}
        var valueData = [