diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh index bd6755b3..d6a5fbe6 100755 --- a/bin/LAUNCH.sh +++ b/bin/LAUNCH.sh @@ -19,15 +19,6 @@ if [ -e "${DIR}/AILENV/bin/python" ]; then ENV_PY="${DIR}/AILENV/bin/python" export AIL_VENV=${AIL_HOME}/AILENV/ . ./AILENV/bin/activate -elif [ ! -z "$TRAVIS" ]; then - echo "Travis detected" - ENV_PY="~/virtualenv/python3.6/bin/python" - export AIL_VENV="~/virtualenv/python3.6/" - - export AIL_BIN=${AIL_HOME}/bin/ - export AIL_FLASK=${AIL_HOME}/var/www/ - export AIL_REDIS=${AIL_HOME}/redis/src/ - export AIL_ARDB=${AIL_HOME}/ardb/src/ else echo "Please make sure you have a AIL-framework environment, au revoir" exit 1 @@ -209,8 +200,6 @@ function launching_scripts { sleep 0.1 screen -S "Script_AIL" -X screen -t "Decoder" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Decoder.py; read x" sleep 0.1 - screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./DomClassifier.py; read x" - sleep 0.1 screen -S "Script_AIL" -X screen -t "Keys" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Keys.py; read x" sleep 0.1 screen -S "Script_AIL" -X screen -t "Onion" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Onion.py; read x" @@ -220,17 +209,25 @@ function launching_scripts { screen -S "Script_AIL" -X screen -t "Telegram" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Telegram.py; read x" sleep 0.1 + screen -S "Script_AIL" -X screen -t "Hosts" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Hosts.py; read x" + sleep 0.1 + screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./DomClassifier.py; read x" + sleep 0.1 + screen -S "Script_AIL" -X screen -t "Urls" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Urls.py; read x" sleep 0.1 screen -S "Script_AIL" -X screen -t "SQLInjectionDetection" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./SQLInjectionDetection.py; read x" sleep 0.1 screen -S "Script_AIL" -X screen -t "LibInjection" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./LibInjection.py; 
read x" sleep 0.1 - + screen -S "Script_AIL" -X screen -t "Zerobins" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Zerobins.py; read x" + sleep 0.1 ################################## # TRACKERS MODULES # ################################## + screen -S "Script_AIL" -X screen -t "Tracker_Typo_Squatting" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Typo_Squatting.py; read x" + sleep 0.1 screen -S "Script_AIL" -X screen -t "Tracker_Term" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Term.py; read x" sleep 0.1 screen -S "Script_AIL" -X screen -t "Tracker_Regex" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Regex.py; read x" @@ -439,12 +436,12 @@ function launch_feeder { } function killscript { - if [[ $islogged || $isqueued || $is_ail_core || $isscripted || $isflasked || $isfeeded || $iscrawler ]]; then + if [[ $islogged || $isqueued || $is_ail_core || $isscripted || $isflasked || $isfeeded || $iscrawler || $is_ail_2_ail ]]; then echo -e $GREEN"Killing Script"$DEFAULT - kill $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler + kill $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler $is_ail_2_ail sleep 0.2 echo -e $ROSE`screen -ls`$DEFAULT - echo -e $GREEN"\t* $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler killed."$DEFAULT + echo -e $GREEN"\t* $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler $is_ail_2_ail killed."$DEFAULT else echo -e $RED"\t* No script to kill"$DEFAULT fi diff --git a/bin/Languages.py b/bin/Languages.py index bd646fd7..48c58a81 100755 --- a/bin/Languages.py +++ b/bin/Languages.py @@ -3,7 +3,6 @@ import os import sys -import cld3 import time from packages import Item diff --git a/bin/core/Sync_importer.py b/bin/core/Sync_importer.py index f0513aaf..0176565d 100755 --- a/bin/core/Sync_importer.py +++ b/bin/core/Sync_importer.py @@ -79,7 +79,7 @@ class Sync_importer(AbstractModule): b64_gzip_content = ail_stream['payload']['raw'] 
# # TODO: create default id - item_id = ail_stream['meta']['ail:id'] + 'test' + item_id = ail_stream['meta']['ail:id'] message = f'{item_id} {b64_gzip_content}' print(item_id) diff --git a/bin/core/Sync_module.py b/bin/core/Sync_module.py index d8c01d83..7c1330e0 100755 --- a/bin/core/Sync_module.py +++ b/bin/core/Sync_module.py @@ -48,8 +48,6 @@ class Sync_module(AbstractModule): def compute(self, message): - print(message) - ### REFRESH DICT if self.last_refresh < ail_2_ail.get_last_updated_sync_config(): self.last_refresh = time.time() @@ -71,17 +69,16 @@ class Sync_module(AbstractModule): tags = obj.get_tags(r_set=True) # check filter + tags + #print(message) for queue_uuid in self.dict_sync_queues: filter_tags = self.dict_sync_queues[queue_uuid]['filter'] - print(tags) - print(filter_tags) - print(tags.issubset(filter_tags)) if filter_tags and tags: - if tags.issubset(filter_tags): + #print(f'tags: {tags} filter: {filter_tags}') + if filter_tags.issubset(tags): obj_dict = obj.get_default_meta() # send to queue push and/or pull for dict_ail in self.dict_sync_queues[queue_uuid]['ail_instances']: - + print(f'ail_uuid: {dict_ail["ail_uuid"]} obj: {message}') ail_2_ail.add_object_to_sync_queue(queue_uuid, dict_ail['ail_uuid'], obj_dict, push=dict_ail['push'], pull=dict_ail['pull']) diff --git a/bin/core/ail_2_ail.py b/bin/core/ail_2_ail.py index 027eb791..2dd12ea6 100755 --- a/bin/core/ail_2_ail.py +++ b/bin/core/ail_2_ail.py @@ -1028,8 +1028,9 @@ def api_create_sync_queue(json_dict): tags = json_dict.get('tags') if not tags: return {"status": "error", "reason": "no tags provided"}, 400 - if not Tag.are_enabled_tags(tags): - return {"status": "error", "reason": "Invalid/Disabled tags"}, 400 + # FIXME: add custom tags + # if not Tag.are_enabled_tags(tags): + # return {"status": "error", "reason": "Invalid/Disabled tags"}, 400 max_size = json_dict.get('max_size') if not max_size: @@ -1064,8 +1065,9 @@ def api_edit_sync_queue(json_dict): tags = json_dict.get('tags') 
if tags: - if not Tag.are_enabled_tags(tags): - return {"status": "error", "reason": "Invalid/Disabled tags"}, 400 + # FIXME: add custom tags + # if not Tag.are_enabled_tags(tags): + # return {"status": "error", "reason": "Invalid/Disabled tags"}, 400 edit_sync_queue_filter_tags(queue_uuid, tags) max_size = json_dict.get('max_size') @@ -1203,11 +1205,13 @@ def create_ail_stream(Object): 'type': Object.get_type()} # OBJECT META - ail_stream['meta'] = {'ail_mime-type': 'text/plain'} + ail_stream['meta'] = {'ail:mime-type': 'text/plain'} + ail_stream['meta']['compress'] = 'gzip' + ail_stream['meta']['encoding'] = 'base64' ail_stream['meta']['ail:id'] = Object.get_id() - ail_stream['meta']['ail:tags'] = Object.get_tags() - # GLOBAL PAYLOAD - ail_stream['meta']['ail:uuid'] = get_ail_uuid() + ail_stream['meta']['tags'] = Object.get_tags() + # GLOBAL META + ail_stream['meta']['uuid_org'] = get_ail_uuid() # OBJECT PAYLOAD ail_stream['payload'] = Object.get_ail_2_ail_payload() diff --git a/bin/core/ail_2_ail_client.py b/bin/core/ail_2_ail_client.py index 810f93c6..8f23b053 100755 --- a/bin/core/ail_2_ail_client.py +++ b/bin/core/ail_2_ail_client.py @@ -20,6 +20,16 @@ sys.path.append(os.environ['AIL_BIN']) # Import Project packages ################################## from core import ail_2_ail +from lib.ConfigLoader import ConfigLoader + +config_loader = ConfigLoader() +local_addr = config_loader.get_config_str('AIL_2_AIL', 'local_addr') +if not local_addr or local_addr == None: + local_addr = None +else: + local_addr = (local_addr, 0) +config_loader = None + #### LOGS #### redis_logger = publisher @@ -68,10 +78,9 @@ async def push(websocket, ail_uuid): Obj, queue_uuid = ail_2_ail.get_sync_queue_object_and_queue_uuid(ail_uuid) if Obj: obj_ail_stream = ail_2_ail.create_ail_stream(Obj) + print(obj_ail_stream['meta']) obj_ail_stream = json.dumps(obj_ail_stream) - sys.stdout.write(obj_ail_stream) - # send objects await websocket.send(obj_ail_stream) await asyncio.sleep(0.1) @@ 
-112,6 +121,7 @@ async def ail_to_ail_client(ail_uuid, sync_mode, api, ail_key=None, client_id=No async with websockets.connect( uri, ssl=ssl_context, + local_addr=local_addr, #open_timeout=10, websockers 10.0 /!\ python>=3.7 extra_headers={"Authorization": f"{ail_key}"} ) as websocket: diff --git a/bin/core/ail_2_ail_server.py b/bin/core/ail_2_ail_server.py index 8a5bfd7a..6a49b7d9 100755 --- a/bin/core/ail_2_ail_server.py +++ b/bin/core/ail_2_ail_server.py @@ -17,6 +17,12 @@ sys.path.append(os.environ['AIL_BIN']) ################################## from pubsublogger import publisher from core import ail_2_ail +from lib.ConfigLoader import ConfigLoader + +config_loader = ConfigLoader() +host = config_loader.get_config_str('AIL_2_AIL', 'server_host') +port = config_loader.get_config_int('AIL_2_AIL', 'server_port') +config_loader = None # # TODO: refactor logging #### LOGS #### @@ -303,9 +309,6 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol): if __name__ == '__main__': - host = '0.0.0.0' - port = 4443 - print('Launching Server...') redis_logger.info('Launching Server...') @@ -315,7 +318,7 @@ if __name__ == '__main__': cert_dir = os.environ['AIL_FLASK'] ssl_context.load_cert_chain(certfile=os.path.join(cert_dir, 'server.crt'), keyfile=os.path.join(cert_dir, 'server.key')) - start_server = websockets.serve(ail_to_ail_serv, host, port, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol) + start_server = websockets.serve(ail_to_ail_serv, host, port, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol, max_size=None) print(f'Server Launched: wss://{host}:{port}') redis_logger.info(f'Server Launched: wss://{host}:{port}') diff --git a/bin/import/ail_json_importer/Ail_feeder_telegram.py b/bin/import/ail_json_importer/Ail_feeder_telegram.py new file mode 100755 index 00000000..06045a5e --- /dev/null +++ b/bin/import/ail_json_importer/Ail_feeder_telegram.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" +The JSON Receiver Module 
+================ + +Recieve Json Items (example: Twitter feeder) + +""" +import os +import json +import sys +import datetime + +sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib')) +import item_basic +import Username + +sys.path.append(os.path.join(os.environ['AIL_BIN'], 'import', 'ail_json_importer')) +from Default_json import Default_json + +class Ail_feeder_telegram(Default_json): + """Twitter Feeder functions""" + + def __init__(self, name, json_item): + super().__init__(name, json_item) + + def get_feeder_name(self): + return 'telegram' + + # define item id + def get_item_id(self): + # use twitter timestamp ? + item_date = datetime.date.today().strftime("%Y/%m/%d") + channel_id = str(self.json_item['meta']['channel_id']) + message_id = str(self.json_item['meta']['message_id']) + item_id = f'{channel_id}_{message_id}' + return os.path.join('telegram', item_date, item_id) + '.gz' + + def process_json_meta(self, process, item_id): + ''' + Process JSON meta filed. + ''' + channel_id = str(self.json_item['meta']['channel_id']) + message_id = str(self.json_item['meta']['message_id']) + telegram_id = f'{channel_id}_{message_id}' + item_basic.add_map_obj_id_item_id(telegram_id, item_id, 'telegram_id') + #print(self.json_item['meta']) + username = None + if self.json_item['meta'].get('user'): + username = str(self.json_item['meta']['user']) + else: + if self.json_item['meta'].get('channel'): + username = str(self.json_item['meta']['channel']['username']) + if username: + #print(username) + item_date = item_basic.get_item_date(item_id) + Username.save_item_correlation('telegram', username, item_id, item_date) + return None diff --git a/bin/lib/Investigations.py b/bin/lib/Investigations.py index 55775830..39a27650 100755 --- a/bin/lib/Investigations.py +++ b/bin/lib/Investigations.py @@ -270,7 +270,7 @@ def exists_investigation(investigation_uuid): # created by user def get_user_all_investigations(user_id): - return 
r_tracking.smembers('investigations:user:{user_id}') + return r_tracking.smembers(f'investigations:user:{user_id}') def is_object_investigated(obj_id, obj_type, subtype=''): return r_tracking.exists(f'obj:investigations:{obj_type}:{subtype}:{obj_id}') @@ -278,6 +278,15 @@ def is_object_investigated(obj_id, obj_type, subtype=''): def get_obj_investigations(obj_id, obj_type, subtype=''): return r_tracking.smembers(f'obj:investigations:{obj_type}:{subtype}:{obj_id}') +def delete_obj_investigations(obj_id, obj_type, subtype=''): + unregistred = False + for investigation_uuid in get_obj_investigations(obj_id, obj_type, subtype=subtype): + investigation = Investigation(investigation_uuid) + investigation.unregister_object(obj_id, obj_type, subtype) + unregistred = True + return unregistred + + # # TODO: fix default threat_level analysis # # TODO: limit description + name # # TODO: sanityze tags @@ -286,7 +295,7 @@ def create_investigation(user_id, date, name, threat_level, analysis, info, tags investigation_uuid = generate_uuid() r_tracking.sadd('investigations:all', investigation_uuid) # user map - r_tracking.sadd('investigations:user:{user_id}', investigation_uuid) + r_tracking.sadd(f'investigations:user:{user_id}', investigation_uuid) # metadata r_tracking.hset(f'investigations:data:{investigation_uuid}', 'creator_user', user_id) diff --git a/bin/lib/Tracker.py b/bin/lib/Tracker.py index 2f8e8d42..f06f9df5 100755 --- a/bin/lib/Tracker.py +++ b/bin/lib/Tracker.py @@ -11,11 +11,15 @@ import yara import datetime import base64 +from ail_typo_squatting import runAll +import math + from flask import escape sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/')) import Date +import Tag sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/')) import ConfigLoader @@ -26,6 +30,10 @@ r_cache = config_loader.get_redis_conn("Redis_Cache") r_serv_db = config_loader.get_redis_conn("ARDB_DB") r_serv_tracker = config_loader.get_redis_conn("ARDB_Tracker") + +items_dir = 
config_loader.get_config_str("Directories", "pastes") +if items_dir[-1] == '/': + items_dir = items_dir[:-1] config_loader = None email_regex = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}' @@ -75,8 +83,8 @@ def get_all_tracker_type(): def get_all_tracker_uuid(): return r_serv_tracker.smembers(f'trackers:all') -def get_all_tracker_by_type(tracker_type): - r_serv_tracker.smembers(f'trackers:all:{tracker_type}') +def get_all_tracker_uuid_by_type(tracker_type): + return r_serv_tracker.smembers(f'trackers:all:{tracker_type}') # def get_all_tracker(): # l_keys_name = [] @@ -211,6 +219,20 @@ def get_tracker_items_by_daterange(tracker_uuid, date_from, date_to): all_item_id |= r_serv_tracker.smembers(f'tracker:item:{tracker_uuid}:{date_day}') return all_item_id +def get_tracker_typosquatting_domains(tracker_uuid): + return r_serv_tracker.smembers(f'tracker:typosquatting:{tracker_uuid}') + +def get_typosquatting_tracked_words_list(): + all_typo = dict() + typos_uuid = get_all_tracker_uuid_by_type("typosquatting") + + for typo_uuid in typos_uuid: + tracker = get_tracker_by_uuid(typo_uuid) + all_typo[tracker] = get_tracker_typosquatting_domains(typo_uuid) + + return all_typo + + def add_tracked_item(tracker_uuid, item_id): item_date = item_basic.get_item_date(item_id) # track item @@ -248,7 +270,6 @@ def update_tracker_daterange(tracker_uuid, date, op='add'): if op == 'del': pass - def remove_tracked_item(item_id): item_date = item_basic.get_item_date(item_id) for tracker_uuid in get_item_all_trackers_uuid(item_id): @@ -267,6 +288,11 @@ def is_obj_tracked(obj_type, subtype, id): def get_obj_all_trackers(obj_type, subtype, id): return r_serv_tracker.smembers(f'obj:trackers:{obj_type}:{obj_id}') +# # TODO: ADD all Objects + Subtypes +def delete_obj_trackers(obj_type, subtype, id): + if obj_type == 'item': + remove_tracked_item(id) + def get_email_subject(tracker_uuid): tracker_description = get_tracker_description(tracker_uuid) if not tracker_description: @@ -400,6 +426,15 
@@ def api_validate_tracker_to_add(tracker , tracker_type, nb_words=1): tracker = ",".join(words_set) tracker = "{};{}".format(tracker, nb_words) + elif tracker_type == 'typosquatting': + tracker = tracker.lower() + # Take only the first term + domain = tracker.split(" ") + if len(domain) > 1: + return {"status": "error", "reason": "Only one domain is accepted at a time"}, 400 + if not "." in tracker: + return {"status": "error", "reason": "Invalid domain name"}, 400 + elif tracker_type=='yara_custom': if not is_valid_yara_rule(tracker): @@ -440,6 +475,12 @@ def create_tracker(tracker, tracker_type, user_id, level, tags, mails, descripti tracker = save_yara_rule(tracker_type, tracker, tracker_uuid=tracker_uuid) tracker_type = 'yara' + elif tracker_type == 'typosquatting': + domain = tracker.split(" ")[0] + typo_generation = runAll(domain=domain, limit=math.inf, formatoutput="text", pathOutput="-", verbose=False) + for typo in typo_generation: + r_serv_tracker.sadd(f'tracker:typosquatting:{tracker_uuid}', typo) + # create metadata r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'tracked', tracker) r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'type', tracker_type) @@ -1039,7 +1080,10 @@ def get_retro_hunt_dir_day_to_analyze(task_uuid, date, filter_last=False, source # # TODO: move me def get_items_to_analyze(dir, last=None): - full_dir = os.path.join(os.environ['AIL_HOME'], 'PASTES', dir) # # TODO: # FIXME: use item config dir + if items_dir == 'PASTES': + full_dir = os.path.join(os.environ['AIL_HOME'], 'PASTES', dir) + else: + full_dir = os.path.join(items_dir, dir) if os.path.isdir(full_dir): all_items = sorted([os.path.join(dir, f) for f in os.listdir(full_dir) if os.path.isfile(os.path.join(full_dir, f))]) # remove processed items @@ -1265,7 +1309,21 @@ def api_delete_retro_hunt_task(task_uuid): else: return (delete_retro_hunt_task(task_uuid), 200) -# if __name__ == '__main__': +#### DB FIX #### +def get_trackers_custom_tags(): + tags = set() + 
for tracker_uuid in get_all_tracker_uuid(): + for tag in get_tracker_tags(tracker_uuid): + tags.add(tag) + for task_uuid in get_all_retro_hunt_tasks(): + for tag in get_retro_hunt_task_tags(task_uuid): + tags.add(tag) + return tags + +#### -- #### + +if __name__ == '__main__': + print(get_trackers_custom_tags()) # fix_all_tracker_uuid_list() # res = get_all_tracker_uuid() # print(len(res)) diff --git a/bin/lib/item_basic.py b/bin/lib/item_basic.py index 46c03a24..608a5ba6 100755 --- a/bin/lib/item_basic.py +++ b/bin/lib/item_basic.py @@ -116,6 +116,9 @@ def is_domain_root(item_id): else: return True +def get_item_url(item_id): + return r_serv_metadata.hget(f'paste_metadata:{item_id}', 'real_link') + def get_nb_children(item_id): return r_serv_metadata.scard('paste_children:{}'.format(item_id)) @@ -166,7 +169,7 @@ def add_item_parent(parent_item_id, item_id): #### UNKNOW SECTION #### def get_obj_id_item_id(parent_type, parent_id): - all_parents_type = ['twitter_id', 'jabber_id'] + all_parents_type = ['twitter_id', 'jabber_id', 'telegram_id'] if parent_type in all_parents_type: return r_serv_metadata.hget('map:{}:item_id'.format(parent_type), parent_id) else: @@ -177,6 +180,8 @@ def add_map_obj_id_item_id(obj_id, item_id, obj_type): r_serv_metadata.hset('map:twitter_id:item_id', obj_id, item_id) if obj_type == 'jabber_id': r_serv_metadata.hset('map:jabber_id:item_id', obj_id, item_id) + if obj_type == 'telegram_id': + r_serv_metadata.hset('map:telegram_id:item_id', obj_id, item_id) # delete twitter id diff --git a/bin/lib/objects/Items.py b/bin/lib/objects/Items.py index 95eed98b..f49f5320 100755 --- a/bin/lib/objects/Items.py +++ b/bin/lib/objects/Items.py @@ -11,25 +11,33 @@ import html2text from io import BytesIO +from pymisp import MISPObject + +sys.path.append(os.environ['AIL_BIN']) +################################## +# Import Project packages +################################## +from export.Export import get_ail_uuid # # TODO: REPLACE +from 
lib.objects.abstract_object import AbstractObject +from lib.ConfigLoader import ConfigLoader +from lib import item_basic +from lib import domain_basic + +from packages import Tag + sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/')) -import Tag import Cryptocurrency import Pgp sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/')) -import item_basic -import domain_basic -import ConfigLoader import Correlate_object import Decoded import Screenshot import Username -from abstract_object import AbstractObject -from item_basic import * from flask import url_for -config_loader = ConfigLoader.ConfigLoader() +config_loader = ConfigLoader() # get and sanityze PASTE DIRECTORY # # TODO: rename PASTES_FOLDER PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/' @@ -89,6 +97,12 @@ class Item(AbstractObject): """ return item_basic.get_item_content(self.id) + def get_raw_content(self): + filepath = self.get_filename() + with open(filepath, 'rb') as f: + raw_content = BytesIO(f.read()) + return raw_content + def get_gzip_content(self, b64=False): with open(self.get_filename(), 'rb') as f: content = f.read() @@ -97,8 +111,7 @@ class Item(AbstractObject): return content.decode() def get_ail_2_ail_payload(self): - payload = {'raw': self.get_gzip_content(b64=True), - 'compress': 'gzip'} + payload = {'raw': self.get_gzip_content(b64=True)} return payload # # TODO: @@ -108,6 +121,7 @@ class Item(AbstractObject): # # WARNING: UNCLEAN DELETE /!\ TEST ONLY /!\ # TODO: DELETE ITEM CORRELATION + TAGS + METADATA + ... 
def delete(self): + self._delete() try: os.remove(self.get_filename()) return True @@ -128,9 +142,19 @@ class Item(AbstractObject): color = '#332288' return {'style': '', 'icon': '', 'color': color, 'radius':5} - ############################################################################ - ############################################################################ - ############################################################################ + def get_misp_object(self): + obj_date = self.get_date() + obj = MISPObject('ail-leak', standalone=True) + obj.first_seen = obj_date + + obj_attrs = [] + obj_attrs.append( obj.add_attribute('first-seen', value=obj_date) ) + obj_attrs.append( obj.add_attribute('raw-data', value=self.id, data=self.get_raw_content()) ) + obj_attrs.append( obj.add_attribute('sensor', value=get_ail_uuid()) ) + for obj_attr in obj_attrs: + for tag in self.get_tags(): + obj_attr.add_tag(tag) + return obj def exist_correlation(self): pass @@ -688,4 +712,7 @@ def delete_domain_node(item_id): delete_item(child_id) -#if __name__ == '__main__': +# if __name__ == '__main__': +# +# item = Item('') +# print(item.get_misp_object().to_json()) diff --git a/bin/modules/DomClassifier.py b/bin/modules/DomClassifier.py index 240916eb..08f202ab 100755 --- a/bin/modules/DomClassifier.py +++ b/bin/modules/DomClassifier.py @@ -53,37 +53,38 @@ class DomClassifier(AbstractModule): def compute(self, message, r_result=False): - item = Item(message) + host, id = message.split() - item_content = item.get_content() + item = Item(id) item_basename = item.get_basename() item_date = item.get_date() item_source = item.get_source() try: - mimetype = item_basic.get_item_mimetype(item.get_id()) - if mimetype.split('/')[0] == "text": - self.c.text(rawtext=item_content) - self.c.potentialdomain() - self.c.validdomain(passive_dns=True, extended=False) - #self.redis_logger.debug(self.c.vdomain) + self.c.text(rawtext=host) + print(self.c.domain) + self.c.validdomain(passive_dns=True, 
extended=False) + #self.redis_logger.debug(self.c.vdomain) - if self.c.vdomain and d4.is_passive_dns_enabled(): - for dns_record in self.c.vdomain: - self.send_message_to_queue(dns_record) + print(self.c.vdomain) + print() - localizeddomains = self.c.include(expression=self.cc_tld) - if localizeddomains: - print(localizeddomains) - self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc_tld};{item.get_id()}") + if self.c.vdomain and d4.is_passive_dns_enabled(): + for dns_record in self.c.vdomain: + self.send_message_to_queue(dns_record) - localizeddomains = self.c.localizedomain(cc=self.cc) - if localizeddomains: - print(localizeddomains) - self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc};{item.get_id()}") + localizeddomains = self.c.include(expression=self.cc_tld) + if localizeddomains: + print(localizeddomains) + self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc_tld};{item.get_id()}") - if r_result: - return self.c.vdomain + localizeddomains = self.c.localizedomain(cc=self.cc) + if localizeddomains: + print(localizeddomains) + self.redis_logger.warning(f"DomainC;{item_source};{item_date};{item_basename};Checked {localizeddomains} located in {self.cc};{item.get_id()}") + + if r_result: + return self.c.vdomain except IOError as err: self.redis_logger.error(f"Duplicate;{item_source};{item_date};{item_basename};CRC Checksum Failed") diff --git a/bin/modules/Hosts.py b/bin/modules/Hosts.py new file mode 100755 index 00000000..5c89a5dc --- /dev/null +++ b/bin/modules/Hosts.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +""" +The Hosts Module +====================== + +This module is consuming the Redis-list created by the Global module. 
+ +It is looking for Hosts + +""" + +################################## +# Import External packages +################################## +import os +import re +import sys +import time + +sys.path.append(os.environ['AIL_BIN']) +################################## +# Import Project packages +################################## +from modules.abstract_module import AbstractModule +from lib.ConfigLoader import ConfigLoader +from lib import regex_helper +#from lib.objects.Items import Item +from packages.Item import Item + +class Hosts(AbstractModule): + """ + Hosts module for AIL framework + """ + + def __init__(self): + super(Hosts, self).__init__() + + config_loader = ConfigLoader() + self.r_cache = config_loader.get_redis_conn("Redis_Cache") + + self.redis_cache_key = regex_helper.generate_redis_cache_key(self.module_name) + + # regex timeout + self.regex_timeout = 30 + + # Waiting time in secondes between to message proccessed + self.pending_seconds = 1 + + self.host_regex = r'\b([a-zA-Z\d-]{,63}(?:\.[a-zA-Z\d-]{,63})+)\b' + re.compile(self.host_regex) + + self.redis_logger.info(f"Module: {self.module_name} Launched") + + + def compute(self, message): + item = Item(message) + + # mimetype = item_basic.get_item_mimetype(item.get_id()) + # if mimetype.split('/')[0] == "text": + + content = item.get_content() + + hosts = regex_helper.regex_findall(self.module_name, self.redis_cache_key, self.host_regex, item.get_id(), content) + for host in hosts: + #print(host) + + msg = f'{host} {item.get_id()}' + self.send_message_to_queue(msg, 'Host') + + + +if __name__ == '__main__': + + module = Hosts() + module.run() diff --git a/bin/modules/Tags.py b/bin/modules/Tags.py index 2ed37b74..9bfc080c 100755 --- a/bin/modules/Tags.py +++ b/bin/modules/Tags.py @@ -54,7 +54,7 @@ class Tags(AbstractModule): self.send_message_to_queue(message, 'MISP_The_Hive_feeder') message = f'{item.get_type()};{item.get_subtype(r_str=True)};{item.get_id()}' - self.send_message_to_queue(message, 
'Sync_module') + self.send_message_to_queue(message, 'SyncModule') else: # Malformed message diff --git a/bin/modules/Zerobins.py b/bin/modules/Zerobins.py new file mode 100755 index 00000000..3a9b1fa7 --- /dev/null +++ b/bin/modules/Zerobins.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" +The Zerobins Module +====================== +This module spots zerobins-like services for further processing +""" + +################################## +# Import External packages +################################## +import os +import sys +import time +import pdb +import re +sys.path.append(os.environ['AIL_BIN']) +################################## +# Import Project packages +################################## +from modules.abstract_module import AbstractModule +from lib import crawlers + + +class Zerobins(AbstractModule): + """ + Zerobins module for AIL framework + """ + + def __init__(self): + super(Zerobins, self).__init__() + + binz = [ + r'^https:\/\/(zerobin||privatebin)\..*$', # historical ones + ] + + self.regex = re.compile('|'.join(binz)) + + # Pending time between two computation (computeNone) in seconds + self.pending_seconds = 10 + + # Send module state to logs + self.redis_logger.info(f'Module {self.module_name} initialized') + + + def computeNone(self): + """ + Compute when no message in queue + """ + self.redis_logger.debug("No message in queue") + + + def compute(self, message): + """regex_helper.regex_findall(self.module_name, self.redis_cache_key + Compute a message in queue + """ + print(message) + url, id = message.split() + + # Extract zerobins addresses + matching_binz = self.regex_findall(self.regex, id, url) + + if len(matching_binz) > 0: + for bin in matching_binz: + print("send {} to crawler".format(bin)) + crawlers.create_crawler_task(bin, screenshot=False, har=False, depth_limit=1, max_pages=1, auto_crawler=False, crawler_delta=3600, crawler_type=None, cookiejar_uuid=None, user_agent=None) + + self.redis_logger.debug("Compute 
message in queue") + + +if __name__ == '__main__': + + module = Zerobins() + module.run() \ No newline at end of file diff --git a/bin/modules/abstract_module.py b/bin/modules/abstract_module.py index 555a6bc9..32c60041 100644 --- a/bin/modules/abstract_module.py +++ b/bin/modules/abstract_module.py @@ -46,6 +46,9 @@ class AbstractModule(ABC): # If provided could be a namespaced channel like script: self.redis_logger.channel = logger_channel + #Cache key + self.redis_cache_key = regex_helper.generate_redis_cache_key(self.module_name) + self.max_execution_time = 30 # Run module endlessly self.proceed = True @@ -102,6 +105,7 @@ class AbstractModule(ABC): self.compute(message) except Exception as err: trace = traceback.format_tb(err.__traceback__) + trace = ''.join(trace) self.redis_logger.critical(f"Error in module {self.module_name}: {err}") self.redis_logger.critical(f"Module {self.module_name} input message: {message}") self.redis_logger.critical(trace) @@ -109,8 +113,7 @@ class AbstractModule(ABC): print(f"ERROR: {err}") print(f'MESSAGE: {message}') print('TRACEBACK:') - for line in trace: - print(line) + print(trace) # remove from set_module ## check if item process == completed diff --git a/bin/packages/Item.py b/bin/packages/Item.py index f8f96f93..336a565b 100755 --- a/bin/packages/Item.py +++ b/bin/packages/Item.py @@ -637,8 +637,7 @@ class Item(AbstractObject): return content.decode() def get_ail_2_ail_payload(self): - payload = {'raw': self.get_gzip_content(b64=True), - 'compress': 'gzip'} + payload = {'raw': self.get_gzip_content(b64=True)} return payload # # TODO: @@ -667,6 +666,9 @@ class Item(AbstractObject): def get_svg_icon(self): pass + def get_misp_object(self): + pass + ############################################################################ ############################################################################ ############################################################################ diff --git a/bin/packages/Term.py 
b/bin/packages/Term.py index f46ec4ea..0cce6e9c 100755 --- a/bin/packages/Term.py +++ b/bin/packages/Term.py @@ -103,7 +103,7 @@ def get_text_word_frequency(item_content, filtering=True): # # TODO: create all tracked words def get_tracked_words_list(): return list(r_serv_term.smembers('all:tracker:word')) - + def get_set_tracked_words_list(): set_list = r_serv_term.smembers('all:tracker:set') all_set_list = [] diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg index 4cc5ba6a..654df7bd 100644 --- a/bin/packages/modules.cfg +++ b/bin/packages/modules.cfg @@ -22,8 +22,12 @@ subscribe = Redis_Duplicate [Indexer] subscribe = Redis_Global -[DomClassifier] +[Hosts] subscribe = Redis_Global +publish = Redis_Host + +[DomClassifier] +subscribe = Redis_Host publish = Redis_D4_client [D4_client] @@ -33,6 +37,10 @@ subscribe = Redis_D4_client subscribe = Redis publish = Redis_Tags +[Tracker_Typo_Squatting] +subscribe = Redis_Host +publish = Redis_Tags + [Tracker_Term] subscribe = Redis_Global publish = Redis_Tags @@ -161,3 +169,6 @@ publish = Redis_Mixer,Redis_Tags [IP] subscribe = Redis_Global publish = Redis_Duplicate,Redis_Tags + +[Zerobins] +subscribe = Redis_Url \ No newline at end of file diff --git a/bin/trackers/Retro_Hunt.py b/bin/trackers/Retro_Hunt.py index 23d40058..037520fc 100755 --- a/bin/trackers/Retro_Hunt.py +++ b/bin/trackers/Retro_Hunt.py @@ -58,13 +58,16 @@ class Retro_Hunt(AbstractModule): # end_time def compute(self, task_uuid): - print(task_uuid) + self.redis_logger.warning(f'{self.module_name}, starting Retro hunt task {task_uuid}') + print(f'starting Retro hunt task {task_uuid}') self.task_uuid = task_uuid self.progress = 0 # First launch # restart rule = Tracker.get_retro_hunt_task_rule(task_uuid, r_compile=True) + timeout = Tracker.get_retro_hunt_task_timeout(task_uuid) + self.redis_logger.debug(f'{self.module_name}, Retro Hunt rule {task_uuid} timeout {timeout}') sources = Tracker.get_retro_hunt_task_sources(task_uuid, r_sort=True) 
self.date_from = Tracker.get_retro_hunt_task_date_from(task_uuid) @@ -85,6 +88,7 @@ class Retro_Hunt(AbstractModule): # # TODO: Filter previous item for dir in dirs_date: print(dir) + self.redis_logger.debug(f'{self.module_name}, Retro Hunt searching in directory {dir}') l_obj = Tracker.get_items_to_analyze(dir) for id in l_obj: #print(f'{dir} / {id}') @@ -92,6 +96,8 @@ class Retro_Hunt(AbstractModule): # save current item in cache Tracker.set_cache_retro_hunt_task_id(task_uuid, id) + self.redis_logger.debug(f'{self.module_name}, Retro Hunt rule {task_uuid}, searching item {id}') + yara_match = rule.match(data=self.item.get_content(), callback=self.yara_rules_match, which_callbacks=yara.CALLBACK_MATCHES, timeout=timeout) # save last item @@ -120,6 +126,8 @@ class Retro_Hunt(AbstractModule): Tracker.set_retro_hunt_nb_match(task_uuid) Tracker.clear_retro_hunt_task_cache(task_uuid) + print(f'Retro Hunt {task_uuid} completed') + self.redis_logger.warning(f'{self.module_name}, Retro Hunt {task_uuid} completed') # # TODO: stop @@ -133,10 +141,12 @@ class Retro_Hunt(AbstractModule): # Tracker.set_retro_hunt_task_progress(task_uuid, progress) def yara_rules_match(self, data): - #print(data) - - task_uuid = data['namespace'] id = self.item.get_id() + #print(data) + task_uuid = data['namespace'] + + self.redis_logger.info(f'{self.module_name}, Retro hunt {task_uuid} match found: {id}') + print(f'Retro hunt {task_uuid} match found: {id}') Tracker.save_retro_hunt_match(task_uuid, id) diff --git a/bin/trackers/Tracker_Term.py b/bin/trackers/Tracker_Term.py index c23b1077..7cfdc9b7 100755 --- a/bin/trackers/Tracker_Term.py +++ b/bin/trackers/Tracker_Term.py @@ -113,7 +113,7 @@ class Tracker_Term(AbstractModule): nb_uniq_word += 1 if nb_uniq_word >= nb_words_threshold: self.new_term_found(word_set, 'set', item) - + def new_term_found(self, term, term_type, item): uuid_list = Term.get_term_uuid_list(term, term_type) diff --git a/bin/trackers/Tracker_Typo_Squatting.py 
b/bin/trackers/Tracker_Typo_Squatting.py new file mode 100644 index 00000000..bdbfaaf5 --- /dev/null +++ b/bin/trackers/Tracker_Typo_Squatting.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* +""" +The Tracker_Typo_Squatting Module +=================== + +""" + +################################## +# Import External packages +################################## +import os +import sys +import time +import requests + + +sys.path.append(os.environ['AIL_BIN']) +################################## +# Import Project packages +################################## +from modules.abstract_module import AbstractModule +import NotificationHelper +from packages.Item import Item +from packages import Term +from lib import Tracker + +class Tracker_Typo_Squatting(AbstractModule): + mail_body_template = "AIL Framework,\nNew occurrence for tracked Typo: {}\nitem id: {}\nurl: {}{}" + + """ + Tracker_Typo_Squatting module for AIL framework + """ + + def __init__(self): + super(Tracker_Typo_Squatting, self).__init__() + + self.pending_seconds = 5 + + self.full_item_url = self.process.config.get("Notifications", "ail_domain") + "/object/item?id=" + + # loads typosquatting + self.typosquat_tracked_words_list = Tracker.get_typosquatting_tracked_words_list() + self.last_refresh_typosquat = time.time() + + self.redis_logger.info(f"Module: {self.module_name} Launched") + + def compute(self, message): + # refresh Tracked typo + if self.last_refresh_typosquat < Term.get_tracked_term_last_updated_by_type('typosquatting'): + self.typosquat_tracked_words_list = Tracker.get_typosquatting_tracked_words_list() + self.last_refresh_typosquat = time.time() + self.redis_logger.debug('Tracked typosquatting refreshed') + print('Tracked typosquatting refreshed') + + host, id = message.split() + item = Item(id) + + # Cast message as Item + for key in self.typosquat_tracked_words_list.keys(): + #print(key) + if host in self.typosquat_tracked_words_list[key]: + self.new_term_found(key, 
'typosquatting', item) + + def new_term_found(self, term, term_type, item): + uuid_list = Term.get_term_uuid_list(term, term_type) + + item_id = item.get_id() + item_date = item.get_date() + item_source = item.get_source() + self.redis_logger.info(f'new tracked typo found: {term} in {item_id}') + print(f'new tracked typo found: {term} in {item_id}') + for term_uuid in uuid_list: + tracker_sources = Tracker.get_tracker_uuid_sources(term_uuid) + if not tracker_sources or item_source in tracker_sources: + Tracker.add_tracked_item(term_uuid, item_id) + + tags_to_add = Term.get_term_tags(term_uuid) + for tag in tags_to_add: + msg = '{};{}'.format(tag, item_id) + self.send_message_to_queue(msg, 'Tags') + + mail_to_notify = Term.get_term_mails(term_uuid) + if mail_to_notify: + mail_subject = Tracker.get_email_subject(term_uuid) + mail_body = Tracker_Typo_Squatting.mail_body_template.format(term, item_id, self.full_item_url, item_id) + for mail in mail_to_notify: + self.redis_logger.debug(f'Send Mail {mail_subject}') + print(f'Send Mail {mail_subject}') + NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body) + + # Webhook + webhook_to_post = Term.get_term_webhook(term_uuid) + if webhook_to_post: + json_request = {"trackerId": term_uuid, + "itemId": item_id, + "itemURL": self.full_item_url + item_id, + "term": term, + "itemSource": item_source, + "itemDate": item_date, + "tags": tags_to_add, + "emailNotification": f'{mail_to_notify}', + "trackerType": term_type + } + try: + response = requests.post(webhook_to_post, json=json_request) + if response.status_code >= 400: + self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: {response.reason}") + except: + self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: Something went wrong") + + + +if __name__ == '__main__': + module = Tracker_Typo_Squatting() + module.run() diff --git a/configs/core.cfg.sample b/configs/core.cfg.sample index 
705b648e..0a04268f 100644 --- a/configs/core.cfg.sample +++ b/configs/core.cfg.sample @@ -57,6 +57,11 @@ minute_processed_paste = 10 #Maximum line length authorized to make a diff between duplicates DiffMaxLineLength = 10000 +[AIL_2_AIL] +server_host = 0.0.0.0 +server_port = 4443 +local_addr = + #### Modules #### [BankAccount] max_execution_time = 60 diff --git a/install_virtualenv.sh b/install_virtualenv.sh index c0e5b28b..c410069d 100755 --- a/install_virtualenv.sh +++ b/install_virtualenv.sh @@ -18,22 +18,10 @@ if [ -z "$VIRTUAL_ENV" ]; then fi -if [ ! -z "$TRAVIS" ]; then - echo "Travis detected" - ENV_PY="~/virtualenv/python3.6/bin/python" - export AIL_VENV="~/virtualenv/python3.6/" - DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd |sed 's/bin//' )" - export AIL_HOME="${DIR}" +# activate virtual environment +. ./AILENV/bin/activate - export AIL_BIN=${AIL_HOME}/bin/ - export AIL_FLASK=${AIL_HOME}/var/www/ - export AIL_REDIS=${AIL_HOME}/redis/src/ - export AIL_ARDB=${AIL_HOME}/ardb/src/ -else - # activate virtual environment - . 
./AILENV/bin/activate -fi pip3 install -U pip pip3 install 'git+https://github.com/D4-project/BGP-Ranking.git/@7e698f87366e6f99b4d0d11852737db28e3ddc62#egg=pybgpranking&subdirectory=client' diff --git a/installing_deps.sh b/installing_deps.sh index 977ed642..24a3b7b1 100755 --- a/installing_deps.sh +++ b/installing_deps.sh @@ -14,9 +14,6 @@ sudo apt-get install python3-pip virtualenv python3-dev python3-tk libfreetype6- #Needed for downloading jemalloc sudo apt-get install wget -qq -#optional tor install -sudo apt-get install tor -qq - #Needed for bloom filters sudo apt-get install libssl-dev libfreetype6-dev python3-numpy -qq diff --git a/requirements.txt b/requirements.txt index f2ebbc1c..2ea2f499 100644 --- a/requirements.txt +++ b/requirements.txt @@ -70,6 +70,8 @@ flask>=1.1.4 flask-login bcrypt>3.1.6 +# Ail typo squatting +ail_typo_squatting # Tests nose>=1.3.7 diff --git a/tests/test_modules.py b/tests/test_modules.py index 728294fe..e65edcc7 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -59,9 +59,10 @@ class Test_Module_Categ(unittest.TestCase): def test_module(self): item_id = 'tests/2021/01/01/categ.gz' - test_categ = ['CreditCards', 'Mail', 'Onion', 'Web', 'Credential', 'Cve'] + test_categ = ['CreditCards', 'Mail', 'Onion', 'Urls', 'Credential', 'Cve'] result = self.module_obj.compute(item_id, r_result=True) + print(result) self.assertCountEqual(result, test_categ) class Test_Module_CreditCards(unittest.TestCase): @@ -87,8 +88,10 @@ class Test_Module_DomClassifier(unittest.TestCase): self.module_obj = DomClassifier() def test_module(self): + test_host = 'foo.be' item_id = 'tests/2021/01/01/domain_classifier.gz' - result = self.module_obj.compute(item_id, r_result=True) + msg = f'{test_host} {item_id}' + result = self.module_obj.compute(msg, r_result=True) self.assertTrue(len(result)) class Test_Module_Global(unittest.TestCase): diff --git a/update/v4.2/Update.py b/update/v4.2/Update.py new file mode 100755 index 00000000..baecba0a --- 
/dev/null +++ b/update/v4.2/Update.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +import os +import re +import sys +import time +import redis +import datetime + +sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/')) +import ConfigLoader + +sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin')) +from ail_updater import AIL_Updater + +class Updater(AIL_Updater): + """default Updater.""" + + def __init__(self, version): + super(Updater, self).__init__(version) + +if __name__ == '__main__': + + updater = Updater('v4.2') + updater.run_update() diff --git a/update/v4.2/Update.sh b/update/v4.2/Update.sh new file mode 100755 index 00000000..a18aae61 --- /dev/null +++ b/update/v4.2/Update.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1; +[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1; +[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1; +[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_BIN. Run the script from the virtual environment." && exit 1; +[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." 
&& exit 1; + +export PATH=$AIL_HOME:$PATH +export PATH=$AIL_REDIS:$PATH +export PATH=$AIL_ARDB:$PATH +export PATH=$AIL_BIN:$PATH +export PATH=$AIL_FLASK:$PATH + +GREEN="\\033[1;32m" +DEFAULT="\\033[0;39m" + +echo -e $GREEN"Shutting down AIL ..."$DEFAULT +bash ${AIL_BIN}/LAUNCH.sh -ks +wait + +# SUBMODULES # +git submodule update + +echo "" +echo -e $GREEN"Installing typo-squatting ..."$DEFAULT +pip3 install -U ail_typo_squatting + +echo "" +echo -e $GREEN"Updating d4-client ..."$DEFAULT +pip3 install -U d4-pyclient + +exit 0 diff --git a/var/www/blueprints/correlation.py b/var/www/blueprints/correlation.py index 03c7ea5e..ecc9b7ac 100644 --- a/var/www/blueprints/correlation.py +++ b/var/www/blueprints/correlation.py @@ -261,3 +261,12 @@ def graph_node_json(): res = Correlate_object.get_graph_node_object_correlation(object_type, correlation_id, mode, correlation_names, correlation_objects, requested_correl_type=type_id, max_nodes=max_nodes) return jsonify(res) + +@correlation.route('/correlation/subtype_search', methods=['POST']) +@login_required +@login_read_only +def subtype_search(): + obj_type = request.form.get('object_type') + obj_subtype = request.form.get('object_subtype') + obj_id = request.form.get('object_id') + return redirect(url_for('correlation.show_correlation', object_type=obj_type, type_id=obj_subtype, correlation_id=obj_id)) diff --git a/var/www/modules/hashDecoded/templates/DaysCorrelation.html b/var/www/modules/hashDecoded/templates/DaysCorrelation.html index 9b561240..5bbed4c4 100644 --- a/var/www/modules/hashDecoded/templates/DaysCorrelation.html +++ b/var/www/modules/hashDecoded/templates/DaysCorrelation.html @@ -75,8 +75,34 @@
+ +
+
+
Search {{correlation_type}} by name:
+ +
+ +
+ + + + + +
+ +
+
+
+
+ +
diff --git a/var/www/modules/hunter/Flask_hunter.py b/var/www/modules/hunter/Flask_hunter.py index 1a52e31b..41019d49 100644 --- a/var/www/modules/hunter/Flask_hunter.py +++ b/var/www/modules/hunter/Flask_hunter.py @@ -85,6 +85,16 @@ def tracked_menu_yara(): global_term = Term.get_all_global_tracked_terms(filter_type=filter_type) return render_template("trackersManagement.html", user_term=user_term, global_term=global_term, bootstrap_label=bootstrap_label, filter_type=filter_type) +@hunter.route("/trackers/typosquatting") +@login_required +@login_read_only +def tracked_menu_typosquatting(): + filter_type = 'typosquatting' + user_id = current_user.get_id() + user_term = Term.get_all_user_tracked_terms(user_id, filter_type=filter_type) + global_term = Term.get_all_global_tracked_terms(filter_type=filter_type) + return render_template("trackersManagement.html", user_term=user_term, global_term=global_term, bootstrap_label=bootstrap_label, filter_type=filter_type) + @hunter.route("/tracker/add", methods=['GET', 'POST']) @login_required @@ -207,6 +217,13 @@ def show_tracker(): yara_rule_content = Tracker.get_yara_rule_content(tracker_metadata['tracker']) else: yara_rule_content = None + + if tracker_metadata['type'] == 'typosquatting': + typo_squatting = list(Tracker.get_tracker_typosquatting_domains(tracker_uuid)) + typo_squatting.sort() + else: + typo_squatting = None + if date_from: res = Term.parse_get_tracker_term_item({'uuid': tracker_uuid, 'date_from': date_from, 'date_to': date_to}, user_id) @@ -224,6 +241,7 @@ def show_tracker(): return render_template("showTracker.html", tracker_metadata=tracker_metadata, yara_rule_content=yara_rule_content, + typo_squatting=typo_squatting, bootstrap_label=bootstrap_label) @hunter.route("/tracker/update_tracker_description", methods=['POST']) diff --git a/var/www/modules/hunter/templates/edit_tracker.html b/var/www/modules/hunter/templates/edit_tracker.html index cc3c8f80..2843ebfe 100644 --- 
a/var/www/modules/hunter/templates/edit_tracker.html +++ b/var/www/modules/hunter/templates/edit_tracker.html @@ -94,6 +94,7 @@ +

Terms to track (space separated)

@@ -199,6 +200,12 @@ $(document).ready(function(){ $("#tracker").hide(); $("#nb_word").hide(); $("#yara_rule").show(); + } else if (tracker_type=="typosquatting") { + $("#tracker_desc").text("Generation of variation for domain name. Only one domain name at a time."); + $("#tracker_desc").show(); + $("#tracker").show(); + $("#nb_word").hide(); + $("#yara_rule").hide(); } }); diff --git a/var/www/modules/hunter/templates/showTracker.html b/var/www/modules/hunter/templates/showTracker.html index 5f551d24..ca6fdf6b 100644 --- a/var/www/modules/hunter/templates/showTracker.html +++ b/var/www/modules/hunter/templates/showTracker.html @@ -69,29 +69,47 @@
- - - - - - - - - {% if tracker_metadata['webhook'] %} - - {% endif %} - - - + + + + + + + + + {% if tracker_metadata['webhook'] %} + + {% endif %} + + + - + {% if tracker_metadata['type'] == 'typosquatting' %} + + {% else %} + + {% endif %}
TypeTrackerDate addedAccess LevelCreated byFirst seenLast seenWebhookTags Email
TypeTrackerDate addedAccess LevelCreated byFirst seenLast seenWebhookTags Email
{{ tracker_metadata['type'] }}{{ tracker_metadata['tracker'] }} + +
+
+ {% if typo_squatting %} + {% for typo in typo_squatting %} + {{typo}} +
+ {% endfor %} + {%endif%} +
+
+
{{ tracker_metadata['tracker'] }}{{ tracker_metadata['date'][0:4] }}/{{ tracker_metadata['date'][4:6] }}/{{ tracker_metadata['date'][6:8] }} {% if tracker_metadata['level'] == 0 %} diff --git a/var/www/modules/hunter/templates/trackersManagement.html b/var/www/modules/hunter/templates/trackersManagement.html index aedc790d..09e19994 100644 --- a/var/www/modules/hunter/templates/trackersManagement.html +++ b/var/www/modules/hunter/templates/trackersManagement.html @@ -70,10 +70,14 @@ {% if dict_uuid['term'] %} - {% if dict_uuid['term']|length > 256 %} - {{ dict_uuid['term'][0:256] }}... + {% if dict_uuid['term']|length > 2000 %} + {{ dict_uuid['term'][0:50] }}... {% else %} - {{ dict_uuid['term'] }} + {% if dict_uuid['term']|length > 100 %} + {{ dict_uuid['term'][0:100] }}... + {% else %} + {{ dict_uuid['term'] }} + {% endif %} {% endif %} {% endif %} @@ -135,10 +139,14 @@ {% if dict_uuid['term'] %} - {% if dict_uuid['term']|length > 256 %} - {{ dict_uuid['term'][0:256] }}... + {% if dict_uuid['term']|length > 2000 %} + {{ dict_uuid['term'][0:50] }}... {% else %} - {{ dict_uuid['term'] }} + {% if dict_uuid['term']|length > 100 %} + {{ dict_uuid['term'][0:100] }}... + {% else %} + {{ dict_uuid['term'] }} + {% endif %} {% endif %} {% endif %} diff --git a/var/www/templates/hunter/menu_sidebar.html b/var/www/templates/hunter/menu_sidebar.html index 7410b5f0..d727810e 100644 --- a/var/www/templates/hunter/menu_sidebar.html +++ b/var/www/templates/hunter/menu_sidebar.html @@ -42,6 +42,12 @@  {  YARA + +