diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh
index 0977e5d4..1fa0c3f9 100755
--- a/bin/LAUNCH.sh
+++ b/bin/LAUNCH.sh
@@ -117,16 +117,39 @@ function launching_kvrocks {
 }

 function launching_logs {
+    conf_dir="${AIL_HOME}/configs/"
+    syslog_cmd=""
+    syslog_enabled=$(grep '^ail_logs_syslog ' "${conf_dir}/core.cfg" | cut -d " " -f 3)
+    if [ "$syslog_enabled" = "True" ]; then
+        syslog_cmd="--syslog"
+    fi
+    syslog_server=$(grep '^ail_logs_syslog_server ' "${conf_dir}/core.cfg" | cut -d " " -f 3)
+    syslog_port=$(grep '^ail_logs_syslog_port ' "${conf_dir}/core.cfg" | cut -d " " -f 3)
+    if [ -n "$syslog_server" ]; then
+        syslog_cmd="${syslog_cmd} -ss ${syslog_server}"
+        if [ -n "$syslog_port" ]; then
+            syslog_cmd="${syslog_cmd} -sp ${syslog_port}"
+        fi
+    fi
+    syslog_facility=$(grep '^ail_logs_syslog_facility ' "${conf_dir}/core.cfg" | cut -d " " -f 3)
+    if [ -n "$syslog_facility" ]; then
+        syslog_cmd="${syslog_cmd} -sf ${syslog_facility}"
+    fi
+    syslog_level=$(grep '^ail_logs_syslog_level ' "${conf_dir}/core.cfg" | cut -d " " -f 3)
+    if [ -n "$syslog_level" ]; then
+        syslog_cmd="${syslog_cmd} -sl ${syslog_level}"
+    fi
+
     screen -dmS "Logging_AIL"
     sleep 0.1
     echo -e $GREEN"\t* Launching logging process"$DEFAULT
-    screen -S "Logging_AIL" -X screen -t "LogQueue" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Queuing -l ../logs/; read x"
+    screen -S "Logging_AIL" -X screen -t "LogQueue" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Queuing -l ../logs/ ${syslog_cmd}; read x"
     sleep 0.1
-    screen -S "Logging_AIL" -X screen -t "LogScript" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Script -l ../logs/; read x"
+    screen -S "Logging_AIL" -X screen -t "LogScript" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Script -l ../logs/ ${syslog_cmd}; read x"
     sleep 0.1
-    screen -S "Logging_AIL" -X screen -t "LogScript" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Sync -l ../logs/; read x"
+    screen -S "Logging_AIL" -X screen -t "LogSync" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Sync -l ../logs/ ${syslog_cmd}; read x"
     sleep 0.1
-    screen -S "Logging_AIL" -X screen -t "LogScript" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Crawler -l ../logs/; read x"
+    screen -S "Logging_AIL" -X screen -t "LogCrawler" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Crawler -l ../logs/ ${syslog_cmd}; read x"
 }

 function launching_queues {
@@ -259,6 +282,8 @@ function launching_scripts {
     ##################################
     #        TRACKERS MODULES        #
     ##################################
+    screen -S "Script_AIL" -X screen -t "Tracker_Typo_Squatting" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Typo_Squatting.py; read x"
+    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Tracker_Term" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Term.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Tracker_Regex" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Regex.py; read x"
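The `grep | cut` lookups above are brittle (an unanchored `grep 'ail_logs_syslog'` would also match every `ail_logs_syslog_*` key, hence the anchored patterns). For reference, a minimal Python sketch of the same flag assembly, assuming `core.cfg` carries the `[Logs]` section added to `configs/core.cfg.sample` later in this patch:

```python
from configparser import ConfigParser

# Build the same log_subscriber syslog arguments the launcher assembles above.
config = ConfigParser()
config.read('configs/core.cfg')

args = []
if config.getboolean('Logs', 'ail_logs_syslog', fallback=False):
    args.append('--syslog')
    server = config.get('Logs', 'ail_logs_syslog_server', fallback='').strip()
    if server:
        args += ['-ss', server]
        port = config.get('Logs', 'ail_logs_syslog_port', fallback='').strip()
        if port:
            args += ['-sp', port]
    facility = config.get('Logs', 'ail_logs_syslog_facility', fallback='').strip()
    if facility:
        args += ['-sf', facility]
    level = config.get('Logs', 'ail_logs_syslog_level', fallback='').strip()
    if level:
        args += ['-sl', level]

print(' '.join(args))   # e.g. "--syslog -ss 192.168.0.10 -sp 514"
```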
diff --git a/bin/import/ail_json_importer/Ail_bgp_monitor.py b/bin/import/ail_json_importer/Ail_bgp_monitor.py
new file mode 100755
index 00000000..e76760d5
--- /dev/null
+++ b/bin/import/ail_json_importer/Ail_bgp_monitor.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+"""
+The JSON Receiver Module
+================
+
+Receive JSON items (example: Twitter feeder)
+
+"""
+import os
+import json
+import sys
+import datetime
+import uuid
+
+from packages import Tag
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
+import item_basic
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'import', 'ail_json_importer'))
+from Default_json import Default_json
+
+class Ail_bgp_monitor(Default_json):
+    """bgp_monitor feeder functions"""
+
+    def __init__(self, name, json_item):
+        super().__init__(name, json_item)
+
+    def get_feeder_name(self):
+        return 'bgp_monitor'
+
+    # # define item id (kept from the urlextract feeder as a reference)
+    # def get_item_id(self):
+    #     # use twitter timestamp ?
+    #     item_date = datetime.date.today().strftime("%Y/%m/%d")
+    #     item_id = str(self.json_item['meta']['twitter:url-extracted'])
+    #     item_id = item_id.split('//')
+    #     if len(item_id) > 1:
+    #         item_id = ''.join(item_id[1:])
+    #     else:
+    #         item_id = item_id[0]
+    #     item_id = item_id.replace('/', '_')
+    #     if len(item_id) > 215:
+    #         item_id = '{}{}.gz'.format(item_id[:215], str(uuid.uuid4()))
+    #     else:
+    #         item_id = '{}{}.gz'.format(item_id, str(uuid.uuid4()))
+    #     return os.path.join('urlextract', item_date, item_id)
+
+    def process_json_meta(self, process, item_id):
+        '''
+        Process JSON meta field.
+        '''
+        json_meta = self.get_json_meta()
+
+        tag = 'infoleak:automatic-detection=bgp_monitor'
+        Tag.add_tag('item', tag, item_id)
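A hypothetical driver to show how this feeder subclass behaves during import; the payload shape and item id below are illustrative assumptions, only the feeder name and the tagging behaviour come from the class above:

```python
# Hypothetical usage sketch; the payload shape and item id are assumptions.
json_item = {'meta': {}, 'data': '<base64 item content>'}
feeder = Ail_bgp_monitor('bgp_monitor', json_item)

print(feeder.get_feeder_name())   # -> 'bgp_monitor'

# During import, process_json_meta() tags every created item:
feeder.process_json_meta(None, 'bgp_monitor/2022/06/01/example.gz')
# equivalent to Tag.add_tag('item', 'infoleak:automatic-detection=bgp_monitor', item_id)
```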
diff --git a/bin/lib/Tracker.py b/bin/lib/Tracker.py
index b90c3cae..e0381f97 100755
--- a/bin/lib/Tracker.py
+++ b/bin/lib/Tracker.py
@@ -10,6 +10,9 @@ import yara
 import datetime
 import base64

+from ail_typo_squatting import runAll
+import math
+
 from flask import escape

@@ -182,8 +185,8 @@ def get_all_tracker_type():
 def get_all_tracker_uuid():
     return r_serv_tracker.smembers(f'trackers:all')

-def get_all_tracker_by_type(tracker_type):
-    r_serv_tracker.smembers(f'trackers:all:{tracker_type}')
+def get_all_tracker_uuid_by_type(tracker_type):
+    return r_serv_tracker.smembers(f'trackers:all:{tracker_type}')

 # def get_all_tracker():
 #     l_keys_name = []
@@ -231,7 +234,7 @@ def get_tracker_mails(tracker_uuid):
     return list(r_serv_tracker.smembers('tracker:mail:{}'.format(tracker_uuid)))

 def get_tracker_webhook(tracker_uuid):
-    return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'webhook')
+    return r_serv_tracker.hget(f'tracker:{tracker_uuid}', 'webhook')

 def get_tracker_uuid_sources(tracker_uuid):
     return list(r_serv_tracker.smembers(f'tracker:sources:{tracker_uuid}'))
@@ -345,6 +348,20 @@ def get_tracker_items_by_daterange(tracker_uuid, date_from, date_to):
         all_item_id |= r_serv_tracker.smembers(f'tracker:item:{tracker_uuid}:{date_day}')
     return all_item_id

+def get_tracker_typosquatting_domains(tracker_uuid):
+    return r_serv_tracker.smembers(f'tracker:typosquatting:{tracker_uuid}')
+
+def get_typosquatting_tracked_words_list():
+    typosquattings = {}
+    typos_uuid = get_all_tracker_uuid_by_type("typosquatting")
+
+    for typo_uuid in typos_uuid:
+        tracker = get_tracker_by_uuid(typo_uuid)
+        typosquattings[tracker] = get_tracker_typosquatting_domains(typo_uuid)
+
+    return typosquattings
+
+
 def add_tracked_item(tracker_uuid, item_id):
     item_date = item_basic.get_item_date(item_id)
     # track item
@@ -416,14 +433,14 @@ def get_email_subject(tracker_uuid):
     return 'AIL framework: {}'.format(tracker_description)

 def get_tracker_last_updated_by_type(tracker_type):
-    epoch_update = r_serv_tracker.get('tracker:refresh:{}'.format(tracker_type))
+    epoch_update = r_cache.get(f'tracker:refresh:{tracker_type}')
     if not epoch_update:
         epoch_update = 0
     return float(epoch_update)

 # # TODO: check type API
 def trigger_trackers_refresh(tracker_type):
-    r_serv_tracker.set(f'tracker:refresh:{tracker_type}', time.time())
+    r_cache.set(f'tracker:refresh:{tracker_type}', time.time())

 ######################
 #### TRACKERS ACL ####
@@ -542,6 +559,15 @@ def api_validate_tracker_to_add(tracker , tracker_type, nb_words=1):
             tracker = ",".join(words_set)
             tracker = "{};{}".format(tracker, nb_words)

+    elif tracker_type == 'typosquatting':
+        tracker = tracker.lower()
+        # Take only the first term
+        domain = tracker.split(" ")
+        if len(domain) > 1:
+            return {"status": "error", "reason": "Only one domain is accepted at a time"}, 400
+        if "." not in tracker:
+            return {"status": "error", "reason": "Invalid domain name"}, 400
+
     elif tracker_type=='yara_custom':
         if not is_valid_yara_rule(tracker):
@@ -594,6 +620,12 @@ def create_tracker(tracker, tracker_type, user_id, level, tags, mails, descripti
         tracker = save_yara_rule(tracker_type, tracker, tracker_uuid=tracker_uuid)
         tracker_type = 'yara'

+    elif tracker_type == 'typosquatting':
+        domain = tracker.split(" ")[0]
+        typo_generation = runAll(domain=domain, limit=math.inf, formatoutput="text", pathOutput="-", verbose=False)
+        for typo in typo_generation:
+            r_serv_tracker.sadd(f'tracker:typosquatting:{tracker_uuid}', typo)
+
     # create metadata
     r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'tracked', tracker)
     r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'type', tracker_type)
@@ -666,9 +698,9 @@ def create_tracker(tracker, tracker_type, user_id, level, tags, mails, descripti
         # escape source ?
         r_serv_tracker.sadd(f'tracker:sources:{tracker_uuid}', escape(source))
     # toggle refresh module tracker list/set
-    r_serv_tracker.set('tracker:refresh:{}'.format(tracker_type), time.time())
+    trigger_trackers_refresh(tracker_type)
     if tracker_type != old_type:  # toggle old type refresh
-        r_serv_tracker.set('tracker:refresh:{}'.format(old_type), time.time())
+        trigger_trackers_refresh(old_type)
     return tracker_uuid

 def api_add_tracker(dict_input, user_id):
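For context, the generation step used by `create_tracker()` above can be exercised standalone; this mirrors the exact `runAll()` call from the diff (`example.com` is an arbitrary domain):

```python
import math
from ail_typo_squatting import runAll

# Generate every typo-squatting variation for one domain, as create_tracker() does.
variations = runAll(domain='example.com', limit=math.inf,
                    formatoutput='text', pathOutput='-', verbose=False)
for typo in variations:
    print(typo)
```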
diff --git a/bin/lib/crawlers.py b/bin/lib/crawlers.py
index 46d42536..6ff8da1a 100755
--- a/bin/lib/crawlers.py
+++ b/bin/lib/crawlers.py
@@ -1091,7 +1091,6 @@ def get_captures_status():

 ##-- CRAWLER STATE --##

-#### CRAWLER TASKS ####

 #### CRAWLER TASK ####

@@ -1147,6 +1146,7 @@ class CrawlerTask:
     def get_proxy(self):
         return r_crawler.hget(f'crawler:task:{self.uuid}', 'proxy')

     def get_parent(self):
         return r_crawler.hget(f'crawler:task:{self.uuid}', 'parent')

@@ -1316,6 +1316,22 @@ def create_task(url, depth=1, har=True, screenshot=True, header=None, cookiejar=

 ## -- CRAWLER TASK -- ##

+def send_url_to_crawl_in_queue(crawler_mode, crawler_type, url):
+    print(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
+    r_serv_onion.sadd(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
+    # add auto crawled url for user UI
+    if crawler_mode == 'auto':
+        r_serv_onion.sadd(f'auto_crawler_url:{crawler_type}', url)
+
+def add_url_to_crawl_in_queue(url, crawler_type, crawler_mode='manual'):
+    r_serv_onion.sadd(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
+    # CURRENTLY DISABLED
+    # # add auto crawled url for user UI
+    # if crawler_mode == 'auto':
+    #     r_serv_onion.sadd(f'auto_crawler_url:{crawler_type}', url)

 #### CRAWLER TASK API ####

@@ -1666,6 +1682,20 @@ def test_ail_crawlers():

 #### ---- ####

+# TODO CHECK MIGRATION - Rest API
+
+# def add_auto_crawler_in_queue(domain, domain_type, port, epoch, delta, message):
+#     r_serv_onion.zadd('crawler_auto_queue', int(time.time() + delta), f'{message};{domain_type}')
+#     # update list, last auto crawled domains
+#     r_serv_onion.lpush('last_auto_crawled', f'{domain}:{port};{epoch}')
+#     r_serv_onion.ltrim('last_auto_crawled', 0, 9)
+
+# TODO MIGRATE ME
+# def api_create_crawler_task(user_id, url, screenshot=True, har=True, depth_limit=1, max_pages=100, auto_crawler=False, crawler_delta=3600, crawler_type=None, cookiejar_uuid=None, user_agent=None):
+#     # validate url
+#     if url is None or url=='' or url=='\n':
+#         return ({'error': 'invalid url'}, 400)
+
 # TODO MOVE ME IN CRAWLER OR FLASK
 load_blacklist()

diff --git a/bin/modules/Global.py b/bin/modules/Global.py
index 4edb35b2..2db4a389 100755
--- a/bin/modules/Global.py
+++ b/bin/modules/Global.py
@@ -24,13 +24,11 @@ Requirements
 # Import External packages
 ##################################
 import base64
-import hashlib
 import io
 import gzip
 import os
 import sys
 import time
-import datetime
 from hashlib import md5
 from uuid import uuid4

@@ -40,7 +38,7 @@ sys.path.append(os.environ['AIL_BIN'])
 # Import Project packages
 ##################################
 from modules.abstract_module import AbstractModule
-from lib.ConfigLoader import ConfigLoader
+from lib.ail_core import get_ail_uuid
 from lib.data_retention_engine import update_obj_date
 from lib import item_basic
 # from lib import Statistics
@@ -67,6 +65,8 @@ class Global(AbstractModule):

         # Send module state to logs
         self.redis_logger.info(f"Module {self.module_name} initialized")
+        # Send startup message to logs
+        self.redis_logger.critical(f"AIL {get_ail_uuid()} started")

     def computeNone(self):
         difftime = time.time() - self.time_last_stats

diff --git a/bin/modules/PgpDump.py b/bin/modules/PgpDump.py
index 1dc1e9d9..c6f91740 100755
--- a/bin/modules/PgpDump.py
+++ b/bin/modules/PgpDump.py
@@ -25,6 +25,9 @@ sys.path.append(os.environ['AIL_BIN'])
 from modules.abstract_module import AbstractModule
 from lib.objects import Pgps
 from lib.objects.Items import Item
+from trackers.Tracker_Term import Tracker_Term
+from trackers.Tracker_Regex import Tracker_Regex
+from trackers.Tracker_Yara import Tracker_Yara

 class PgpDump(AbstractModule):
@@ -53,6 +56,10 @@ class PgpDump(AbstractModule):
         # Waiting time in seconds between two processed messages
         self.pending_seconds = 1

+        self.tracker_term = Tracker_Term()
+        self.tracker_regex = Tracker_Regex()
+        self.tracker_yara = Tracker_Yara()
+
         # init
         self.item_id = None
         self.keys = set()
@@ -209,10 +216,16 @@ class PgpDump(AbstractModule):
             pgp = Pgps.Pgp(name, 'name')
             pgp.add(date, self.item_id)
             print(f'    name: {name}')
+            self.tracker_term.compute(self.item_id, item_content=name)
+            self.tracker_regex.compute(self.item_id, content=name)
+            self.tracker_yara.compute(self.item_id, item_content=name)
         for mail in self.mails:
             pgp = Pgps.Pgp(mail, 'mail')
             pgp.add(date, self.item_id)
             print(f'    mail: {mail}')
+            self.tracker_term.compute(self.item_id, item_content=mail)
+            self.tracker_regex.compute(self.item_id, content=mail)
+            self.tracker_yara.compute(self.item_id, item_content=mail)

         # Keys extracted from PGP PRIVATE KEY BLOCK
         for key in self.private_keys:

diff --git a/bin/packages/Term.py b/bin/packages/Term.py
index bc4fced0..abcc6f83 100755
--- a/bin/packages/Term.py
+++ b/bin/packages/Term.py
@@ -102,7 +102,7 @@ def get_text_word_frequency(item_content, filtering=True):

 # # TODO: create all tracked words
 def get_tracked_words_list():
     return list(r_serv_term.smembers('all:tracker:word'))
-    
+
 def get_set_tracked_words_list():
     set_list = r_serv_term.smembers('all:tracker:set')
     all_set_list = []
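The PgpDump change above leans on the optional-content signature added to the three tracker modules below: callers can now scan an extracted fragment instead of forcing a re-read of the whole item. A usage sketch (the item id and fragments are hypothetical):

```python
from trackers.Tracker_Term import Tracker_Term
from trackers.Tracker_Regex import Tracker_Regex
from trackers.Tracker_Yara import Tracker_Yara

tracker_term = Tracker_Term()
tracker_regex = Tracker_Regex()
tracker_yara = Tracker_Yara()

item_id = 'crawled/2022/06/01/example.gz'               # hypothetical item id
for fragment in ('John Doe', 'john.doe@example.com'):   # extracted PGP uids
    # run each tracker on the fragment only
    tracker_term.compute(item_id, item_content=fragment)
    tracker_regex.compute(item_id, content=fragment)
    tracker_yara.compute(item_id, item_content=fragment)
```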
diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg
index 5c65b605..c3243abc 100644
--- a/bin/packages/modules.cfg
+++ b/bin/packages/modules.cfg
@@ -43,6 +43,10 @@ subscribe = Redis_D4_client
 subscribe = Redis
 publish = Redis_Tags

+[Tracker_Typo_Squatting]
+subscribe = Redis_Host
+publish = Redis_Tags
+
 [Tracker_Term]
 subscribe = Redis_Global
 publish = Redis_Tags

diff --git a/bin/trackers/Tracker_Regex.py b/bin/trackers/Tracker_Regex.py
index f955be8c..d213e42d 100755
--- a/bin/trackers/Tracker_Regex.py
+++ b/bin/trackers/Tracker_Regex.py
@@ -48,7 +48,7 @@ class Tracker_Regex(AbstractModule):

         self.redis_logger.info(f"Module: {self.module_name} Launched")

-    def compute(self, item_id):
+    def compute(self, item_id, content=None):
         # refresh Tracked regex
         if self.last_refresh < Tracker.get_tracker_last_updated_by_type('regex'):
             self.dict_regex_tracked = Term.get_regex_tracked_words_dict()
@@ -58,7 +58,8 @@

         item = Item(item_id)
         item_id = item.get_id()
-        content = item.get_content()
+        if not content:
+            content = item.get_content()

         for regex in self.dict_regex_tracked:
             matched = self.regex_findall(self.dict_regex_tracked[regex], item_id, content)
@@ -76,6 +77,7 @@
         # date = item.get_date()
         item_source = item.get_source()
         print(f'new tracked regex found: {tracker_name} in {item_id}')
+        self.redis_logger.warning(f'new tracked regex found: {tracker_name} in {item_id}')

         for tracker_uuid in uuid_list:
             tracker = Tracker.Tracker(tracker_uuid)

diff --git a/bin/trackers/Tracker_Term.py b/bin/trackers/Tracker_Term.py
index b4ed5a87..fa43fc1d 100755
--- a/bin/trackers/Tracker_Term.py
+++ b/bin/trackers/Tracker_Term.py
@@ -61,7 +61,7 @@ class Tracker_Term(AbstractModule):

         self.redis_logger.info(f"Module: {self.module_name} Launched")

-    def compute(self, item_id):
+    def compute(self, item_id, item_content=None):
         # refresh Tracked term
         if self.last_refresh_word < Term.get_tracked_term_last_updated_by_type('word'):
             self.list_tracked_words = Term.get_tracked_words_list()
@@ -78,7 +78,8 @@
         # Cast message as Item
         item = Item(item_id)
         item_date = item.get_date()
-        item_content = item.get_content()
+        if not item_content:
+            item_content = item.get_content()

         signal.alarm(self.max_execution_time)
@@ -120,7 +121,7 @@
         item_id = item.get_id()
         item_date = item.get_date()
         item_source = item.get_source()
-        self.redis_logger.info(f'new tracked term found: {term} in {item_id}')
+        self.redis_logger.warning(f'new tracked term found: {term} in {item_id}')
         print(f'new tracked term found: {term} in {item_id}')
         for term_uuid in uuid_list:
             tracker_sources = Tracker.get_tracker_uuid_sources(term_uuid)
diff --git a/bin/trackers/Tracker_Typo_Squatting.py b/bin/trackers/Tracker_Typo_Squatting.py
new file mode 100755
index 00000000..34116beb
--- /dev/null
+++ b/bin/trackers/Tracker_Typo_Squatting.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+"""
+The Tracker_Typo_Squatting Module
+===================
+
+"""
+
+##################################
+# Import External packages
+##################################
+import os
+import sys
+import time
+import requests
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from modules.abstract_module import AbstractModule
+import NotificationHelper
+from packages.Item import Item
+from lib import Tracker
+
+class Tracker_Typo_Squatting(AbstractModule):
+    """
+    Tracker_Typo_Squatting module for AIL framework
+    """
+
+    mail_body_template = "AIL Framework,\nNew occurrence for tracked typo-squatted domain: {}\nitem id: {}\nurl: {}{}"
+
+    def __init__(self):
+        super(Tracker_Typo_Squatting, self).__init__()
+
+        self.pending_seconds = 5
+
+        self.full_item_url = self.process.config.get("Notifications", "ail_domain") + "/object/item?id="
+
+        # load tracked typo-squatting domains
+        self.typosquat_tracked_words_list = Tracker.get_typosquatting_tracked_words_list()
+        self.last_refresh_typosquat = time.time()
+
+        self.redis_logger.info(f"Module: {self.module_name} Launched")
+
+    def compute(self, message):
+        # refresh tracked typo-squatting domains
+        if self.last_refresh_typosquat < Tracker.get_tracker_last_updated_by_type('typosquatting'):
+            self.typosquat_tracked_words_list = Tracker.get_typosquatting_tracked_words_list()
+            self.last_refresh_typosquat = time.time()
+            self.redis_logger.debug('Tracked typosquatting refreshed')
+            print('Tracked typosquatting refreshed')
+
+        host, item_id = message.split()
+
+        # check the extracted host against every tracker's typo set
+        for tracker in self.typosquat_tracked_words_list:
+            if host in self.typosquat_tracked_words_list[tracker]:
+                item = Item(item_id)
+                self.new_tracker_found(tracker, 'typosquatting', item)
+
+    def new_tracker_found(self, tracker, tracker_type, item):
+        item_id = item.get_id()
+        item_date = item.get_date()
+        item_source = item.get_source()
+        print(f'new tracked typosquatting found: {tracker} in {item_id}')
+        self.redis_logger.warning(f'tracker typosquatting: {tracker} in {item_id}')
+
+        for tracker_uuid in Tracker.get_tracker_uuid_list(tracker, tracker_type):
+            # Source Filtering
+            tracker_sources = Tracker.get_tracker_uuid_sources(tracker_uuid)
+            if tracker_sources and item_source not in tracker_sources:
+                continue
+
+            Tracker.add_tracked_item(tracker_uuid, item_id)
+
+            # Tags
+            tags_to_add = Tracker.get_tracker_tags(tracker_uuid)
+            for tag in tags_to_add:
+                msg = f'{tag};{item_id}'
+                self.send_message_to_queue(msg, 'Tags')
+
+            # Mail notification
+            mail_to_notify = Tracker.get_tracker_mails(tracker_uuid)
+            if mail_to_notify:
+                mail_subject = Tracker.get_email_subject(tracker_uuid)
+                mail_body = Tracker_Typo_Squatting.mail_body_template.format(tracker, item_id, self.full_item_url, item_id)
+                for mail in mail_to_notify:
+                    NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body)
+
+            # Webhook
+            webhook_to_post = Tracker.get_tracker_webhook(tracker_uuid)
+            if webhook_to_post:
+                json_request = {"trackerId": tracker_uuid,
+                                "itemId": item_id,
+                                "itemURL": self.full_item_url + item_id,
+                                "tracker": tracker,
+                                "itemSource": item_source,
+                                "itemDate": item_date,
+                                "tags": tags_to_add,
+                                "emailNotification": f'{mail_to_notify}',
+                                "trackerType": tracker_type
+                                }
+                try:
+                    response = requests.post(webhook_to_post, json=json_request)
+                    if response.status_code >= 400:
+                        self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: {response.reason}")
+                except requests.exceptions.RequestException as e:
+                    self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: {e}")
+
+
+if __name__ == '__main__':
+    module = Tracker_Typo_Squatting()
+    module.run()
+    #module.compute('g00gle.com tests/2020/01/01/test.gz')
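When a webhook is configured, `new_tracker_found()` POSTs one flat JSON document per match. A minimal sketch of a receiving endpoint for local testing (Flask here is an assumption; only the field names come from `json_request` above):

```python
from flask import Flask, request

app = Flask(__name__)

@app.route('/ail-webhook', methods=['POST'])
def ail_webhook():
    event = request.get_json()
    # Fields as posted by Tracker_Typo_Squatting.new_tracker_found()
    print(event['trackerId'], event['trackerType'], event['tracker'])
    print(event['itemId'], event['itemSource'], event['itemDate'])
    print(event['tags'], event['itemURL'])
    return '', 204

if __name__ == '__main__':
    app.run(port=8000)   # hypothetical port
```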
@@ -45,7 +45,7 @@ class Tracker_Yara(AbstractModule):

         self.redis_logger.info(f"Module: {self.module_name} Launched")

-    def compute(self, item_id):
+    def compute(self, item_id, item_content=None):
         # refresh YARA list
         if self.last_refresh < Tracker.get_tracker_last_updated_by_type('yara'):
             self.rules = Tracker.reload_yara_rules()
@@ -54,12 +54,14 @@
             print('Tracked set refreshed')

         self.item = Item(item_id)
-        item_content = self.item.get_content()
+        if not item_content:
+            item_content = self.item.get_content()
+
         try:
             yara_match = self.rules.match(data=item_content, callback=self.yara_rules_match, which_callbacks=yara.CALLBACK_MATCHES, timeout=60)
             if yara_match:
-                self.redis_logger.info(f'{self.item.get_id()}: {yara_match}')
+                self.redis_logger.warning(f'tracker yara: new match {self.item.get_id()}: {yara_match}')
                 print(f'{self.item.get_id()}: {yara_match}')
         except yara.TimeoutError as e:
             print(f'{self.item.get_id()}: yara scanning timed out')

diff --git a/configs/core.cfg.sample b/configs/core.cfg.sample
index 05db26e1..5608b505 100644
--- a/configs/core.cfg.sample
+++ b/configs/core.cfg.sample
@@ -22,6 +22,18 @@ pystemonpath = /home/pystemon/pystemon/

 sentiment_lexicon_file = sentiment/vader_lexicon.zip/vader_lexicon/vader_lexicon.txt

+##### Logs ######
+[Logs]
+# activate syslog
+ail_logs_syslog = False
+ail_logs_syslog_server =
+# default=514
+ail_logs_syslog_port =
+# ['auth', 'authpriv', 'cron', 'daemon', 'ftp', 'kern', 'lpr', 'mail', 'news', 'syslog', 'user', 'uucp', 'local0', 'local1', 'local2', 'local3', 'local4', 'local5', 'local6', 'local7']
+ail_logs_syslog_facility =
+# ['DEBUG', 'INFO', 'NOTICE', 'WARNING', 'ERROR', 'CRITICAL']
+ail_logs_syslog_level =
+
 ##### Notifications ######
 [Notifications]
 ail_domain = https://localhost:7000
@@ -228,7 +240,7 @@ dns = 8.8.8.8
 dns = 8.8.8.8

 [Web]
-dns = 149.13.33.69
+dns = 8.8.8.8

 # Indexer configuration
 [Indexer]
@@ -247,7 +259,8 @@ maxDuplicateToPushToMISP=10
 # e.g.: tcp://127.0.0.1:5556,tcp://127.0.0.1:5557
 [ZMQ_Global]
 #address = tcp://crf.circl.lu:5556
-address = tcp://127.0.0.1:5556,tcp://crf.circl.lu:5556
+# address = tcp://127.0.0.1:5556,tcp://crf.circl.lu:5556
+address = tcp://127.0.0.1:5556
 channel = 102
 bind = tcp://127.0.0.1:5556
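For reference, a filled-in `[Logs]` section; the server, port, facility and level values are placeholders, while the option names and accepted values come from the sample above:

```ini
[Logs]
ail_logs_syslog = True
ail_logs_syslog_server = 192.168.0.10
ail_logs_syslog_port = 514
ail_logs_syslog_facility = local0
ail_logs_syslog_level = WARNING
```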
diff --git a/requirements.txt b/requirements.txt
index ef308825..978eaf9f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -71,6 +71,8 @@ flask>=1.1.4
 flask-login
 bcrypt>3.1.6

+# AIL typo-squatting
+ail_typo_squatting

 # Tests
 nose>=1.3.7

diff --git a/update/v4.2.1/Update.py b/update/v4.2.1/Update.py
new file mode 100755
index 00000000..19085e78
--- /dev/null
+++ b/update/v4.2.1/Update.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import os
+import re
+import sys
+import time
+import redis
+import datetime
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
+import ConfigLoader
+
+sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
+from ail_updater import AIL_Updater
+
+class Updater(AIL_Updater):
+    """default Updater."""
+
+    def __init__(self, version):
+        super(Updater, self).__init__(version)
+
+if __name__ == '__main__':
+
+    updater = Updater('v4.2.1')
+    updater.run_update()

diff --git a/update/v4.2.1/Update.sh b/update/v4.2.1/Update.sh
new file mode 100755
index 00000000..bf04638b
--- /dev/null
+++ b/update/v4.2.1/Update.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
+[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
+[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
+[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_BIN. Run the script from the virtual environment." && exit 1;
+[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;
+
+export PATH=$AIL_HOME:$PATH
+export PATH=$AIL_REDIS:$PATH
+export PATH=$AIL_ARDB:$PATH
+export PATH=$AIL_BIN:$PATH
+export PATH=$AIL_FLASK:$PATH
+
+GREEN="\\033[1;32m"
+DEFAULT="\\033[0;39m"
+
+echo -e $GREEN"Shutting down AIL ..."$DEFAULT
+bash ${AIL_BIN}/LAUNCH.sh -ks
+wait
+
+# SUBMODULES #
+git submodule update
+
+echo ""
+echo -e $GREEN"Updating pubsublogger ..."$DEFAULT
+pip3 install -U pubsublogger
+
+exit 0

diff --git a/var/www/modules/hunter/Flask_hunter.py b/var/www/modules/hunter/Flask_hunter.py
index c1be3fe6..e4357053 100644
--- a/var/www/modules/hunter/Flask_hunter.py
+++ b/var/www/modules/hunter/Flask_hunter.py
@@ -85,6 +85,16 @@ def tracked_menu_yara():
     global_trackers = Tracker.get_global_trackers_metadata(tracker_type=tracker_type)
     return render_template("trackersManagement.html", user_trackers=user_trackers, global_trackers=global_trackers, bootstrap_label=bootstrap_label, tracker_type=tracker_type)

+@hunter.route("/trackers/typosquatting")
+@login_required
+@login_read_only
+def tracked_menu_typosquatting():
+    filter_type = 'typosquatting'
+    user_id = current_user.get_id()
+    user_term = Term.get_all_user_tracked_terms(user_id, filter_type=filter_type)
+    global_term = Term.get_all_global_tracked_terms(filter_type=filter_type)
+    return render_template("trackersManagement.html", user_term=user_term, global_term=global_term, bootstrap_label=bootstrap_label, filter_type=filter_type)
+
 @hunter.route("/tracker/add", methods=['GET', 'POST'])
 @login_required
@@ -224,6 +234,13 @@ def show_tracker():
         yara_rule_content = Tracker.get_yara_rule_content(tracker_metadata['tracker'])
     else:
         yara_rule_content = None
+
+    if tracker_metadata['type'] == 'typosquatting':
+        typo_squatting = list(Tracker.get_tracker_typosquatting_domains(tracker_uuid))
+        typo_squatting.sort()
+    else:
+        typo_squatting = None
+
     if date_from:
         res = Term.parse_get_tracker_term_item({'uuid': tracker_uuid, 'date_from': date_from, 'date_to': date_to}, user_id)
@@ -241,6 +258,7 @@ def show_tracker():
     return render_template("showTracker.html", tracker_metadata=tracker_metadata,
                             yara_rule_content=yara_rule_content,
+                            typo_squatting=typo_squatting,
                             bootstrap_label=bootstrap_label)

 @hunter.route("/tracker/update_tracker_description", methods=['POST'])

diff --git a/var/www/modules/hunter/templates/edit_tracker.html b/var/www/modules/hunter/templates/edit_tracker.html
index 3bbdd23e..39892b37 100644
--- a/var/www/modules/hunter/templates/edit_tracker.html
+++ b/var/www/modules/hunter/templates/edit_tracker.html
@@ -104,6 +104,7 @@
+                <option value="typosquatting">Typosquatting</option>
                 Terms to track (space separated)
@@ -209,6 +210,12 @@ $(document).ready(function(){
       $("#tracker").hide();
       $("#nb_word").hide();
       $("#yara_rule").show();
+    } else if (tracker_type=="typosquatting") {
+      $("#tracker_desc").text("Generation of variations for a domain name. Only one domain name at a time.");
+      $("#tracker_desc").show();
+      $("#tracker").show();
+      $("#nb_word").hide();
+      $("#yara_rule").hide();
     }
   });

diff --git a/var/www/modules/hunter/templates/showTracker.html b/var/www/modules/hunter/templates/showTracker.html
index 5f551d24..ca6fdf6b 100644
--- a/var/www/modules/hunter/templates/showTracker.html
+++ b/var/www/modules/hunter/templates/showTracker.html
@@ -69,29 +69,47 @@
-        <th>Type</th>
-        <th>Tracker</th>
-        <th>Date added</th>
-        <th>Access Level</th>
-        <th>Created by</th>
-        <th>First seen</th>
-        <th>Last seen</th>
-        {% if tracker_metadata['webhook'] %}
-        <th>Webhook</th>
-        {% endif %}
-        <th>Tags</th>
-        <th></th>
+        <th>Type</th>
+        <th>Tracker</th>
+        <th>Date added</th>
+        <th>Access Level</th>
+        <th>Created by</th>
+        <th>First seen</th>
+        <th>Last seen</th>
+        {% if tracker_metadata['webhook'] %}
+        <th>Webhook</th>
+        {% endif %}
+        <th>Tags</th>
+        <th></th>
         <td>{{ tracker_metadata['type'] }}</td>
-        <td>{{ tracker_metadata['tracker'] }}</td>
+        {% if tracker_metadata['type'] == 'typosquatting' %}
+        <td>
+          {{ tracker_metadata['tracker'].split(",")[0] }}
+          {% if typo_squatting %}
+          {% for typo in typo_squatting %}
+          {{ typo }}
+          <br>
+          {% endfor %}
+          {% endif %}
+        </td>
+        {% else %}
+        <td>{{ tracker_metadata['tracker'] }}</td>
+        {% endif %}
         <td>{{ tracker_metadata['date'][0:4] }}/{{ tracker_metadata['date'][4:6] }}/{{ tracker_metadata['date'][6:8] }}</td>
{% if tracker_metadata['level'] == 0 %}
diff --git a/var/www/modules/restApi/Flask_restApi.py b/var/www/modules/restApi/Flask_restApi.py
index 55c52cf0..049dab4d 100644
--- a/var/www/modules/restApi/Flask_restApi.py
+++ b/var/www/modules/restApi/Flask_restApi.py
@@ -16,6 +16,7 @@ sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
+from lib import crawlers
from lib import Users
from lib.objects.Items import Item
from lib import Tag
@@ -46,8 +47,8 @@ restApi = Blueprint('restApi', __name__, template_folder='templates')
# ============ AUTH FUNCTIONS ============
-def check_token_format(strg, search=re.compile(r'[^a-zA-Z0-9_-]').search):
- return not bool(search(strg))
+def check_token_format(token, search=re.compile(r'[^a-zA-Z0-9_-]').search):
+ return not bool(search(token))
def verify_token(token):
if len(token) != 41:
@@ -90,7 +91,7 @@ def get_auth_from_header():
def authErrors(user_role):
# Check auth
if not request.headers.get('Authorization'):
- return ({'status': 'error', 'reason': 'Authentication needed'}, 401)
+ return {'status': 'error', 'reason': 'Authentication needed'}, 401
token = get_auth_from_header()
data = None
# verify token format
@@ -102,7 +103,7 @@ def authErrors(user_role):
if login_failed_ip:
login_failed_ip = int(login_failed_ip)
if login_failed_ip >= 5:
- return ({'status': 'error', 'reason': 'Max Connection Attempts reached, Please wait {}s'.format(r_cache.ttl('failed_login_ip_api:{}'.format(current_ip)))}, 401)
+ return {'status': 'error', 'reason': 'Max Connection Attempts reached, Please wait {}s'.format(r_cache.ttl('failed_login_ip_api:{}'.format(current_ip)))}, 401
try:
authenticated = False
@@ -146,9 +147,6 @@ def is_valid_uuid_v4(header_uuid):
except:
return False
-def one():
- return 1
-
# ============= ROUTES ==============
# @restApi.route("/api", methods=['GET'])
@@ -576,6 +574,20 @@ def get_crawled_domain_list():
dict_res['domain_type'] = domain_type
return create_json_response(dict_res, res[1])
+# # TODO: ADD RESULT JSON Response
+@restApi.route("api/v1/add/crawler/task", methods=['POST'])
+@token_required('analyst')
+def add_crawler_task():
+ data = request.get_json()
+ user_token = get_auth_from_header()
+ user_id = get_user_from_token(user_token)
+ res = crawlers.api_add_crawler_task(data, user_id=user_id)
+ if res:
+ return create_json_response(res[0], res[1])
+
+ dict_res = {'url': data['url']}
+ return create_json_response(dict_res, 200)
+
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # IMPORT # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
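A hedged client-side sketch for the new crawler-task endpoint; the `Authorization` header format follows `get_auth_from_header()`/`authErrors()` above, the base URL is the default `ail_domain` from `core.cfg.sample`, and any payload field beyond `url` is an assumption:

```python
import requests

AIL_URL = 'https://localhost:7000'       # default ail_domain
API_TOKEN = '<your 41-char API token>'   # placeholder

r = requests.post(f'{AIL_URL}/api/v1/add/crawler/task',
                  json={'url': 'http://example.onion'},   # hypothetical target
                  headers={'Authorization': API_TOKEN},
                  verify=False)   # AIL ships with a self-signed certificate
print(r.status_code, r.json())
```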
diff --git a/var/www/templates/hunter/menu_sidebar.html b/var/www/templates/hunter/menu_sidebar.html
index 7410b5f0..d727810e 100644
--- a/var/www/templates/hunter/menu_sidebar.html
+++ b/var/www/templates/hunter/menu_sidebar.html
@@ -42,6 +42,12 @@
       <li class="nav-item">
         <a class="nav-link" href="{{url_for('hunter.tracked_menu_yara')}}">
           YARA
         </a>
       </li>
+      <li class="nav-item">
+        <a class="nav-link" href="{{url_for('hunter.tracked_menu_typosquatting')}}">
+          Typosquatting
+        </a>
+      </li>