fix: [core] fix merge

commit 126ecb2e39
parent 524a404dc8

8 changed files with 72 additions and 139 deletions
@@ -282,10 +282,10 @@ function launching_scripts {
     ##################################
     #       TRACKERS MODULES         #
     ##################################
-    screen -S "Script_AIL" -X screen -t "Tracker_Typo_Squatting" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Typo_Squatting.py; read x"
-    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Tracker_Term" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Term.py; read x"
     sleep 0.1
+    screen -S "Script_AIL" -X screen -t "Tracker_Typo_Squatting" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Typo_Squatting.py; read x"
+    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Tracker_Regex" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Regex.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Tracker_Yara" bash -c "cd ${AIL_BIN}/trackers; ${ENV_PY} ./Tracker_Yara.py; read x"
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-"""
-The JSON Receiver Module
-================
-
-Recieve Json Items (example: Twitter feeder)
-
-"""
-import os
-import json
-import sys
-import datetime
-import uuid
-
-from packages import Tag
-
-sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
-import item_basic
-
-sys.path.append(os.path.join(os.environ['AIL_BIN'], 'import', 'ail_json_importer'))
-from Default_json import Default_json
-
-class Ail_bgp_monitor(Default_json):
-    """urlextract Feeder functions"""
-
-    def __init__(self, name, json_item):
-        super().__init__(name, json_item)
-
-    def get_feeder_name(self):
-        return 'bgp_monitor'
-
-    # # define item id
-    # def get_item_id(self):
-    #     # use twitter timestamp ?
-    #     item_date = datetime.date.today().strftime("%Y/%m/%d")
-    #     item_id = str(self.json_item['meta']['twitter:url-extracted'])
-    #     item_id = item_id.split('//')
-    #     if len(item_id) > 1:
-    #         item_id = ''.join(item_id[1:])
-    #     else:
-    #         item_id = item_id[0]
-    #     item_id = item_id.replace('/', '_')
-    #     if len(item_id) > 215:
-    #         item_id = '{}{}.gz'.format(item_id[:215], str(uuid.uuid4()))
-    #     else:
-    #         item_id = '{}{}.gz'.format(item_id, str(uuid.uuid4()))
-    #     return os.path.join('urlextract', item_date, item_id)
-
-    def process_json_meta(self, process, item_id):
-        '''
-        Process JSON meta filed.
-        '''
-        json_meta = self.get_json_meta()
-
-        tag = 'infoleak:automatic-detection=bgp_monitor'
-        Tag.add_tag('item', tag, item_id)
bin/importer/feeders/BgpMonitor.py  (new executable file, 35 lines)

@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+"""
+The Bgp Monitor Feeder Importer Module
+================
+
+Process Bgp Monitor JSON
+
+"""
+import os
+import sys
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from importer.feeders.Default import DefaultFeeder
+from lib.objects.Items import Item
+
+
+class BgpMonitorFeeder(DefaultFeeder):
+    """BgpMonitorFeeder Feeder functions"""
+
+    def __init__(self, json_data):
+        super().__init__(json_data)
+        self.name = 'bgp_monitor'
+
+    def process_meta(self):
+        """
+        Process JSON meta filed.
+        """
+        # DIRTY FIX
+        tag = 'infoleak:automatic-detection=bgp_monitor'
+        item = Item(self.get_item_id())
+        item.add_tag(tag)
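Note: the old ail_json_importer class (Ail_bgp_monitor) is replaced by the importer.feeders module above. A rough sketch of how such a feeder could be exercised is shown below; the FEEDERS registry, the 'source' key, and the import_json helper are illustrative assumptions rather than APIs introduced by this commit, and the snippet assumes it runs inside an AIL environment with AIL_BIN on sys.path.

# Illustrative sketch only -- FEEDERS, import_json and the 'source' key are
# hypothetical; only BgpMonitorFeeder/DefaultFeeder and process_meta() appear in the diff.
from importer.feeders.Default import DefaultFeeder
from importer.feeders.BgpMonitor import BgpMonitorFeeder

FEEDERS = {'bgp_monitor': BgpMonitorFeeder}

def import_json(json_data):
    # Pick the feeder advertised by the JSON source, fall back to the default one.
    feeder_class = FEEDERS.get(json_data.get('source'), DefaultFeeder)
    feeder = feeder_class(json_data)
    feeder.process_meta()  # tags the item, e.g. infoleak:automatic-detection=bgp_monitor
    return feeder.name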
@@ -1146,7 +1146,6 @@ class CrawlerTask:
     def get_proxy(self):
         return r_crawler.hget(f'crawler:task:{self.uuid}', 'proxy')
 
-<<<<<<< HEAD
     def get_parent(self):
         return r_crawler.hget(f'crawler:task:{self.uuid}', 'parent')
 

@@ -1316,22 +1315,6 @@ def create_task(url, depth=1, har=True, screenshot=True, header=None, cookiejar=
 
 
 ## -- CRAWLER TASK -- ##
-=======
-def send_url_to_crawl_in_queue(crawler_mode, crawler_type, url):
-    print(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
-    r_serv_onion.sadd(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
-    # add auto crawled url for user UI
-    if crawler_mode == 'auto':
-        r_serv_onion.sadd(f'auto_crawler_url:{crawler_type}', url)
-
-def add_url_to_crawl_in_queue(url, crawler_mode='manual'): # crawler_type
-    #print(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
-    r_serv_onion.sadd(f'{crawler_type}_crawler_priority_queue', f'{url};{crawler_mode}')
-    # CURRENTLY DISABLED
-    # # add auto crawled url for user UI
-    # if crawler_mode == 'auto':
-    #     r_serv_onion.sadd(f'auto_crawler_url:{crawler_type}', url)
->>>>>>> master
 
 #### CRAWLER TASK API ####
 
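The two hunks above only strip the leftover merge-conflict markers (<<<<<<< HEAD, =======, >>>>>>> master) and the duplicated queue helpers that came with the master side. Any such marker left in a Python module is a syntax error, so the whole module fails to import; a minimal, self-contained illustration (not taken from the commit):

# Demonstrates why a leftover conflict marker is fatal: compiling the module
# raises SyntaxError, so anything importing the file would crash at import time.
source = '''
def get_proxy(self):
    return 'proxy'
<<<<<<< HEAD
def get_parent(self):
    return 'parent'
'''

try:
    compile(source, 'crawlers_with_conflict.py', 'exec')
except SyntaxError as err:
    print(f'SyntaxError at line {err.lineno}: {err.msg}')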
@@ -102,7 +102,7 @@ def get_text_word_frequency(item_content, filtering=True):
 # # TODO: create all tracked words
 def get_tracked_words_list():
     return list(r_serv_term.smembers('all:tracker:word'))
 
 def get_set_tracked_words_list():
     set_list = r_serv_term.smembers('all:tracker:set')
     all_set_list = []
@@ -13,18 +13,18 @@ The Tracker_Typo_Squatting Module
 import os
 import sys
 import time
-import requests
 
 sys.path.append(os.environ['AIL_BIN'])
 ##################################
 # Import Project packages
 ##################################
 from modules.abstract_module import AbstractModule
-import NotificationHelper
-from packages.Item import Item
+from lib.objects.Items import Item
 from lib import Tracker
+from exporter.MailExporter import MailExporterTracker
+from exporter.WebHookExporter import WebHookExporterTracker
 
 
 class Tracker_Typo_Squatting(AbstractModule):
     mail_body_template = "AIL Framework,\nNew occurrence for tracked Typo: {}\nitem id: {}\nurl: {}{}"
@@ -37,12 +37,14 @@ class Tracker_Typo_Squatting(AbstractModule):
 
         self.pending_seconds = 5
 
-        self.full_item_url = self.process.config.get("Notifications", "ail_domain") + "/object/item?id="
-
-        # loads typosquatting
+        # Refresh typo squatting
         self.typosquat_tracked_words_list = Tracker.get_typosquatting_tracked_words_list()
         self.last_refresh_typosquat = time.time()
 
+        # Exporter
+        self.exporters = {'mail': MailExporterTracker(),
+                          'webhook': WebHookExporterTracker()}
+
         self.redis_logger.info(f"Module: {self.module_name} Launched")
 
     def compute(self, message):
@@ -53,64 +55,39 @@ class Tracker_Typo_Squatting(AbstractModule):
             self.redis_logger.debug('Tracked typosquatting refreshed')
             print('Tracked typosquatting refreshed')
 
-        host, id = message.split()
+        host, item_id = message.split()
 
         # Cast message as Item
         for tracker in self.typosquat_tracked_words_list:
             if host in self.typosquat_tracked_words_list[tracker]:
-                item = Item(id)
+                item = Item(item_id)
                 self.new_tracker_found(tracker, 'typosquatting', item)
 
     def new_tracker_found(self, tracker, tracker_type, item):
         item_id = item.get_id()
-        item_date = item.get_date()
         item_source = item.get_source()
-        #self.redis_logger.info(f'new tracked typo found: {tracker} in {item_id}')
         print(f'new tracked typosquatting found: {tracker} in {item_id}')
         self.redis_logger.warning(f'tracker typosquatting: {tracker} in {item_id}')
 
-        print(Tracker.get_tracker_uuid_list(tracker, tracker_type))
         for tracker_uuid in Tracker.get_tracker_uuid_list(tracker, tracker_type):
+            tracker = Tracker.Tracker(tracker_uuid)
 
             # Source Filtering
-            tracker_sources = Tracker.get_tracker_uuid_sources(tracker)
+            tracker_sources = tracker.get_sources()
             if tracker_sources and item_source not in tracker_sources:
                 continue
 
             Tracker.add_tracked_item(tracker_uuid, item_id)
 
-            # Tags
-            tags_to_add = Tracker.get_tracker_tags(tracker_uuid)
-            for tag in tags_to_add:
+            for tag in tracker.get_tags():
                 msg = f'{tag};{item_id}'
                 self.send_message_to_queue(msg, 'Tags')
 
-            mail_to_notify = Tracker.get_tracker_mails(tracker_uuid)
-            if mail_to_notify:
-                mail_subject = Tracker.get_email_subject(tracker_uuid)
-                mail_body = Tracker_Typo_Squatting.mail_body_template.format(tracker, item_id, self.full_item_url, item_id)
-                for mail in mail_to_notify:
-                    NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body)
+            if tracker.mail_export():
+                self.exporters['mail'].export(tracker, item)
 
-            # Webhook
-            webhook_to_post = Tracker.get_tracker_webhook(tracker_uuid)
-            if webhook_to_post:
-                json_request = {"trackerId": tracker_uuid,
-                                "itemId": item_id,
-                                "itemURL": self.full_item_url + item_id,
-                                "tracker": tracker,
-                                "itemSource": item_source,
-                                "itemDate": item_date,
-                                "tags": tags_to_add,
-                                "emailNotification": f'{mail_to_notify}',
-                                "trackerType": tracker_type
-                                }
-                try:
-                    response = requests.post(webhook_to_post, json=json_request)
-                    if response.status_code >= 400:
-                        self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: {response.reason}")
-                except:
-                    self.redis_logger.error(f"Webhook request failed for {webhook_to_post}\nReason: Something went wrong")
+            if tracker.webhook_export():
+                self.exporters['webhook'].export(tracker, item)
 
 
 if __name__ == '__main__':
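The Tracker_Typo_Squatting changes above swap the inline NotificationHelper mail code and the hand-rolled requests.post webhook call for the shared MailExporterTracker / WebHookExporterTracker objects, with per-tracker settings read from a Tracker.Tracker instance. A condensed sketch of the resulting flow, assuming an AIL environment and a valid tracker UUID, and using only calls already visible in the diff:

# Sketch of the new export path (illustrative; notify() is a hypothetical helper name).
from lib import Tracker
from lib.objects.Items import Item
from exporter.MailExporter import MailExporterTracker
from exporter.WebHookExporter import WebHookExporterTracker

exporters = {'mail': MailExporterTracker(), 'webhook': WebHookExporterTracker()}

def notify(tracker_uuid, item_id):
    tracker = Tracker.Tracker(tracker_uuid)  # per-tracker settings live on the object
    item = Item(item_id)
    Tracker.add_tracked_item(tracker_uuid, item_id)
    if tracker.mail_export():        # recipients and subject are handled by the exporter
        exporters['mail'].export(tracker, item)
    if tracker.webhook_export():     # webhook URL and payload are handled by the exporter
        exporters['webhook'].export(tracker, item)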
@@ -2,17 +2,13 @@
 # -*-coding:UTF-8 -*
 
 import os
-import re
 import sys
-import time
-import redis
-import datetime
 
-sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
-import ConfigLoader
-
-sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
-from ail_updater import AIL_Updater
+sys.path.append(os.environ['AIL_HOME'])
+##################################
+# Import Project packages
+##################################
+from update.bin.old_ail_updater import AIL_Updater
 
 class Updater(AIL_Updater):
     """default Updater."""

@@ -20,7 +16,7 @@ class Updater(AIL_Updater):
     def __init__(self, version):
         super(Updater, self).__init__(version)
 
-if __name__ == '__main__':
+
+if __name__ == '__main__':
     updater = Updater('v4.2.1')
     updater.run_update()
@@ -89,12 +89,12 @@ def tracked_menu_yara():
 @login_required
 @login_read_only
 def tracked_menu_typosquatting():
-    filter_type = 'typosquatting'
+    tracker_type = 'typosquatting'
     user_id = current_user.get_id()
-    user_term = Term.get_all_user_tracked_terms(user_id, filter_type=filter_type)
-    global_term = Term.get_all_global_tracked_terms(filter_type=filter_type)
-    return render_template("trackersManagement.html", user_term=user_term, global_term=global_term, bootstrap_label=bootstrap_label, filter_type=filter_type)
+    user_trackers = Tracker.get_user_trackers_metadata(user_id, tracker_type=tracker_type)
+    global_trackers = Tracker.get_global_trackers_metadata(tracker_type=tracker_type)
+    return render_template("trackersManagement.html", user_trackers=user_trackers, global_trackers=global_trackers,
+                           bootstrap_label=bootstrap_label, tracker_type=tracker_type)
 
 @hunter.route("/tracker/add", methods=['GET', 'POST'])
 @login_required

@@ -241,10 +241,9 @@ def show_tracker():
     else:
         typo_squatting = None
 
-
     if date_from:
         res = Term.parse_get_tracker_term_item({'uuid': tracker_uuid, 'date_from': date_from, 'date_to': date_to}, user_id)
-        if res[1] !=200:
+        if res[1] != 200:
             return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
         tracker_metadata['items'] = res[0]['items']
         tracker_metadata['date_from'] = res[0]['date_from']

@@ -257,9 +256,9 @@ def show_tracker():
     tracker_metadata['sources'] = sorted(tracker_metadata['sources'])
 
     return render_template("showTracker.html", tracker_metadata=tracker_metadata,
                            yara_rule_content=yara_rule_content,
                            typo_squatting=typo_squatting,
                            bootstrap_label=bootstrap_label)
 
 @hunter.route("/tracker/update_tracker_description", methods=['POST'])
 @login_required
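The blueprint now feeds the trackersManagement.html template with user_trackers / global_trackers metadata from the Tracker lib instead of the old Term helpers. A small sanity-check sketch of those helpers follows; the user id is hypothetical, and it assumes Tracker here is lib/Tracker (as it is elsewhere in this commit) and that the helpers return sized collections of per-tracker metadata.

# Illustrative consistency check only; the user id is hypothetical.
from lib import Tracker

user_id = 'admin@admin.test'
tracker_type = 'typosquatting'

user_trackers = Tracker.get_user_trackers_metadata(user_id, tracker_type=tracker_type)
global_trackers = Tracker.get_global_trackers_metadata(tracker_type=tracker_type)
print(f'{len(user_trackers)} user trackers, {len(global_trackers)} global trackers')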