Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-10 00:28:22 +00:00)

Commit 5960a8c694: chg: merge master -> crawler_manager

37 changed files with 667 additions and 258 deletions
HOWTO.md (4 changes)

@@ -109,11 +109,11 @@ There are two types of installation. You can install a *local* or a *remote* Spl
 
 ### Installation/Configuration
 
-1. *(Splash host)* Launch ``crawler_hidden_services_install.sh`` to install all requirements (type ``y`` if a localhost splah server is used or use the ``-y`` option)
+1. *(Splash host)* Launch ``crawler_hidden_services_install.sh`` to install all requirements (type ``y`` if a localhost splash server is used or use the ``-y`` option)
 
 2. *(Splash host)* To install and setup your tor proxy:
     - Install the tor proxy: ``sudo apt-get install tor -y``
-      (Not required if ``Splah host == AIL host`` - The tor proxy is installed by default in AIL)
+      (Not required if ``Splash host == AIL host`` - The tor proxy is installed by default in AIL)
 
       (Warning: Some v3 onion address are not resolved with the tor proxy provided via apt get. Use the tor proxy provided by [The torproject](https://2019.www.torproject.org/docs/debian) to solve this issue)
     - Allow Tor to bind to any interface or to the docker interface (by default binds to 127.0.0.1 only) in ``/etc/tor/torrc``
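Note on that last torrc step: Tor only listens on 127.0.0.1 by default. A minimal sketch of the ``/etc/tor/torrc`` change, assuming the standard SOCKS port 9050 and the default Docker bridge address 172.17.0.1 (adjust both to your setup):

```
# /etc/tor/torrc
# Expose the SOCKS listener on the Docker bridge so a containerized
# Splash instance can reach the Tor proxy running on the host.
SocksPort 172.17.0.1:9050
```

Restart Tor afterwards (``sudo systemctl restart tor``) so the new binding takes effect.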
@@ -82,10 +82,10 @@ Installation
 Type these command lines for a fully automated installation and start AIL framework:
 ```bash
 git clone https://github.com/ail-project/ail-framework.git
-cd AIL-framework
+cd ail-framework
 ./installing_deps.sh
 
-cd ~/AIL-framework/
+cd ~/ail-framework/
 cd bin/
 ./LAUNCH.sh -l
 ```
@@ -200,8 +200,8 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Phone" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Phone.py; read x"
     sleep 0.1
-    screen -S "Script_AIL" -X screen -t "Release" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Release.py; read x"
-    sleep 0.1
+    #screen -S "Script_AIL" -X screen -t "Release" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Release.py; read x"
+    #sleep 0.1
     screen -S "Script_AIL" -X screen -t "Cve" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Cve.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "WebStats" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./WebStats.py; read x"
@@ -20,8 +20,12 @@ from Helper import Process
 from packages import Paste
 import ailleakObject
 
-sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
+import Tag
 
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
 import ConfigLoader
+import item_basic
 
 from pymisp import PyMISP
 
@@ -54,15 +58,15 @@ from thehive4py.models import Case, CaseTask, CustomFieldHelper
 
 
 
-def create_the_hive_alert(source, path, tag):
+def create_the_hive_alert(source, item_id, tag):
     # # TODO: check items status (processed by all modules)
     # # TODO: add item metadata: decoded content, link to auto crawled content, pgp correlation, cryptocurrency correlation...
     # # # TODO: description, add AIL link:show items ?
-    tags = list( r_serv_metadata.smembers('tag:{}'.format(path)) )
+    tags = list( r_serv_metadata.smembers('tag:{}'.format(item_id)) )
 
     artifacts = [
         AlertArtifact( dataType='uuid-ail', data=r_serv_db.get('ail:uuid') ),
-        AlertArtifact( dataType='file', data=path, tags=tags )
+        AlertArtifact( dataType='file', data=item_basic.get_item_filepath(item_id), tags=tags )
     ]
 
     # Prepare the sample Alert
@@ -94,34 +98,30 @@ def create_the_hive_alert(source, path, tag):
 def feeder(message, count=0):
 
     if flag_the_hive or flag_misp:
-        tag, path = message.split(';')
+        tag, item_id = message.split(';')
 
         ## FIXME: remove it
-        if PASTES_FOLDER not in path:
-            path = os.path.join(PASTES_FOLDER, path)
-        try:
-            paste = Paste.Paste(path)
-        except FileNotFoundError:
+        if not item_basic.exist_item(item_id):
             if count < 10:
                 r_serv_db.zincrby('mess_not_saved_export', message, 1)
                 return 0
             else:
                 r_serv_db.zrem('mess_not_saved_export', message)
-                print('Error: {} do not exist, tag= {}'.format(path, tag))
+                print('Error: {} do not exist, tag= {}'.format(item_id, tag))
                 return 0
 
-        source = '/'.join(paste.p_path.split('/')[-6:])
+        source = item_basic.get_source(item_id)
 
         if HiveApi != False:
             if int(r_serv_db.get('hive:auto-alerts')) == 1:
-                whitelist_hive = r_serv_db.scard('whitelist_hive')
                 if r_serv_db.sismember('whitelist_hive', tag):
-                    create_the_hive_alert(source, path, tag)
+                    create_the_hive_alert(source, item_id, tag)
             else:
                 print('hive, auto alerts creation disable')
         if flag_misp:
             if int(r_serv_db.get('misp:auto-events')) == 1:
                 if r_serv_db.sismember('whitelist_misp', tag):
-                    misp_wrapper.pushToMISP(uuid_ail, path, tag)
+                    misp_wrapper.pushToMISP(uuid_ail, item_id, tag)
             else:
                 print('misp, auto events creation disable')
 
@@ -161,15 +161,15 @@ if __name__ == "__main__":
         print('Not connected to MISP')
 
     if flag_misp:
-        try:
+        #try:
         misp_wrapper = ailleakObject.ObjectWrapper(pymisp)
         r_serv_db.set('ail:misp', True)
         print('Connected to MISP:', misp_url)
-        except Exception as e:
-            flag_misp = False
-            r_serv_db.set('ail:misp', False)
-            print(e)
-            print('Not connected to MISP')
+        #except Exception as e:
+        #    flag_misp = False
+        #    r_serv_db.set('ail:misp', False)
+        #    print(e)
+        #    print('Not connected to MISP')
 
     # create The HIVE connection
     if flag_the_hive:
@@ -23,7 +23,7 @@ config_loader = ConfigLoader.ConfigLoader()
 publisher.port = 6380
 publisher.channel = "Script"
 
-def sendEmailNotification(recipient, alert_name, content):
+def sendEmailNotification(recipient, mail_subject, mail_body):
 
     sender = config_loader.get_config_str("Notifications", "sender")
     sender_user = config_loader.get_config_str("Notifications", "sender_user")
@@ -60,14 +60,13 @@ def sendEmailNotification(recipient, alert_name, content):
         mime_msg = MIMEMultipart()
         mime_msg['From'] = sender
         mime_msg['To'] = recipient
-        mime_msg['Subject'] = "AIL Framework " + alert_name + " Alert"
+        mime_msg['Subject'] = mail_subject
 
-        body = content
-        mime_msg.attach(MIMEText(body, 'plain'))
+        mime_msg.attach(MIMEText(mail_body, 'plain'))
 
         smtp_server.sendmail(sender, recipient, mime_msg.as_string())
         smtp_server.quit()
-        print('Send notification ' + alert_name + ' to '+recipient)
+        print('Send notification: ' + mail_subject + ' to '+recipient)
 
     except Exception as err:
         traceback.print_tb(err.__traceback__)
@@ -10,7 +10,6 @@ import os
 import re
 import sys
 import time
-import signal
 
 from Helper import Process
 from pubsublogger import publisher
@@ -20,18 +19,16 @@ import NotificationHelper
 from packages import Item
 from packages import Term
 
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
+import Tracker
+import regex_helper
 
 full_item_url = "/showsavedpaste/?paste="
 mail_body_template = "AIL Framework,\nNew occurrence for term tracked regex: {}\nitem id: {}\nurl: {}{}"
 
 dict_regex_tracked = Term.get_regex_tracked_words_dict()
 last_refresh = time.time()
 
-class TimeoutException(Exception):
-    pass
-def timeout_handler(signum, frame):
-    raise TimeoutException
-signal.signal(signal.SIGALRM, timeout_handler)
 
 def new_term_found(term, term_type, item_id, item_date):
     uuid_list = Term.get_term_uuid_list(term, 'regex')
     print('new tracked term found: {} in {}'.format(term, item_id))
@@ -46,9 +43,10 @@ def new_term_found(term, term_type, item_id, item_date):
 
     mail_to_notify = Term.get_term_mails(term_uuid)
     if mail_to_notify:
+        mail_subject = Tracker.get_email_subject(term_uuid)
         mail_body = mail_body_template.format(term, item_id, full_item_url, item_id)
         for mail in mail_to_notify:
-            NotificationHelper.sendEmailNotification(mail, 'Term Tracker', mail_body)
+            NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body)
 
 if __name__ == "__main__":
     publisher.port = 6380
@@ -56,11 +54,14 @@ if __name__ == "__main__":
     publisher.info("Script RegexTracker started")
 
     config_section = 'RegexTracker'
+    module_name = "RegexTracker"
     p = Process(config_section)
     max_execution_time = p.config.getint(config_section, "max_execution_time")
 
     ull_item_url = p.config.get("Notifications", "ail_domain") + full_item_url
 
+    redis_cache_key = regex_helper.generate_redis_cache_key(module_name)
 
     # Regex Frequency
     while True:
 
@@ -72,20 +73,10 @@ if __name__ == "__main__":
             item_content = Item.get_item_content(item_id)
 
             for regex in dict_regex_tracked:
-                signal.alarm(max_execution_time)
-                try:
-                    matched = dict_regex_tracked[regex].search(item_content)
-                except TimeoutException:
-                    print ("{0} processing timeout".format(item_id))
-                    continue
-                else:
-                    signal.alarm(0)
+                matched = regex_helper.regex_search(module_name, redis_cache_key, dict_regex_tracked[regex], item_id, item_content, max_time=max_execution_time)
 
                 if matched:
                     new_term_found(regex, 'regex', item_id, item_date)
 
 
         else:
             time.sleep(5)
@@ -45,4 +45,5 @@ if __name__ == '__main__':
         tag, item_id = message.split(';')
 
         Tag.add_tag("item", tag, item_id)
 
         p.populate_set_out(message, 'MISP_The_Hive_feeder')
@@ -18,6 +18,8 @@ import NotificationHelper
 from packages import Item
 from packages import Term
 
+from lib import Tracker
+
 full_item_url = "/showsavedpaste/?paste="
 
 mail_body_template = "AIL Framework,\nNew occurrence for term tracked term: {}\nitem id: {}\nurl: {}{}"

@@ -48,9 +50,10 @@ def new_term_found(term, term_type, item_id, item_date):
 
     mail_to_notify = Term.get_term_mails(term_uuid)
     if mail_to_notify:
+        mail_subject = Tracker.get_email_subject(term_uuid)
         mail_body = mail_body_template.format(term, item_id, full_item_url, item_id)
         for mail in mail_to_notify:
-            NotificationHelper.sendEmailNotification(mail, 'Term Tracker', mail_body)
+            NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body)
 
 
 if __name__ == "__main__":
 
@@ -4,7 +4,10 @@
 import os
 import sys
 
+from pymisp import MISPEvent, MISPObject
 from pymisp.tools.abstractgenerator import AbstractMISPObjectGenerator
+MISPEvent
 
 from packages import Paste
 import datetime
 import json

@@ -12,28 +15,10 @@ from io import BytesIO
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 import ConfigLoader
+import item_basic
 
-class AilLeakObject(AbstractMISPObjectGenerator):
-    def __init__(self, uuid_ail, p_source, p_date, p_content, p_duplicate, p_duplicate_number):
-        super(AbstractMISPObjectGenerator, self).__init__('ail-leak')
-        self._uuid = uuid_ail
-        self._p_source = p_source
-        self._p_date = p_date
-        self._p_content = p_content
-        self._p_duplicate = p_duplicate
-        self._p_duplicate_number = p_duplicate_number
-        self.generate_attributes()
-
-    def generate_attributes(self):
-        self.add_attribute('origin', value=self._p_source, type='text')
-        self.add_attribute('last-seen', value=self._p_date, type='datetime')
-        if self._p_duplicate_number > 0:
-            self.add_attribute('duplicate', value=self._p_duplicate, type='text')
-            self.add_attribute('duplicate_number', value=self._p_duplicate_number, type='counter')
-        self._pseudofile = BytesIO(self._p_content.encode())
-        res = self.add_attribute('raw-data', value=self._p_source, data=self._pseudofile, type="attachment")# , ShadowAttribute=self.p_tag)
-        #res.add_shadow_attributes(tag)
-        self.add_attribute('sensor', value=self._uuid, type="text")
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'export'))
+import MispExport
 
 class ObjectWrapper:
     def __init__(self, pymisp):
@@ -45,53 +30,48 @@ class ObjectWrapper:
         config_loader = None
         self.attribute_to_tag = None
 
-    def add_new_object(self, uuid_ail, path, p_source, tag):
+    def add_new_object(self, uuid_ail, item_id, tag):
         self.uuid_ail = uuid_ail
-        self.path = path
-        self.p_source = p_source
-        self.paste = Paste.Paste(path)
-        self.p_date = self.date_to_str(self.paste.p_date)
-        self.p_content = self.paste.get_p_content()
-        self.p_tag = tag
 
-        temp = self.paste._get_p_duplicate()
+        # self.paste = Paste.Paste(path)
+        # temp = self.paste._get_p_duplicate()
+        #
+        # #beautifier
+        # if not temp:
+        #     temp = ''
+        #
+        # p_duplicate_number = len(temp) if len(temp) >= 0 else 0
+        #
+        # to_ret = ""
+        # for dup in temp[:10]:
+        #     dup = dup.replace('\'','\"').replace('(','[').replace(')',']')
+        #     dup = json.loads(dup)
+        #     algo = dup[0]
+        #     path = dup[1].split('/')[-6:]
+        #     path = '/'.join(path)[:-3] # -3 removes .gz
+        #     if algo == 'tlsh':
+        #         perc = 100 - int(dup[2])
+        #     else:
+        #         perc = dup[2]
+        #     to_ret += "{}: {} [{}%]\n".format(path, algo, perc)
+        # p_duplicate = to_ret
 
-        #beautifier
-        if not temp:
-            temp = ''
-
-        p_duplicate_number = len(temp) if len(temp) >= 0 else 0
-
-        to_ret = ""
-        for dup in temp[:10]:
-            dup = dup.replace('\'','\"').replace('(','[').replace(')',']')
-            dup = json.loads(dup)
-            algo = dup[0]
-            path = dup[1].split('/')[-6:]
-            path = '/'.join(path)[:-3] # -3 removes .gz
-            if algo == 'tlsh':
-                perc = 100 - int(dup[2])
-            else:
-                perc = dup[2]
-            to_ret += "{}: {} [{}%]\n".format(path, algo, perc)
-        p_duplicate = to_ret
-
-        self.mispObject = AilLeakObject(self.uuid_ail, self.p_source, self.p_date, self.p_content, p_duplicate, p_duplicate_number)
+        return MispExport.export_ail_item(item_id, [tag])
 
     def date_to_str(self, date):
         return "{0}-{1}-{2}".format(date.year, date.month, date.day)
 
-    def get_all_related_events(self):
-        to_search = "Daily AIL-leaks"
-        result = self.pymisp.search_all(to_search)
+    def get_all_related_events(self, to_search):
+        result = self.pymisp.search(controller='events', eventinfo=to_search, metadata=False)
         events = []
-        for e in result['response']:
-            events.append({'id': e['Event']['id'], 'org_id': e['Event']['org_id'], 'info': e['Event']['info']})
+        if result:
+            for e in result:
+                events.append({'id': e['Event']['id'], 'org_id': e['Event']['org_id'], 'info': e['Event']['info']})
         return events
 
     def get_daily_event_id(self):
         to_match = "Daily AIL-leaks {}".format(datetime.date.today())
-        events = self.get_all_related_events()
+        events = self.get_all_related_events(to_match)
         for dic in events:
             info = dic['info']
             e_id = dic['id']
@@ -99,8 +79,8 @@ class ObjectWrapper:
                 print('Found: ', info, '->', e_id)
                 self.currentID_date = datetime.date.today()
                 return e_id
-        created_event = self.create_daily_event()['Event']
-        new_id = created_event['id']
+        created_event = self.create_daily_event()
+        new_id = created_event['Event']['id']
         print('New event created:', new_id)
         self.currentID_date = datetime.date.today()
         return new_id
@@ -120,17 +100,20 @@ class ObjectWrapper:
         orgc_id = None
         sharing_group_id = None
         date = None
-        event = self.pymisp.new_event(distribution, threat,
-                analysis, info, date,
-                published, orgc_id, org_id, sharing_group_id)
-        eventUuid = event['Event']['uuid']
-        self.pymisp.tag(eventUuid, 'infoleak:output-format="ail-daily"')
-        return event
+
+        event = MISPEvent()
+        event.distribution = distribution
+        event.info = info
+        event.analysis = analysis
+        event.threat = threat
+        event.published = published
+
+        event.add_tag('infoleak:output-format="ail-daily"')
+        existing_event = self.pymisp.add_event(event)
+        return existing_event
 
     # Publish object to MISP
-    def pushToMISP(self, uuid_ail, path, tag):
-        self._p_source = path.split('/')[-5:]
-        self._p_source = '/'.join(self._p_source)[:-3]
+    def pushToMISP(self, uuid_ail, item_id, tag):
 
         if self.currentID_date != datetime.date.today(): #refresh id
             self.eventID_to_push = self.get_daily_event_id()
@@ -138,42 +121,37 @@ class ObjectWrapper:
         mispTYPE = 'ail-leak'
 
         # paste object already exist
-        if self.paste_object_exist(self.eventID_to_push, self._p_source):
+        if self.paste_object_exist(self.eventID_to_push, item_id):
             # add new tag
             self.tag(self.attribute_to_tag, tag)
-            print(self._p_source + ' tagged: ' + tag)
+            print(item_id + ' tagged: ' + tag)
         #create object
         else:
-            self.add_new_object(uuid_ail, path, self._p_source, tag)
+            misp_obj = self.add_new_object(uuid_ail, item_id, tag)
 
+            # deprecated
+            # try:
+            #     templateID = [x['ObjectTemplate']['id'] for x in self.pymisp.get_object_templates_list() if x['ObjectTemplate']['name'] == mispTYPE][0]
+            # except IndexError:
+            #     valid_types = ", ".join([x['ObjectTemplate']['name'] for x in self.pymisp.get_object_templates_list()])
+            #     print ("Template for type %s not found! Valid types are: %s" % (mispTYPE, valid_types))
 
-            try:
-                templateID = [x['ObjectTemplate']['id'] for x in self.pymisp.get_object_templates_list() if x['ObjectTemplate']['name'] == mispTYPE][0]
-            except IndexError:
-                valid_types = ", ".join([x['ObjectTemplate']['name'] for x in self.pymisp.get_object_templates_list()])
-                print ("Template for type %s not found! Valid types are: %s" % (mispTYPE, valid_types))
-            r = self.pymisp.add_object(self.eventID_to_push, templateID, self.mispObject)
+            r = self.pymisp.add_object(self.eventID_to_push, misp_obj, pythonify=True)
             if 'errors' in r:
                 print(r)
             else:
-                # tag new object
-                self.set_attribute_to_tag_uuid(self.eventID_to_push, self._p_source)
-                self.tag(self.attribute_to_tag, tag)
-                print('Pushed:', tag, '->', self._p_source)
+                print('Pushed:', tag, '->', item_id)
 
-    def paste_object_exist(self, eventId, source):
-        res = self.pymisp.search(controller='attributes', eventid=eventId, values=source)
+    def paste_object_exist(self, eventId, item_id):
+        res = self.pymisp.search(controller='attributes', eventid=eventId, value=item_id)
         # object already exist
-        if res['response']:
-            self.attribute_to_tag = res['response']['Attribute'][0]['uuid']
+        if res.get('Attribute', []):
+            self.attribute_to_tag = res['Attribute'][0]['uuid']
             return True
         # new object
         else:
             return False
 
-    def set_attribute_to_tag_uuid(self, eventId, source):
-        res = self.pymisp.search(controller='attributes', eventid=eventId, values=source)
-        self.attribute_to_tag = res['response']['Attribute'][0]['uuid']
 
     def tag(self, uuid, tag):
         self.pymisp.tag(uuid, tag)
bin/export/Export.py (new executable file, 46 lines)

@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import os
+import sys
+import redis
+from uuid import uuid4
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
+import ConfigLoader
+
+## LOAD CONFIG ##
+config_loader = ConfigLoader.ConfigLoader()
+r_serv_cache = config_loader.get_redis_conn("Redis_Cache")
+r_serv_db = config_loader.get_redis_conn("ARDB_DB")
+r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
+config_loader = None
+## -- ##
+
+def get_ail_uuid():
+    uuid_ail = r_serv_db.get('ail:uuid')
+    if uuid_ail is None:
+        uuid_ail = str(uuid4())
+        r_serv_db.set('ail:uuid', uuid_ail)
+    return uuid_ail
+
+def load_tags_to_export_in_cache():
+    all_exports = ['misp', 'thehive']
+    for export_target in all_exports:
+        # save solo tags in cache
+        all_tags_to_export = Tag.get_list_of_solo_tags_to_export_by_type()
+        if len(all_tags_to_export) > 1:
+            r_serv_cache.sadd('to_export:solo_tags:{}'.format(export_target), *all_tags_to_export)
+        elif all_tags_to_export:
+            r_serv_cache.sadd('to_export:solo_tags:{}'.format(export_target), all_tags_to_export[0])
+
+        # save combinaison of tags in cache
+        pass
+
+###########################################################
+# # set default
+# if r_serv_db.get('hive:auto-alerts') is None:
+#     r_serv_db.set('hive:auto-alerts', 0)
+#
+# if r_serv_db.get('misp:auto-events') is None:
+#     r_serv_db.set('misp:auto-events', 0)
@@ -7,6 +7,7 @@ import sys
 import uuid
 import redis
 
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'export'))
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
 import Item

@@ -19,6 +20,7 @@ import Screenshot
 import Correlate_object
 
 import AILObjects
+import Export
 
 # # TODO: # FIXME: REFRACTOR ME => use UI/Global config
 sys.path.append('../../configs/keys')

@@ -59,8 +61,12 @@ def tag_misp_object_attributes(l_ref_obj_attr, tags):
     for tag in tags:
         obj_attr.add_tag(tag)
 
-def export_ail_item(item_id):
+def export_ail_item(item_id, tags=[]):
     dict_metadata = Item.get_item({'id': item_id, 'date':True, 'tags':True, 'raw_content':True})[0]
+    # force tags
+    for tag in tags:
+        if tag not in dict_metadata['tags']:
+            dict_metadata['tags'].append(tag)
 
     #obj = MISPObject('ail-item', standalone=True)
     obj = MISPObject('ail-leak', standalone=True)

@@ -69,6 +75,7 @@ def export_ail_item(item_id):
     l_obj_attr = []
     l_obj_attr.append( obj.add_attribute('first-seen', value=dict_metadata['date']) )
     l_obj_attr.append( obj.add_attribute('raw-data', value=item_id, data=dict_metadata['raw_content']) )
+    l_obj_attr.append( obj.add_attribute('sensor', value=Export.get_ail_uuid()) )
 
     # add tags
     if dict_metadata['tags']:
@@ -57,14 +57,20 @@ topic = '102'
 
 while True:
     time.sleep(base_sleeptime + sleep_inc)
-    paste = r.lpop("pastes")
-    if paste is None:
+    item_id = r.lpop("pastes")
+    if item_id is None:
         continue
     try:
-        print(paste)
-        with open(pystemonpath+paste, 'rb') as f: #.read()
+        print(item_id)
+        full_item_path = os.path.join(pystemonpath, item_id)
+        if not os.path.isfile(full_item_path):
+            print('Error: {}, file not found'.format(full_item_path))
+            sleep_inc = 1
+            continue
+
+        with open(full_item_path, 'rb') as f: #.read()
             messagedata = f.read()
-        path_to_send = os.path.join(pastes_directory,paste)
+        path_to_send = os.path.join(pastes_directory, item_id)
 
         s = b' '.join( [ topic.encode(), path_to_send.encode(), base64.b64encode(messagedata) ] )
         socket.send(s)
@@ -14,6 +14,7 @@ import datetime
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
 import item_basic
+import Username
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'import', 'ail_json_importer'))
 from Default_json import Default_json

@@ -34,11 +35,13 @@ class Ail_feeder_twitter(Default_json):
         item_id = str(self.json_item['meta']['twitter:tweet_id'])
         return os.path.join('twitter', item_date, item_id) + '.gz'
 
-    # # TODO:
     def process_json_meta(self, process, item_id):
         '''
         Process JSON meta filed.
         '''
         twitter_id = str(self.json_item['meta']['twitter:tweet_id'])
         item_basic.add_map_obj_id_item_id(twitter_id, item_id, 'twitter_id')
+        username = str(self.json_item['meta']['twitter:id'])
+        item_date = item_basic.get_item_date(item_id)
+        Username.save_item_correlation('twitter', username, item_id, item_date)
         return None
@@ -13,7 +13,7 @@ import ConfigLoader
 import Decoded
 import Domain
 import Screenshot
-import telegram
+import Username
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
 import Pgp

@@ -36,7 +36,7 @@ def is_valid_object_subtype(object_type, object_subtype):
     elif object_type == 'cryptocurrency':
         return Cryptocurrency.cryptocurrency.is_valid_obj_subtype(object_subtype)
     elif object_type == 'username':
-        return telegram.correlation.is_valid_obj_subtype(object_subtype)
+        return Username.correlation.is_valid_obj_subtype(object_subtype)
     elif object_subtype == None:
         return True
     else:

@@ -69,7 +69,7 @@ def exist_object(object_type, correlation_id, type_id=None): # => work on object
     elif object_type == 'cryptocurrency':
         return Cryptocurrency.cryptocurrency.exist_correlation(type_id, correlation_id)
     elif object_type == 'username':
-        return telegram.correlation.exist_correlation(type_id, correlation_id)
+        return Username.correlation.exist_correlation(type_id, correlation_id)
     elif object_type == 'screenshot' or object_type == 'image':
         return Screenshot.exist_screenshot(correlation_id)
     else:

@@ -88,7 +88,7 @@ def get_object_metadata(object_type, correlation_id, type_id=None):
     elif object_type == 'cryptocurrency':
         return Cryptocurrency.cryptocurrency.get_metadata(type_id, correlation_id)
     elif object_type == 'username':
-        return telegram.correlation.get_metadata(type_id, correlation_id)
+        return Username.correlation.get_metadata(type_id, correlation_id)
     elif object_type == 'screenshot' or object_type == 'image':
         return Screenshot.get_metadata(correlation_id)

@@ -104,7 +104,7 @@ def get_object_correlation(object_type, value, correlation_names=None, correlati
     elif object_type == 'cryptocurrency':
         return Cryptocurrency.cryptocurrency.get_correlation_all_object(requested_correl_type, value, correlation_objects=correlation_objects)
     elif object_type == 'username':
-        return telegram.correlation.get_correlation_all_object(requested_correl_type, value, correlation_objects=correlation_objects)
+        return Username.correlation.get_correlation_all_object(requested_correl_type, value, correlation_objects=correlation_objects)
     elif object_type == 'screenshot' or object_type == 'image':
         return Screenshot.get_screenshot_correlated_object(value, correlation_objects=correlation_objects)
     return {}

@@ -157,6 +157,9 @@ def get_correlation_node_icon(correlation_name, correlation_type=None, value=Non
         if correlation_type == 'telegram':
             icon_class = 'fab'
             icon_text = '\uf2c6'
+        elif correlation_type == 'twitter':
+            icon_class = 'fab'
+            icon_text = '\uf099'
         else:
             icon_text = '\uf007'
@@ -25,7 +25,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 import ConfigLoader
 import Correlate_object
 import Screenshot
-import telegram
+import Username
 
 config_loader = ConfigLoader.ConfigLoader()
 r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")

@@ -564,7 +564,7 @@ def get_domain_username(domain, currencies_type=None, get_nb=False):
     :param currencies_type: list of pgp type
     :type currencies_type: list, optional
     '''
-    return telegram.correlation.get_domain_correlation_dict(domain, correlation_type=currencies_type, get_nb=get_nb)
+    return Username.correlation.get_domain_correlation_dict(domain, correlation_type=currencies_type, get_nb=get_nb)
 
 def get_domain_decoded(domain):
     '''
bin/lib/Tracker.py (new executable file, 25 lines)

@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import os
+import sys
+import time
+import redis
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
+import ConfigLoader
+#import item_basic
+
+config_loader = ConfigLoader.ConfigLoader()
+r_serv_tracker = config_loader.get_redis_conn("ARDB_Tracker")
+config_loader = None
+
+def get_tracker_description(tracker_uuid):
+    return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'description')
+
+def get_email_subject(tracker_uuid):
+    tracker_description = get_tracker_description(tracker_uuid)
+    if not tracker_description:
+        return "AIL framework: Tracker Alert"
+    else:
+        return 'AIL framework: {}'.format(tracker_description)
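The two tracker modules patched above consume this helper to build a per-tracker mail subject; roughly, with ``term_uuid``, ``mail`` and ``mail_body`` as placeholders taken from the tracker loop:

```python
# sketch of the call sequence used by TermTrackerMod / RegexTracker above
mail_subject = Tracker.get_email_subject(term_uuid)  # "AIL framework: <tracker description>"
NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body)
```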
bin/lib/Username.py (new executable file, 21 lines)

@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import os
+import sys
+import redis
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
+import ConfigLoader
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
+import Correlation
+
+config_loader = ConfigLoader.ConfigLoader()
+r_serv_crawler = config_loader.get_redis_conn("ARDB_Onion")
+config_loader = None
+
+correlation = Correlation.Correlation('username', ['telegram', 'twitter'])
+
+def save_item_correlation(subtype, username, item_id, item_date):
+    correlation.save_item_correlation(subtype, username, item_id, item_date)
bin/lib/domain_basic.py (new executable file, 29 lines)

@@ -0,0 +1,29 @@
+#!/usr/bin/python3
+
+"""
+``basic domain lib``
+===================
+
+
+"""
+
+import os
+import sys
+import redis
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
+import ConfigLoader
+
+config_loader = ConfigLoader.ConfigLoader()
+r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
+config_loader = None
+
+def get_domain_type(domain):
+    if str(domain).endswith('.onion'):
+        return 'onion'
+    else:
+        return 'regular'
+
+def delete_domain_item_core(item_id, domain, port):
+    domain_type = get_domain_type(domain)
+    r_serv_onion.zrem('crawler_history_{}:{}:{}'.format(domain_type, domain, port), item_id)
bin/lib/index_whoosh.py (new executable file, 103 lines)

@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import os
+import sys
+import redis
+
+from shutil import rmtree
+
+sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
+import ConfigLoader
+
+config_loader = ConfigLoader.ConfigLoader()
+INDEX_PATH = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Indexer", "path"))
+all_index_file = os.path.join(INDEX_PATH, 'all_index.txt')
+config_loader = None
+
+def get_first_index_name():
+    with open(all_index_file) as f:
+        first_index = f.readline().replace('\n', '')
+    return first_index
+
+def get_last_index_name():
+    with open(all_index_file) as f:
+        for line in f: # # FIXME: replace by tail ?
+            pass
+        last_index = line.replace('\n', '')
+    return last_index
+
+def get_all_index():
+    all_index = []
+    with open(all_index_file) as f:
+        for line in f:
+            line = line.replace('\n', '')
+            if line:
+                all_index.append(line)
+    return all_index
+
+def get_index_full_path(index_name):
+    return os.path.join(INDEX_PATH, index_name)
+
+# remove empty line
+def check_index_list_integrity():
+    with open(all_index_file, 'r') as f:
+        lines = f.readlines()
+    with open(all_index_file, 'w') as f:
+        for line in lines:
+            if line != '\n':
+                f.write(line)
+
+def _remove_index_name_from_all_index(index_name):
+    with open(all_index_file, 'r') as f:
+        lines = f.readlines()
+    with open(all_index_file, 'w') as f:
+        for line in lines:
+            if line.replace('\n', '') != index_name:
+                f.write(line)
+
+def delete_index_by_name(index_name):
+    index_path = get_index_full_path(index_name)
+    index_path = os.path.realpath(index_path)
+    # incorrect filename
+    if not os.path.commonprefix([index_path, INDEX_PATH]) == INDEX_PATH:
+        raise Exception('Path traversal detected {}'.format(index_path))
+    if not os.path.isdir(index_path):
+        print('Error: The index directory {} doesn\'t exist'.format(index_path))
+        return None
+    res = rmtree(index_path)
+    _remove_index_name_from_all_index(index_name)
+
+def delete_first_index():
+    index_name = get_first_index_name()
+    delete_index_by_name(index_name)
+
+def delete_last_index():
+    index_name = get_last_index_name()
+    delete_index_by_name(index_name)
+
+#### DATA RETENTION ####
+
+#keep time most recent index
+def delete_older_index_by_time(int_time):
+    all_index = get_all_index()
+    if all_index:
+        if int(all_index[-1]) > int_time: # make sure to keep one files
+            for index_name in all_index:
+                if int(index_name) < int_time:
+                    print('deleting index {} ...'.format(index_name))
+                    delete_index_by_name(index_name)
+
+# keep x most recent index
+def delete_older_index(number_of_index_to_keep):
+    if number_of_index_to_keep > 1:
+        all_index = get_all_index()
+        if len(get_all_index()) > number_of_index_to_keep:
+            for index_name in all_index[0:-number_of_index_to_keep]:
+                print('deleting index {} ...'.format(index_name))
+                delete_index_by_name(index_name)
+
+##-- DATA RETENTION --##
+
+# if __name__ == '__main__':
+#     delete_older_index(3)
@@ -3,6 +3,7 @@
 
 import os
 import sys
+import gzip
 
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 import ConfigLoader

@@ -12,6 +13,7 @@ config_loader = ConfigLoader.ConfigLoader()
 PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/'
 PASTES_FOLDER = os.path.join(os.path.realpath(PASTES_FOLDER), '')
 
+r_cache = config_loader.get_redis_conn("Redis_Cache")
 r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
 config_loader = None

@@ -33,6 +35,9 @@ def get_item_date(item_id, add_separator=False):
     else:
         return '{}{}{}'.format(l_directory[-4], l_directory[-3], l_directory[-2])
 
+def get_source(item_id):
+    return item_id.split('/')[-5]
+
 # # TODO: add an option to check the tag
 def is_crawled(item_id):
     return item_id.startswith('crawled')
@@ -40,6 +45,102 @@ def is_crawled(item_id):
 def get_item_domain(item_id):
     return item_id[19:-36]
 
+def get_item_content(item_id):
+    item_full_path = os.path.join(PASTES_FOLDER, item_id)
+    try:
+        item_content = r_cache.get(item_full_path)
+    except UnicodeDecodeError:
+        item_content = None
+    except Exception as e:
+        item_content = None
+    if item_content is None:
+        try:
+            with gzip.open(item_full_path, 'r') as f:
+                item_content = f.read().decode()
+                r_cache.set(item_full_path, item_content)
+                r_cache.expire(item_full_path, 300)
+        except:
+            item_content = ''
+    return str(item_content)
+
+#### TREE CHILD/FATHER ####
+def is_father(item_id):
+    return r_serv_metadata.exists('paste_children:{}'.format(item_id))
+
+def is_children(item_id):
+    return r_serv_metadata.hexists('paste_metadata:{}'.format(item_id), 'father')
+
+def is_root_node():
+    if is_father(item_id) and not is_children(item_id):
+        return True
+    else:
+        return False
+
+def is_node(item_id):
+    if is_father(item_id) or is_children(item_id):
+        return True
+    else:
+        return False
+
+def is_leaf(item_id):
+    if not is_father(item_id) and is_children(item_id):
+        return True
+    else:
+        return False
+
+def is_domain_root(item_id):
+    if not is_crawled(item_id):
+        return False
+    else:
+        domain = get_item_domain(item_id)
+        item_father = get_item_parent(item_id)
+        if not is_crawled(item_father):
+            return True
+        else:
+            # same domain
+            if get_item_domain(item_father) == domain:
+                return False
+            else:
+                return True
+
+def get_nb_children(item_id):
+    return r_serv_metadata.scard('paste_children:{}'.format(item_id))
+
+
+def get_item_parent(item_id):
+    return r_serv_metadata.hget('paste_metadata:{}'.format(item_id), 'father')
+
+def get_item_children(item_id):
+    return list(r_serv_metadata.smembers('paste_children:{}'.format(item_id)))
+
+def add_item_parent(item_parent, item_id):
+    return item_basic.add_item_parent(item_parent, item_id)
+
+# # TODO: handle domain last origin in domain lib
+def _delete_node(item_id):
+    # only if item isn't deleted
+    #if is_crawled(item_id):
+    #    r_serv_metadata.hrem('paste_metadata:{}'.format(item_id), 'real_link')
+    for chidren_id in get_item_children(item_id):
+        r_serv_metadata.hdel('paste_metadata:{}'.format(chidren_id), 'father')
+    r_serv_metadata.delete('paste_children:{}'.format(item_id))
+
+    # delete regular
+    # simple if leaf
+
+    # delete item node
+
+def get_all_domain_node_by_item_id(item_id, l_nodes=[]):
+    domain = get_item_domain(item_id)
+    for child_id in get_item_children(item_id):
+        if get_item_domain(child_id) == domain:
+            l_nodes.append(child_id)
+            l_nodes = get_all_domain_node_by_item_id(child_id, l_nodes)
+    return l_nodes
+
+##-- --##
+
+
 def add_item_parent_by_parent_id(parent_type, parent_id, item_id):
     parent_item_id = get_obj_id_item_id(parent_type, parent_id)
     if parent_item_id:
@@ -50,9 +151,9 @@ def add_item_parent(parent_item_id, item_id):
     r_serv_metadata.sadd('paste_children:{}'.format(parent_item_id), item_id)
     return True
 
-def add_map_obj_id_item_id(obj_id, item_id, obj_type):
-    if obj_type == 'twitter_id':
-        r_serv_metadata.hset('map:twitter_id:item_id', obj_id, item_id)
+# TODO:
+# FIXME:
+#### UNKNOW SECTION ####
 
 def get_obj_id_item_id(parent_type, parent_id):
     all_parents_type = ['twitter_id']
@@ -60,3 +161,11 @@ def get_obj_id_item_id(parent_type, parent_id):
         return r_serv_metadata.hget('map:twitter_id:item_id', parent_id)
     else:
         return None
+
+def add_map_obj_id_item_id(obj_id, item_id, obj_type):
+    if obj_type == 'twitter_id':
+        r_serv_metadata.hset('map:twitter_id:item_id', obj_id, item_id)
+
+    # delete twitter id
+
+##-- --##
@@ -73,3 +73,30 @@ def regex_findall(module_name, redis_key, regex, item_id, item_content, max_time
         print("Caught KeyboardInterrupt, terminating workers")
         proc.terminate()
         sys.exit(0)
+
+def _regex_search(redis_key, regex, item_content):
+    first_occ = regex.search(item_content)
+    if first_occ:
+        r_serv_cache.set(redis_key, first_occ)
+
+def regex_search(module_name, redis_key, regex, item_id, item_content, max_time=30):
+    proc = Proc(target=_regex_search, args=(redis_key, regex, item_content, ))
+    try:
+        proc.start()
+        proc.join(max_time)
+        if proc.is_alive():
+            proc.terminate()
+            Statistics.incr_module_timeout_statistic(module_name)
+            err_mess = "{}: processing timeout: {}".format(module_name, item_id)
+            print(err_mess)
+            publisher.info(err_mess)
+            return None
+        else:
+            first_occ = r_serv_cache.get(redis_key)
+            r_serv_cache.delete(redis_key)
+            proc.terminate()
+            return first_occ
+    except KeyboardInterrupt:
+        print("Caught KeyboardInterrupt, terminating workers")
+        proc.terminate()
+        sys.exit(0)
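This ``regex_search`` helper is what replaces the ``signal.alarm`` handling removed from ``RegexTracker.py`` above: the regex runs in a child process, the parent joins with a timeout, and a still-running worker gets terminated. A self-contained sketch of the same pattern, stripped of AIL's Redis cache plumbing (all names here are illustrative, not part of the commit):

```python
import re
from multiprocessing import Process, Queue

def _search(queue, regex, content):
    # the potentially catastrophic regex runs in an isolated child process
    queue.put(bool(regex.search(content)))

def search_with_timeout(regex, content, max_time=30):
    queue = Queue()
    proc = Process(target=_search, args=(queue, regex, content))
    proc.start()
    proc.join(max_time)
    if proc.is_alive():
        # still matching after max_time seconds: kill the worker, report a timeout
        proc.terminate()
        proc.join()
        return None
    return queue.get(timeout=1)  # child finished normally, its result is queued

if __name__ == '__main__':
    print(search_with_timeout(re.compile(r'crawled'), 'crawled/2020/...'))  # True
```

A likely motivation for the switch: a regex stuck in the C-level matching loop never yields to Python signal handlers, so ``signal.alarm`` cannot interrupt it, whereas terminating a separate process always works.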
@@ -5,20 +5,16 @@ import os
 import sys
 import redis

-sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
-import Correlation
-
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 import ConfigLoader
+import Username

 config_loader = ConfigLoader.ConfigLoader()
 r_serv_crawler = config_loader.get_redis_conn("ARDB_Onion")
 config_loader = None

-correlation = Correlation.Correlation('username', ['telegram'])
-
 def save_item_correlation(username, item_id, item_date):
-    correlation.save_item_correlation('telegram', username, item_id, item_date)
+    Username.save_item_correlation('telegram', username, item_id, item_date)

 def save_telegram_invite_hash(invite_hash, item_id):
     r_serv_crawler.sadd('telegram:invite_code', '{};{}'.format(invite_hash, item_id))
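`save_telegram_invite_hash` flattens each sighting into a `hash;item_id` string inside a single Redis set, so duplicates collapse for free. A sketch of writing and reading that set; the connection is an assumption, and the reader helper is hypothetical, not part of the patch:

```python
import redis

# Assumed connection; AIL takes this from ConfigLoader ("ARDB_Onion").
r_serv_crawler = redis.Redis(host='localhost', port=6382, db=0, decode_responses=True)

def save_telegram_invite_hash(invite_hash, item_id):
    # SADD deduplicates: re-seeing the same invite in the same item is a no-op.
    r_serv_crawler.sadd('telegram:invite_code', '{};{}'.format(invite_hash, item_id))

def iter_telegram_invite_codes():
    # Hypothetical reader, not in the diff: split the flattened pairs back apart.
    for entry in r_serv_crawler.smembers('telegram:invite_code'):
        invite_hash, item_id = entry.split(';', 1)
        yield invite_hash, item_id
```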
@@ -3,7 +3,6 @@

 import os
 import sys
-import gzip
 import redis

 from io import BytesIO

@@ -16,11 +15,14 @@ import Pgp

 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 import item_basic
+import domain_basic
 import ConfigLoader
 import Correlate_object
 import Decoded
 import Screenshot
-import telegram
+import Username
+
+from item_basic import *

 config_loader = ConfigLoader.ConfigLoader()
 # get and sanityze PASTE DIRECTORY

@@ -30,6 +32,7 @@ PASTES_FOLDER = os.path.join(os.path.realpath(PASTES_FOLDER), '')
 r_cache = config_loader.get_redis_conn("Redis_Cache")
 r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
 screenshot_directory = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"))
+
 config_loader = None

 def exist_item(item_id):

@@ -48,7 +51,7 @@ def get_item_date(item_id, add_separator=False):
     return item_basic.get_item_date(item_id, add_separator=add_separator)

 def get_source(item_id):
-    return item_id.split('/')[-5]
+    return item_basic.get_source(item_id)

 def get_item_basename(item_id):
     return os.path.basename(item_id)

@@ -71,22 +74,7 @@ def get_lines_info(item_id, item_content=None):


 def get_item_content(item_id):
-    item_full_path = os.path.join(PASTES_FOLDER, item_id)
-    try:
-        item_content = r_cache.get(item_full_path)
-    except UnicodeDecodeError:
-        item_content = None
-    except Exception as e:
-        item_content = None
-    if item_content is None:
-        try:
-            with gzip.open(item_full_path, 'r') as f:
-                item_content = f.read().decode()
-                r_cache.set(item_full_path, item_content)
-                r_cache.expire(item_full_path, 300)
-        except:
-            item_content = ''
-    return str(item_content)
+    return item_basic.get_item_content(item_id)

 # API
 def get_item(request_dict):
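The body deleted from `get_item_content` is still instructive, since it documents what `item_basic.get_item_content` is expected to do: a read-through cache over the gzipped paste, kept hot in Redis for 300 seconds. Restated as a standalone sketch, with the connection parameters assumed:

```python
import gzip
import redis

# Assumed connection; the original code used the "Redis_Cache" database.
r_cache = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True)

def get_item_content(item_full_path):
    # Read-through cache: try Redis first, fall back to the gzipped file on disk.
    try:
        item_content = r_cache.get(item_full_path)
    except Exception:
        item_content = None
    if item_content is None:
        try:
            with gzip.open(item_full_path, 'r') as f:
                item_content = f.read().decode()
            # Keep the decompressed text around for 5 minutes for repeat readers.
            r_cache.set(item_full_path, item_content)
            r_cache.expire(item_full_path, 300)
        except Exception:
            item_content = ''
    return str(item_content)
```

Centralizing this in `item_basic` removes the duplicated gzip/Redis logic and the now-unneeded `import gzip` at the top of Item.py.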
@@ -168,15 +156,15 @@ def get_item_pgp(item_id, currencies_type=None, get_nb=False):
     '''
     return Pgp.pgp.get_item_correlation_dict(item_id, correlation_type=currencies_type, get_nb=get_nb)

-def get_item_username(item_id, currencies_type=None, get_nb=False):
+def get_item_username(item_id, sub_type=None, get_nb=False):
     '''
     Return all pgp of a given item.

     :param item_id: item id
-    :param currencies_type: list of cryptocurrencies type
-    :type currencies_type: list, optional
+    :param sub_type: list of username type
+    :type sub_type: list, optional
     '''
-    return telegram.correlation.get_item_correlation_dict(item_id, correlation_type=currencies_type, get_nb=get_nb)
+    return Username.correlation.get_item_correlation_dict(item_id, correlation_type=sub_type, get_nb=get_nb)

 def get_item_decoded(item_id):
     '''

@@ -292,14 +280,8 @@ def get_domain(item_id):
     item_id = item_id[-1]
     return item_id[:-36]

-def get_item_parent(item_id):
-    return r_serv_metadata.hget('paste_metadata:{}'.format(item_id), 'father')
-
-def get_item_children(item_id):
-    return list(r_serv_metadata.smembers('paste_children:{}'.format(item_id)))
-
-def add_item_parent(item_parent, item_id):
-    return item_basic.add_item_parent(item_parent, item_id)
+def get_item_domain_with_port(item_id):
+    return r_serv_metadata.hget('paste_metadata:{}'.format(item_id), 'domain')

 def get_item_link(item_id):
     return r_serv_metadata.hget('paste_metadata:{}'.format(item_id), 'real_link')
@@ -423,12 +405,32 @@ def delete_item(obj_id):
         else:
             for obj2_id in obj_correlations[correlation]:
                 Correlate_object.delete_obj_relationship(correlation, obj2_id, 'item', obj_id)

+    # delete father/child
+    delete_node(obj_id)
+
+    # delete item metadata
+    r_serv_metadata.delete('paste_metadata:{}'.format(obj_id))
+
     return True

-    ### REQUIRE MORE WORK
-    # delete child/son !!!
     ### TODO in inport V2
     # delete from tracked items
     # delete from queue
     ###
     return False
+
+#### ####
+def delete_node(item_id):
+    if is_node(item_id):
+        if is_crawled(item_id):
+            delete_domain_node(item_id)
+        item_basic._delete_node(item_id)
+
+def delete_domain_node(item_id):
+    if is_domain_root(item_id):
+        # remove from domain history
+        domain, port = get_item_domain_with_port(item_id).split(':')
+        domain_basic.delete_domain_item_core(item_id, domain, port)
+    for child_id in get_all_domain_node_by_item_id(item_id):
+        delete_item(child_id)
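`delete_node` and `delete_domain_node` are mutually recursive with `delete_item`: deleting a crawled root walks every child before the item itself disappears, and the domain is parsed back out of the stored `domain:port` string. A toy model of that depth-first walk; the ids and the in-memory tree are illustrative stand-ins for the Redis-backed helpers:

```python
# Toy stand-in for the crawler tree: item id -> child item ids.
CHILDREN = {
    'crawled/2020/05/12/root': ['crawled/2020/05/12/page1', 'crawled/2020/05/12/page2'],
    'crawled/2020/05/12/page1': [],
    'crawled/2020/05/12/page2': [],
}

def get_all_domain_node_by_item_id(item_id):
    return CHILDREN.get(item_id, [])

def delete_item(item_id):
    # Depth-first: every descendant is removed before the node itself.
    for child_id in get_all_domain_node_by_item_id(item_id):
        delete_item(child_id)
    CHILDREN.pop(item_id, None)
    print('deleted', item_id)

# The stored value is "domain:port"; the patch uses split(':'),
# rsplit(':', 1) shown here is the defensive variant of the same parse.
domain, port = 'example.onion:80'.rsplit(':', 1)

delete_item('crawled/2020/05/12/root')
```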
@@ -8,10 +8,10 @@ import datetime

 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
 import Date
-import Item

 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
 import ConfigLoader
+import item_basic

 from pytaxonomies import Taxonomies
 from pymispgalaxies import Galaxies, Clusters

@@ -383,8 +383,8 @@ def add_obj_tag(object_type, object_id, tag, obj_date=None):
         r_serv_tags.sadd('{}:{}'.format(tag, obj_date), object_id)

         # add domain tag
-        if Item.is_crawled(object_id) and tag!='infoleak:submission="crawler"' and tag != 'infoleak:submission="manual"':
-            domain = Item.get_item_domain(object_id)
+        if item_basic.is_crawled(object_id) and tag!='infoleak:submission="crawler"' and tag != 'infoleak:submission="manual"':
+            domain = item_basic.get_item_domain(object_id)
             add_tag("domain", tag, domain)
     else:
         r_serv_metadata.sadd('tag:{}'.format(object_id), tag)

@@ -432,7 +432,7 @@ def delete_tag(object_type, tag, object_id, obj_date=None):
 # # TODO: move me
 def get_obj_date(object_type, object_id):
     if object_type == "item":
-        return int(Item.get_item_date(object_id))
+        return int(item_basic.get_item_date(object_id))
     else:
         return None

@@ -573,3 +573,19 @@ def get_obj_by_tags(object_type, l_tags, date_from=None, date_to=None, nb_obj=50
     l_tagged_obj = list(l_tagged_obj)

     return {"tagged_obj":l_tagged_obj, "page":page, "nb_pages":nb_pages, "nb_first_elem":start+1, "nb_last_elem":stop, "nb_all_elem":nb_all_elem}
+
+
+#### TAGS EXPORT ####
+# # TODO:
+def is_updated_tags_to_export(): # by type
+    return False
+
+def get_list_of_solo_tags_to_export_by_type(export_type): # by type
+    if export_type in ['misp', 'thehive']:
+        return r_serv_db.smembers('whitelist_{}'.format(export_type))
+    else:
+        return None
+    #r_serv_db.smembers('whitelist_hive')
+
+
+#### -- ####
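The tag storage underneath these helpers is one Redis set per (tag, day), which is what makes date-ranged queries such as `get_obj_by_tags` cheap. A minimal sketch of the write path plus a hypothetical day-scoped reader; the connection and example values are assumptions:

```python
import redis

# Assumed connection; AIL keeps tags in the "ARDB_Tags" database.
r_serv_tags = redis.Redis(host='localhost', port=6380, db=0, decode_responses=True)

def add_obj_tag(tag, object_id, obj_date):
    # One set per (tag, day): "all items tagged X on date D" is a single SMEMBERS.
    r_serv_tags.sadd('{}:{}'.format(tag, obj_date), object_id)

def get_obj_by_tag_and_date(tag, obj_date):
    # Hypothetical reader, not in the diff.
    return r_serv_tags.smembers('{}:{}'.format(tag, obj_date))

add_obj_tag('infoleak:automatic-detection="credential"', 'submitted/2020/05/12/item.gz', '20200512')
print(get_obj_by_tag_and_date('infoleak:automatic-detection="credential"', '20200512'))
```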
@@ -39,6 +39,8 @@ sender_user =

 [Flask]
 #Proxying requests to the app
 baseUrl = /
+#Host to bind to
+host = 0.0.0.0
 #Flask server port
 port = 7000
 #Number of logs to display in the dashboard
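The new `host` option exists so the Flask bind address is no longer hard-coded; later in this commit, `Flask_server.py` reads it and hands it to `app.run`. A sketch of the plain-configparser equivalent of what AIL's ConfigLoader does here; the file path is an assumption:

```python
import configparser

config = configparser.ConfigParser()
config.read('configs/core.cfg')  # assumed location of the AIL config file

# Fall back to loopback if an older config predates the new option.
host = config.get('Flask', 'host', fallback='127.0.0.1')
port = config.getint('Flask', 'port', fallback=7000)
print('Flask will bind to {}:{}'.format(host, port))
```

The shipped default stays `0.0.0.0`, but operators who only want local access can now change one config line instead of patching `Flask_server.py`.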
@@ -8,10 +8,10 @@ install_docker() {
     sudo docker pull scrapinghub/splash;
 }

-install_python_requirement() {
-    . ./AILENV/bin/activate;
-    pip3 install -U -r crawler_requirements.txt;
-}
+# install_python_requirement() {
+#     . ./AILENV/bin/activate;
+#     pip3 install -U -r crawler_requirements.txt;
+# }

 install_all() {
     read -p "Do you want to install docker? (use local splash server) [y/n] " -n 1 -r

@@ -20,7 +20,6 @@ install_all() {
     then
         install_docker;
     fi
-    install_python_requirement;
 }

 usage() {
@@ -16,7 +16,6 @@ if [ -z "$VIRTUAL_ENV" ]; then
     echo export AIL_REDIS=$(pwd)/redis/src/ >> ./AILENV/bin/activate
     echo export AIL_ARDB=$(pwd)/ardb/src/ >> ./AILENV/bin/activate
-
 fi

 # activate virtual environment
@@ -88,8 +88,10 @@ fi
 # create AILENV + intall python packages
 ./install_virtualenv.sh

-# force virtual environment
-. ./AILENV/bin/activate
+# force virtualenv activation
+if [ -z "$VIRTUAL_ENV" ]; then
+    . ./AILENV/bin/activate
+fi

 pushd ${AIL_BIN}/helper/gen_cert
 ./gen_root.sh

@@ -98,8 +100,8 @@ wait
 wait
 popd

-cp ${AIL_BIN}/helper/gen_cert/server.crt ${AIL_FLASK}server.crt
-cp ${AIL_BIN}/helper/gen_cert/server.key ${AIL_FLASK}server.key
+cp ${AIL_BIN}/helper/gen_cert/server.crt ${AIL_FLASK}/server.crt
+cp ${AIL_BIN}/helper/gen_cert/server.key ${AIL_FLASK}/server.key

 mkdir -p $AIL_HOME/PASTES

@@ -111,7 +113,7 @@ $AIL_HOME/doc/generate_modules_data_flow_graph.sh
 # init update version
 pushd ${AIL_HOME}
 # shallow clone
-git fetch --tags --prune --unshallow
+git fetch --tags --prune --depth=10000
 git describe --abbrev=0 --tags | tr -d '\n' > ${AIL_HOME}/update/current_version
 echo "AIL current version:"
 git describe --abbrev=0 --tags
71
reset_AIL.sh
@@ -6,6 +6,29 @@ GREEN="\\033[1;32m"

 [ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;

+function helptext {
+    echo -e $GREEN"
+
+              .o.         ooooo      ooooo
+             .888.        \`888'      \`888'
+            .8\"888.        888        888
+           .8' \`888.       888        888
+          .88ooo8888.      888        888
+         .8'     \`888.     888        888       o
+        o88o     o8888o   o o888o    o o888ooooood8
+
+         Analysis Information Leak framework
+    "$DEFAULT"
+    Use this script to reset AIL (DB + stored items):

+    Usage:
+    -----
+    reset_AIL.sh
+      [--softReset]     Keep All users accounts
+      [-h | --help]     Help
+    "
+}
+
 function reset_dir {
     # Access dirs and delete
     cd $AIL_HOME

@@ -23,16 +46,6 @@ function reset_dir {
         popd
     fi

-    if [ $userInput -eq $num ]
-    then
-        if [ -d DATA_ARDB/ ]; then
-            pushd DATA_ARDB/
-            rm -r *
-            echo 'cleaned DATA_ARDB'
-            popd
-        fi
-    fi
-
     if [ -d logs/ ]; then
         pushd logs/
         rm *

@@ -97,29 +110,36 @@ function flush_DB_keep_user {
     bash ${AIL_BIN}LAUNCH.sh -k
 }

+function validate_reset {
+    echo -e $RED"WARNING: DELETE AIL DATA"$DEFAULT
+
+    # Make sure the reseting is intentional
+    num=$(( ( RANDOM % 100 ) + 1 ))
+
+    echo -e $RED"To reset the platform, enter the following number: "$DEFAULT $num
+    read userInput
+
+    if [ $userInput -eq $num ]
+    then
+        echo "Reseting AIL..."
+    else
+        echo "Wrong number"
+        exit 1;
+    fi
+}
+
 function soft_reset {
+    validate_reset;
     reset_dir;
     flush_DB_keep_user;
 }

 #If no params,
 [[ $@ ]] || {
-    # Make sure the reseting is intentional
-    num=$(( ( RANDOM % 100 ) + 1 ))
-
-    echo -e $RED"To reset the platform, enter the following number: "$DEFAULT $num
-    read userInput
-
-    if [ $userInput -eq $num ]
-    then
-        echo "Reseting AIL..."
-    else
-        echo "Wrong number"
-        exit 1;
-    fi
+    validate_reset;

     num=$(( ( RANDOM % 100 ) + 1 ))
-    echo -e $RED"If yes you want to delete the DB , enter the following number: "$DEFAULT $num
+    echo -e $RED"If you want to delete the DB , enter the following number: "$DEFAULT $num
     read userInput

     reset_dir;

@@ -143,6 +163,9 @@ while [ "$1" != "" ]; do
     case $1 in
         --softReset ) soft_reset;
                       ;;
+        -h | --help ) helptext;
+                      exit
+                      ;;
         * ) exit 1
     esac
     shift
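Factoring the confirmation prompt into `validate_reset` removes the copy of it that had leaked into `reset_dir` (the stray `DATA_ARDB` block deleted above) and lets `soft_reset` reuse the same guard. The same random-number confirmation, rendered in Python purely for illustration:

```python
import random
import sys

def validate_reset():
    # The user must echo a random number back, so a reset cannot happen
    # from a blindly pasted command or an accidental Enter.
    num = random.randint(1, 100)
    user_input = input('To reset the platform, enter the following number: {} '.format(num))
    if user_input.strip() == str(num):
        print('Reseting AIL...')
    else:
        print('Wrong number')
        sys.exit(1)
```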
@@ -49,6 +49,7 @@ Flask_dir = os.environ['AIL_FLASK']
 # CONFIG #
 config_loader = ConfigLoader.ConfigLoader()
 baseUrl = config_loader.get_config_str("Flask", "baseurl")
+host = config_loader.get_config_str("Flask", "host")
 baseUrl = baseUrl.replace('/', '')
 if baseUrl != '':
     baseUrl = '/'+baseUrl

@@ -253,4 +254,4 @@ r_serv_db.sadd('list_export_tags', 'infoleak:submission="manual"')
 # ============ MAIN ============

 if __name__ == "__main__":
-    app.run(host='0.0.0.0', port=FLASK_PORT, threaded=True, ssl_context=ssl_context)
+    app.run(host=host, port=FLASK_PORT, threaded=True, ssl_context=ssl_context)
@@ -25,7 +25,7 @@ import Correlate_object
 import Domain
 import Screenshot
 import btc_ail
-import telegram
+import Username

 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
 import Cryptocurrency

@@ -110,7 +110,7 @@ def get_card_metadata(object_type, correlation_id, type_id=None, expand_card=Fal
         card_dict["sparkline"] = Pgp.pgp.get_list_nb_previous_correlation_object(type_id, correlation_id, 6)
         card_dict["icon"] = Correlate_object.get_correlation_node_icon(object_type, type_id)
     elif object_type == 'username':
-        card_dict["sparkline"] = telegram.correlation.get_list_nb_previous_correlation_object(type_id, correlation_id, 6)
+        card_dict["sparkline"] = Username.correlation.get_list_nb_previous_correlation_object(type_id, correlation_id, 6)
         card_dict["icon"] = Correlate_object.get_correlation_node_icon(object_type, type_id)
     elif object_type == 'decoded':
         card_dict["sparkline"] = Decoded.get_list_nb_previous_hash(correlation_id, 6)
@@ -115,7 +115,7 @@ def showDomain():
         dict_domain['tags_safe'] = Tag.is_tags_safe(dict_domain['tags'])
         dict_domain['history'] = domain.get_domain_history_with_status()
         dict_domain['crawler_history'] = domain.get_domain_items_crawled(items_link=True, epoch=epoch, item_screenshot=True, item_tag=True) # # TODO: handle multiple port
-        if dict_domain['crawler_history']['items']:
+        if dict_domain['crawler_history'].get('items', []):
             dict_domain['crawler_history']['random_item'] = random.choice(dict_domain['crawler_history']['items'])

     return render_template("showDomain.html", dict_domain=dict_domain, bootstrap_label=bootstrap_label,
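The one-line change swaps an unchecked subscript for `dict.get`, so an epoch with no crawled items renders the page instead of raising `KeyError`:

```python
crawler_history = {'epoch': 1589241600}   # hypothetical: no 'items' key yet

# crawler_history['items'] would raise KeyError here;
# .get('items', []) is falsy for both a missing key and an empty list.
if crawler_history.get('items', []):
    print('pick a random item')
else:
    print('nothing crawled for this epoch')
```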
@@ -54,7 +54,7 @@ def login():
     if request.method == 'POST':
         username = request.form.get('username')
         password = request.form.get('password')
-        #next_page = request.form.get('next_page')
+        next_page = request.form.get('next_page')

         if username is not None:
             user = User.get(username)

@@ -74,7 +74,10 @@ def login():
                 if user.request_password_change():
                     return redirect(url_for('root.change_password'))
                 else:
-                    return redirect(url_for('dashboard.index'))
+                    if next_page and next_page!='None':
+                        return redirect(next_page)
+                    else:
+                        return redirect(url_for('dashboard.index'))
             # login failed
             else:
                 # set brute force protection

@@ -91,9 +94,9 @@ def login():
             return 'please provide a valid username'

     else:
-        #next_page = request.args.get('next')
+        next_page = request.args.get('next')
         error = request.args.get('error')
-        return render_template("login.html" , error=error)
+        return render_template("login.html" , next_page=next_page, error=error)

 @root.route('/change_password', methods=['POST', 'GET'])
 @login_required
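After login the user is now bounced back to the page that sent them there, with the literal string 'None' filtered out because the hidden form field serializes a missing value that way. The patch follows `next_page` as-is; a hardened variant, which is not what this commit does, would also refuse absolute URLs to avoid an open redirect:

```python
from urllib.parse import urlparse

def is_safe_redirect(next_page):
    # Reject empty values, the template's 'None' placeholder, and any
    # absolute URL (a non-empty netloc means it points off-site).
    return bool(next_page) and next_page != 'None' and not urlparse(next_page).netloc

assert is_safe_redirect('/correlation/show_correlation?type=domain')
assert not is_safe_redirect('https://evil.example/phish')
assert not is_safe_redirect('None')
```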
@@ -39,7 +39,7 @@ hashDecoded = Blueprint('hashDecoded', __name__, template_folder='templates')
 ## TODO: put me in option
 all_cryptocurrency = ['bitcoin', 'ethereum', 'bitcoin-cash', 'litecoin', 'monero', 'zcash', 'dash']
 all_pgpdump = ['key', 'name', 'mail']
-all_username = ['telegram']
+all_username = ['telegram', 'twitter']

 # ============ FUNCTIONS ============

@@ -128,6 +128,8 @@ def get_icon(correlation_type, type_id):
     elif correlation_type == 'username':
         if type_id == 'telegram':
             icon_text = 'fab fa-telegram-plane'
+        elif type_id == 'twitter':
+            icon_text = 'fab fa-twitter'
     return icon_text

 def get_icon_text(correlation_type, type_id):

@@ -153,6 +155,8 @@ def get_icon_text(correlation_type, type_id):
     elif correlation_type == 'username':
         if type_id == 'telegram':
             icon_text = '\uf2c6'
+        elif type_id == 'twitter':
+            icon_text = '\uf099'
     return icon_text

 def get_all_types_id(correlation_type):
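With a second username type, the two if/elif chains start to repeat themselves; the same mapping could live in one table, which is worth considering if more networks are added. A sketch of that alternative; the dict layout is an illustration, not the patch's structure, though the values are copied from it:

```python
# (CSS class for the page, Font Awesome glyph for the SVG node)
USERNAME_ICONS = {
    'telegram': ('fab fa-telegram-plane', '\uf2c6'),
    'twitter':  ('fab fa-twitter', '\uf099'),
}

def get_icon(type_id):
    return USERNAME_ICONS.get(type_id, ('', ''))[0]

def get_icon_text(type_id):
    return USERNAME_ICONS.get(type_id, ('', ''))[1]
```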
@@ -152,7 +152,16 @@
             </svg>
             telegram
           </div>
-        </td>
+          <div class="my-1">
+            <svg height="26" width="26">
+              <g class="nodes">
+                <circle cx="13" cy="13" r="13" fill="#4dffff"></circle>
+                <text x="13" y="13" text-anchor="middle" dominant-baseline="central" class="graph_node_icon fab" font-size="16px"></text>
+              </g>
+            </svg>
+            twitter
+          </div>
+        </td>
         <td>
           <div class="my-1">
             <svg height="26" width="26">
@@ -73,6 +73,7 @@
           <input type="email" id="inputEmail" name="username" class="form-control" placeholder="Email address" required autofocus>
           <label for="inputPassword" class="sr-only">Password</label>
           <input type="password" id="inputPassword" name="password" class="form-control {% if error %}is-invalid{% endif %}" placeholder="Password" required>
+          <input type="text" id="next_page" name="next_page" value="{{next_page}}" hidden>
           {% if error %}
           <div class="invalid-feedback">
             {{error}}
@@ -62,8 +62,6 @@ mv temp/d3.min.js ./static/js/
 mv temp/moment-2.24.0/min/moment.min.js ./static/js/
 mv temp/jquery-date-range-picker-0.20.0/dist/jquery.daterangepicker.min.js ./static/js/

-rm -rf temp
-
 JQVERSION="3.4.1"
 wget -q http://code.jquery.com/jquery-${JQVERSION}.js -O ./static/js/jquery.js

@@ -76,9 +74,11 @@ wget -q https://cdn.datatables.net/1.10.20/css/dataTables.bootstrap4.min.css -O
 wget -q https://cdn.datatables.net/1.10.20/js/dataTables.bootstrap4.min.js -O ./static/js/dataTables.bootstrap.min.js

 #Ressources for bootstrap popover
-POPPER_VERSION="2.0.6"
-wget -q https://unpkg.com/@popperjs/core@${POPPER_VERSION}/dist/umd/popper.min.js -O ./static/js/popper.min.js
-wget -q https://unpkg.com/@popperjs/core@${POPPER_VERSION}/dist/umd/popper.min.js.map -O ./static/js/popper.min.js.map
+POPPER_VERSION="1.16.1"
+wget -q https://github.com/FezVrasta/popper.js/archive/v${POPPER_VERSION}.zip -O temp/popper.zip
+unzip -qq temp/popper.zip -d temp/
+mv temp/popper-core-${POPPER_VERSION}/dist/umd/popper.min.js ./static/js/
+mv temp/popper-core-${POPPER_VERSION}/dist/umd/popper.min.js.map ./static/js/

 #Ressource for graph
 wget -q https://raw.githubusercontent.com/flot/flot/958e5fd43c6dff4bab3e1fd5cb6109df5c1e8003/jquery.flot.js -O ./static/js/jquery.flot.js

@@ -88,7 +88,6 @@ wget -q https://raw.githubusercontent.com/flot/flot/958e5fd43c6dff4bab3e1fd5cb61

 #Ressources for sparkline and canvasJS and slider
 wget -q http://omnipotent.net/jquery.sparkline/2.1.2/jquery.sparkline.min.js -O ./static/js/jquery.sparkline.min.js
-mkdir temp
 wget -q http://canvasjs.com/fdm/chart/ -O temp/canvasjs.zip
 unzip -qq temp/canvasjs.zip -d temp/
 mv temp/jquery.canvasjs.min.js ./static/js/jquery.canvasjs.min.js

@@ -97,6 +96,8 @@ wget -q https://jqueryui.com/resources/download/jquery-ui-1.12.1.zip -O temp/jqu
 unzip -qq temp/jquery-ui.zip -d temp/
 mv temp/jquery-ui-1.12.1/jquery-ui.min.js ./static/js/jquery-ui.min.js
 mv temp/jquery-ui-1.12.1/jquery-ui.min.css ./static/css/jquery-ui.min.css
+
+
 rm -rf temp

 mkdir -p ./static/image

@@ -112,11 +113,11 @@ then
 fi

 #Update MISP Taxonomies and Galaxies
-python3 -m pip install git+https://github.com/MISP/PyTaxonomies --upgrade
-python3 -m pip install git+https://github.com/MISP/PyMISPGalaxies --upgrade
+pip3 install git+https://github.com/MISP/PyTaxonomies --upgrade
+pip3 install git+https://github.com/MISP/PyMISPGalaxies --upgrade

 #Update PyMISP
-python3 -m pip install git+https://github.com/MISP/PyMISP --upgrade
+pip3 install git+https://github.com/MISP/PyMISP --upgrade

 #Update the Hive
-python3 -m pip install thehive4py --upgrade
+pip3 install thehive4py --upgrade