Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-10 00:28:22 +00:00)
chg: [modules + correlation] migrate Cve,Iban,Language + fix correlation graph
Parent: 3b07d88709 · Commit: eeff786ea5
18 changed files with 178 additions and 243 deletions
@@ -1,35 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-from pubsublogger import publisher
-from Helper import Process
-import datetime
-import time
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-
-    config_section = 'DumpValidOnion'
-    dump_file = 'dump.out'
-
-    p = Process(config_section)
-
-    # FUNCTIONS #
-    publisher.info("Script subscribed to channel ValidOnion")
-
-    while True:
-        message = p.get_from_set()
-        if message is not None:
-            f = open(dump_file, 'a')
-            while message is not None:
-                print(message)
-                date = datetime.datetime.now()
-                if message is not None:
-                    f.write(date.isoformat() + ' ' + message + '\n')
-                else:
-                    break
-                message = p.get_from_set()
-            f.close()
-        else:
-            time.sleep(20)
@@ -211,11 +211,17 @@ function launching_scripts {
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "CreditCards" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./CreditCards.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Cve" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Cve.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Decoder" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Decoder.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Duplicates" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Duplicates.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Iban" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Iban.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Keys" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Keys.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Onion" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Onion.py; read x"
    screen -S "Script_AIL" -X screen -t "Languages" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Languages.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Mail" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Mail.py; read x"
    sleep 0.1
@@ -223,6 +229,8 @@ function launching_scripts {
    # sleep 0.1
    screen -S "Script_AIL" -X screen -t "ModuleStats" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./ModuleStats.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Onion" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Onion.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Telegram" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Telegram.py; read x"
    sleep 0.1
@@ -267,22 +275,14 @@ function launching_scripts {
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Mixer" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Mixer.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Duplicates" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Duplicates.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "BankAccount" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./BankAccount.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "PgpDump" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./PgpDump.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Cryptocurrency" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Cryptocurrencies.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Tools" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Tools.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Cve" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Cve.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./MISP_The_Hive_feeder.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Languages" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Languages.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "IPAddress" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./IPAddress.py; read x"

    #screen -S "Script_AIL" -X screen -t "Release" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Release.py; read x"
@@ -1,32 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-import os
-import sys
-import time
-
-from packages import Item
-from lib import Domain
-
-from pubsublogger import publisher
-from Helper import Process
-
-if __name__ == '__main__':
-    publisher.port = 6380
-    publisher.channel = 'Script'
-    # Section name in bin/packages/modules.cfg
-    config_section = 'Languages'
-    # Setup the I/O queues
-    p = Process(config_section)
-
-    while True:
-        message = p.get_from_set()
-        if message is None:
-            publisher.debug("{} queue is empty, waiting".format(config_section))
-            time.sleep(1)
-            continue
-
-        item_id = Item.get_item_id(message)
-        if Item.is_crawled(item_id):
-            domain = Item.get_item_domain(item_id)
-            Domain.add_domain_languages_by_item_id(domain, item_id)
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-#
-# This file is part of AIL framework - Analysis Information Leak framework
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Copyright (c) 2014 Alexandre Dulaunoy - a@foo.be
-
-import argparse
-import gzip
-import os
-import sys
-
-sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
-import ConfigLoader
-
-def readdoc(path=None):
-    if path is None:
-        return False
-    f = gzip.open(path, 'r')
-    return f.read()
-
-config_loader = ConfigLoader.ConfigLoader()
-
-# Indexer configuration - index dir and schema setup
-indexpath = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Indexer", "path"))
-indexertype = config_loader.get_config_str("Indexer", "type")
-
-argParser = argparse.ArgumentParser(description='Fulltext search for AIL')
-argParser.add_argument('-q', action='append', help='query to lookup (one or more)')
-argParser.add_argument('-n', action='store_true', default=False, help='return numbers of indexed documents')
-argParser.add_argument('-t', action='store_true', default=False, help='dump top 500 terms')
-argParser.add_argument('-l', action='store_true', default=False, help='dump all terms encountered in indexed documents')
-argParser.add_argument('-f', action='store_true', default=False, help='dump each matching document')
-argParser.add_argument('-v', action='store_true', default=False, help='Include filepath')
-argParser.add_argument('-s', action='append', help='search similar documents')
-
-args = argParser.parse_args()
-
-from whoosh import index
-from whoosh.fields import Schema, TEXT, ID
-
-schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)
-
-ix = index.open_dir(indexpath)
-
-from whoosh.qparser import QueryParser
-
-if args.n:
-    print(ix.doc_count_all())
-    exit(0)
-
-if args.l:
-    xr = ix.searcher().reader()
-    for x in xr.lexicon("content"):
-        print (x)
-    exit(0)
-
-if args.t:
-    xr = ix.searcher().reader()
-    for x in xr.most_frequent_terms("content", number=500, prefix=''):
-        print (x)
-    exit(0)
-
-if args.s:
-    # By default, the index is not storing the vector of the document (Whoosh
-    # document schema). It won't work if you don't change the schema of the
-    # index for the content. It depends of your storage strategy.
-    docnum = ix.searcher().document_number(path=args.s)
-    r = ix.searcher().more_like(docnum, "content")
-    for hit in r:
-        print(hit["path"])
-    exit(0)
-
-if args.q is None:
-    argParser.print_help()
-    exit(1)
-
-with ix.searcher() as searcher:
-    query = QueryParser("content", ix.schema).parse(" ".join(args.q))
-    results = searcher.search(query, limit=None)
-    for x in results:
-        if args.f:
-            if args.v:
-                print (x.items()[0][1])
-                print (readdoc(path=x.items()[0][1]))
-            else:
-                print (x.items()[0][1])
-    print
@@ -99,7 +99,6 @@ def is_obj_correlated(obj_type, subtype, obj_id, obj2_type, subtype2, obj2_id):
     return r_metadata.sismember(f'correlation:obj:{obj_type}:{subtype}:{obj2_type}:{obj_id}', '{subtype2}:{obj2_id}')
 
 def add_obj_correlation(obj1_type, subtype1, obj1_id, obj2_type, subtype2, obj2_id):
-    print(obj1_type, subtype1, obj1_id, obj2_type, subtype2, obj2_id)
     if subtype1 is None:
         subtype1 = ''
     if subtype2 is None:
@@ -132,7 +131,7 @@ def get_correlations_graph_nodes_links(obj_type, subtype, obj_id, filter_types=[
 
     obj_str_id = get_obj_str_id(obj_type, subtype, obj_id)
 
-    _get_correlations_graph_node(links, nodes, obj_type, subtype, obj_id, level, max_nodes, filter_types=[], previous_str_obj='')
+    _get_correlations_graph_node(links, nodes, obj_type, subtype, obj_id, level, max_nodes, filter_types=filter_types, previous_str_obj='')
     return obj_str_id, nodes, links
 
 
@@ -140,8 +139,8 @@ def _get_correlations_graph_node(links, nodes, obj_type, subtype, obj_id, level,
     obj_str_id = get_obj_str_id(obj_type, subtype, obj_id)
     nodes.add(obj_str_id)
 
-    obj_correlations = get_correlations(obj_type, subtype, obj_id, filter_types=[])
-    print(obj_correlations)
+    obj_correlations = get_correlations(obj_type, subtype, obj_id, filter_types=filter_types)
+    #print(obj_correlations)
     for correl_type in obj_correlations:
         for str_obj in obj_correlations[correl_type]:
             subtype2, obj2_id = str_obj.split(':', 1)
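The two hunks above are the actual correlation-graph fix: the recursive node builder used to hard-code `filter_types=[]`, so whatever filter the caller selected was silently discarded and the graph always expanded every object type. A minimal standalone sketch of the difference, using a plain dictionary instead of the real correlation backend (the data and helper below are hypothetical, for illustration only):

```python
# Hypothetical, self-contained illustration of why forwarding filter_types matters.
# The real get_correlations() reads from the correlation database; here it is a dict.
CORRELATIONS = {
    'item:abc': {'cve': ['CVE-2021-44228'], 'pgp': ['keyA']},
}

def get_correlations(obj_str_id, filter_types=[]):
    correl = CORRELATIONS.get(obj_str_id, {})
    if filter_types:
        # Keep only the object types the caller asked for.
        correl = {t: v for t, v in correl.items() if t in filter_types}
    return correl

# Old behaviour: the recursion passed an empty filter, expanding every type.
print(get_correlations('item:abc', filter_types=[]))       # {'cve': [...], 'pgp': [...]}
# New behaviour: the caller's filter is forwarded.
print(get_correlations('item:abc', filter_types=['cve']))  # {'cve': ['CVE-2021-44228']}
```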
@@ -16,13 +16,27 @@ from lib.item_basic import is_crawled, get_item_domain
from packages import Date

sys.path.append('../../configs/keys')
try:
    from virusTotalKEYS import vt_key
    if vt_key != '':
        VT_TOKEN = vt_key
        VT_ENABLED = True
        #print('VT submission is enabled')
    else:
        VT_ENABLED = False
        #print('VT submission is disabled')
except:
    VT_TOKEN = None
    VT_ENABLED = False
    #print('VT submission is disabled')

config_loader = ConfigLoader()
r_metadata = config_loader.get_db_conn("Kvrocks_Objects")

r_metadata = config_loader.get_redis_conn("ARDB_Metadata")
HASH_DIR = config_loader.get_config_str('Directories', 'hash')
baseurl = config_loader.get_config_str("Notifications", "ail_domain")
VT_TOKEN = 'f1a6281c8a533172a45d901435452f67f5e61fd06a83dcc058f3f7b4aab66f5b'
config_loader = None
@@ -44,6 +44,9 @@ class Domain(AbstractObject):
         else:
             return 'regular'
 
+    def exists(self):
+        return r_onion.exists(f'{self.domain_type}_metadata:{self.id}')
+
     def get_first_seen(self, r_int=False, separator=True):
         first_seen = r_onion.hget(f'{self.domain_type}_metadata:{self.id}', 'first_seen')
         if first_seen:
@@ -160,7 +163,7 @@ class Domain(AbstractObject):
         meta['type'] = self.domain_type
         meta['first_seen'] = self.get_first_seen()
         meta['last_check'] = self.get_last_check()
-        meta['tags'] = self.get_tags()
+        meta['tags'] = self.get_tags(r_list=True)
         meta['ports'] = self.get_ports()
         meta['status'] = self.is_up(ports=meta['ports'])
 
@@ -52,6 +52,9 @@ class Item(AbstractObject):
     def __init__(self, id):
         super(Item, self).__init__('item', id)
 
+    def exists(self):
+        return os.path.isfile(self.get_filename())
+
     def get_date(self, separator=False):
         """
         Returns Item date
@@ -250,7 +253,7 @@ class Item(AbstractObject):
         meta['id'] = self.id
         meta['date'] = self.get_date(separator=True) ############################ # TODO:
         meta['source'] = self.get_source()
-        meta['tags'] = self.get_tags()
+        meta['tags'] = self.get_tags(r_list=True)
         # optional meta fields
         if 'content' in options:
             meta['content'] = self.get_content()
@@ -296,6 +299,22 @@ class Item(AbstractObject):
             nb_line += 1
         return {'nb': nb_line, 'max_length': max_length}
 
+    def get_languages(self, min_len=600, num_langs=3, min_proportion=0.2, min_probability=0.7):
+        all_languages = []
+        ## CLEAN CONTENT ##
+        content = self.get_html2text_content(ignore_links=True)
+        content = remove_all_urls_from_content(self.id, item_content=content) ##########################################
+        # REMOVE USELESS SPACE
+        content = ' '.join(content.split())
+        #- CLEAN CONTENT -#
+        #print(content)
+        #print(len(content))
+        if len(content) >= min_len: # # TODO: # FIXME: check num langs limit
+            for lang in cld3.get_frequent_languages(content, num_langs=num_langs):
+                if lang.proportion >= min_proportion and lang.probability >= min_probability and lang.is_reliable:
+                    all_languages.append(lang)
+        return all_languages
+
     ############################################################################
     ############################################################################
 
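For reference, the new `Item.get_languages()` only considers items whose cleaned text reaches `min_len` characters, and it keeps a cld3 prediction only if it clears three thresholds: text proportion, probability, and the reliability flag. A rough standalone sketch of that filtering step, with a hypothetical stand-in for the prediction objects returned by `cld3.get_frequent_languages()`:

```python
from collections import namedtuple

# Stand-in for cld3 prediction objects; the real ones expose the same fields
# used in the hunk above (language, probability, proportion, is_reliable).
LangPrediction = namedtuple('LangPrediction',
                            ['language', 'probability', 'proportion', 'is_reliable'])

def filter_languages(predictions, min_proportion=0.2, min_probability=0.7):
    # Same acceptance test as Item.get_languages(): keep a language only if it
    # covers enough of the text, is predicted confidently, and is flagged reliable.
    return [p for p in predictions
            if p.proportion >= min_proportion
            and p.probability >= min_probability
            and p.is_reliable]

preds = [
    LangPrediction('en', probability=0.99, proportion=0.8, is_reliable=True),
    LangPrediction('fr', probability=0.55, proportion=0.1, is_reliable=False),
]
print([p.language for p in filter_languages(preds)])  # ['en']
```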
@@ -36,6 +36,9 @@ class Screenshot(AbstractObject):
         # # TODO:
         pass
 
+    def exists(self):
+        return os.path.isfile(self.get_filepath())
+
     def get_link(self, flask_context=False):
         if flask_context:
             url = url_for('correlation.show_correlation', object_type=self.type, correlation_id=self.id)
@@ -76,22 +79,12 @@ class Screenshot(AbstractObject):
    def get_meta(self, options=set()):
        meta = {}
        meta['id'] = self.id
        metadata_dict['img'] = get_screenshot_rel_path(sha256_string) ######### # TODO: Rename ME ??????
        meta['tags'] = self.get_tags()
        meta['img'] = get_screenshot_rel_path(self.id) ######### # TODO: Rename ME ??????
        meta['tags'] = self.get_tags(r_list=True)
        # TODO: ADD IN ABSTRACT CLASS
        #meta['is_tags_safe'] = Tag.is_tags_safe(metadata_dict['tags']) ################## # TODO: ADD IN ABSZTRACT CLASS
        return meta

    ############################################################################
    ############################################################################
    ############################################################################

    def exist_correlation(self):
        pass

    ############################################################################
    ############################################################################

def get_screenshot_dir():
    return SCREENSHOT_FOLDER
@@ -65,10 +65,10 @@ class AbstractObject(ABC):
         return dict_meta
 
     ## Tags ##
-    def get_tags(self, r_set=False):
+    def get_tags(self, r_list=False):
         tags = Tag.get_object_tags(self.type, self.id, self.get_subtype(r_str=True))
-        if r_set:
-            tags = set(tags)
+        if r_list:
+            tags = list(tags)
         return tags
 
     def get_duplicates(self):
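The flag rename from `r_set` to `r_list` lines up with the `get_meta()` hunks above, which now request `get_tags(r_list=True)`, presumably because the metadata dictionaries end up serialised for the web UI and a list, unlike a set, survives `json.dumps()`. A one-liner illustrating the difference, with a hypothetical tag value:

```python
import json

tags = {'infoleak:automatic-detection="cve"'}  # hypothetical tag set from Tag.get_object_tags()
print(json.dumps(list(tags)))                  # works: a JSON array
# json.dumps(tags) would raise TypeError: a set is not JSON serializable
```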
@@ -136,6 +136,13 @@ class AbstractObject(ABC):
         """
         pass
 
+    @abstractmethod
+    def exists(self):
+        """
+        Exists Object
+        """
+        pass
+
     @abstractmethod
     def get_meta(self):
         """
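Declaring `exists()` as an `@abstractmethod` on the base class forces every concrete object type (the Item, Domain, Screenshot and subtype hunks above each add their own storage-specific check). A minimal sketch of the pattern with toy classes; the names below are illustrative, not from the framework:

```python
import os
from abc import ABC, abstractmethod

class ToyObject(ABC):
    """Illustrative only: mirrors the exists() contract added to AbstractObject."""

    @abstractmethod
    def exists(self):
        """Return True if the object is present in its backing store."""

class ToyFileObject(ToyObject):
    def __init__(self, path):
        self.path = path

    def exists(self):
        # File-backed objects (like Item or Screenshot above) check the filesystem;
        # database-backed ones would query their key instead.
        return os.path.isfile(self.path)

# ToyObject() would raise TypeError because exists() is not implemented.
print(ToyFileObject('/tmp/does-not-exist').exists())  # False
```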
@@ -44,6 +44,9 @@ class AbstractSubtypeObject(AbstractObject):
         self.type = obj_type
         self.subtype = subtype
 
+    def exists(self):
+        return r_metadata.exists(f'{self.type}_metadata_{self.subtype}:{self.id}')
+
     def get_first_seen(self, r_int=False):
         first_seen = r_metadata.hget(f'{self.type}_metadata_{self.subtype}:{self.id}', 'first_seen')
         if r_int:
@@ -37,6 +37,16 @@ class AILObjects(object): ## ??????????????????????
 def is_valid_object_type(obj_type):
     return obj_type in get_all_objects()
 
+def sanitize_objs_types(objs):
+    l_types = []
+    print('sanitize')
+    print(objs)
+    print(get_all_objects())
+    for obj in objs:
+        if is_valid_object_type(obj):
+            l_types.append(obj)
+    return l_types
+
 def get_object(obj_type, subtype, id):
     if obj_type == 'item':
         return Item(id)
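`sanitize_objs_types()` is what the correlation blueprint now feeds with the raw `correlation_names` / `correlation_objects` query strings (see the `show_correlation()` and `graph_node_json()` hunks further down): the comma-separated value is split and anything not in the known object-type list is dropped. A condensed sketch of that call path, with a hypothetical whitelist standing in for `get_all_objects()`:

```python
# Hypothetical whitelist; the real values come from ail_objects.get_all_objects().
ALL_OBJECTS = ['cryptocurrency', 'decoded', 'domain', 'item', 'pgp', 'screenshot', 'username']

def sanitize_objs_types(objs):
    # Same filtering as the hunk above, written as a comprehension for brevity.
    return [obj for obj in objs if obj in ALL_OBJECTS]

# What the blueprint does with request.args.get('correlation_names', ''):
raw = 'pgp,cryptocurrency,paste,<script>'
print(sanitize_objs_types(raw.split(',')))  # ['pgp', 'cryptocurrency'], unknown values dropped
```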
@@ -53,6 +63,10 @@ def get_object(obj_type, subtype, id):
     elif obj_type == 'username':
         return Username(id, subtype)
 
+def exists_obj(obj_type, subtype, id):
+    object = get_object(obj_type, subtype, id)
+    return object.exists()
+
 def get_object_link(obj_type, subtype, id, flask_context=False):
     object = get_object(obj_type, subtype, id)
     return object.get_link(flask_context=flask_context)
@@ -13,14 +13,16 @@ It apply CVE regexes on paste content and warn if a reference to a CVE is spotte
##################################
# Import External packages
##################################
import time
import os
import re
import sys

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from modules.abstract_module import AbstractModule
from packages import Paste
from lib.objects.Items import Item


class Cve(AbstractModule):
@@ -32,7 +34,7 @@ class Cve(AbstractModule):
         super(Cve, self).__init__()
 
         # regex to find CVE
-        self.reg_cve = re.compile(r'(CVE-)[1-2]\d{1,4}-\d{1,5}')
+        self.reg_cve = re.compile(r'CVE-[1-2]\d{1,4}-\d{1,5}')
 
         # Waiting time in secondes between to message proccessed
         self.pending_seconds = 1
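The only functional change in this hunk is dropping the capture group around `CVE-`: with a group present, `re.findall()` returns the captured `'CVE-'` prefix rather than the full identifier, which would break the new warning message below that lists the matched CVEs. A quick check with the standard library, on made-up sample text:

```python
import re

text = 'patched against CVE-2021-44228 and CVE-2014-0160'  # sample text, not from the repo

old = re.compile(r'(CVE-)[1-2]\d{1,4}-\d{1,5}')
new = re.compile(r'CVE-[1-2]\d{1,4}-\d{1,5}')

print(old.findall(text))  # ['CVE-', 'CVE-']  -> findall returns the capture group only
print(new.findall(text))  # ['CVE-2021-44228', 'CVE-2014-0160']
```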
@@ -43,28 +45,23 @@ class Cve(AbstractModule):
 
     def compute(self, message):
 
-        filepath, count = message.split()
-        paste = Paste.Paste(filepath)
-        content = paste.get_p_content()
+        item_id, count = message.split()
+        item = Item(item_id)
+        item_id = item.get_id()
 
-        # list of the regex results in the Paste, may be null
-        results = set(self.reg_cve.findall(content))
-
-        # if the list is positive, we consider the Paste may contain a list of cve
-        if len(results) > 0:
-            warning = f'{paste.p_name} contains CVEs'
+        cves = self.regex_findall(self.reg_cve, item_id, item.get_content())
+        if cves:
+            warning = f'{item_id} contains CVEs {cves}'
             print(warning)
             self.redis_logger.warning(warning)
 
-            msg = f'infoleak:automatic-detection="cve";{filepath}'
+            msg = f'infoleak:automatic-detection="cve";{item_id}'
             # Send to Tags Queue
             self.send_message_to_queue(msg, 'Tags')
-            # Send to Duplicate Queue
-            self.send_message_to_queue(filepath, 'Duplicate')
 
 
 
 if __name__ == '__main__':
 
     module = Cve()
     module.run()
bin/modules/Languages.py (new executable file, 36 lines)
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import os
+import sys
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from modules.abstract_module import AbstractModule
+from lib.objects.Domains import Domain
+from lib.objects.Items import Item
+#from lib.ConfigLoader import ConfigLoader
+
+class Languages(AbstractModule):
+    """
+    Languages module for AIL framework
+    """
+
+    def __init__(self):
+        super(Languages, self).__init__()
+
+        # Send module state to logs
+        self.redis_logger.info(f'Module {self.module_name} initialized')
+
+    def compute(self, message):
+        item = Item(message)
+        if item.is_crawled():
+            domain = Domain(item.get_domain())
+            for lang in item.get_languages(min_probability=0.8):
+                domain.add_language(lang.language)
+
+if __name__ == '__main__':
+    module = Languages()
+    module.run()
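The new module is intentionally thin: `compute()` receives an item ID from its queue, and only crawled items contribute languages to their domain. A hypothetical dry run of that control flow with stand-in objects (the real `Item` and `Domain` classes hit the AIL databases; everything below is illustrative only):

```python
from collections import namedtuple

Lang = namedtuple('Lang', ['language', 'probability'])

class FakeItem:
    """Stand-in for lib.objects.Items.Item, for illustration only."""
    def __init__(self, item_id, crawled, langs):
        self.id, self._crawled, self._langs = item_id, crawled, langs
    def is_crawled(self):
        return self._crawled
    def get_domain(self):
        return 'example.onion'
    def get_languages(self, min_probability=0.7):
        return [lang for lang in self._langs if lang.probability >= min_probability]

domain_languages = set()
item = FakeItem('some/item/id', crawled=True,
                langs=[Lang('en', 0.95), Lang('ru', 0.4)])

# Same control flow as Languages.compute(): a crawled item feeds its domain's language set.
if item.is_crawled():
    for lang in item.get_languages(min_probability=0.8):
        domain_languages.add(lang.language)

print(domain_languages)  # {'en'}
```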
@@ -26,7 +26,6 @@ sys.path.append(os.environ['AIL_BIN'])
##################################
from lib.objects import ail_objects


################################################################################

@@ -178,7 +177,7 @@ def show_correlation():
            correlation_objects.append('domain')
        correl_option = request.form.get('PasteCheck')
        if correl_option:
-            correlation_objects.append('paste')
+            correlation_objects.append('item')
 
        # list as params
        correlation_names = ",".join(correlation_names)
@@ -198,8 +197,8 @@ def show_correlation():
 
        expand_card = request.args.get('expand_card')
 
-        correlation_names = sanitise_correlation_names(request.args.get('correlation_names'))
-        correlation_objects = sanitise_correlation_objects(request.args.get('correlation_objects'))
+        correlation_names = ail_objects.sanitize_objs_types(request.args.get('correlation_names', '').split(','))
+        correlation_objects = ail_objects.sanitize_objs_types(request.args.get('correlation_objects', '').split(','))
 
        # # TODO: remove me, rename screenshot to image
        if object_type == 'image':
@@ -244,11 +243,11 @@ def get_description():
 
    # check if correlation_id exist
    # # TODO: return error json
-    if not Correlate_object.exist_object(object_type, correlation_id, type_id=type_id):
+    if not ail_objects.exists_obj(object_type, type_id, correlation_id):
        return Response(json.dumps({"status": "error", "reason": "404 Not Found"}, indent=2, sort_keys=True), mimetype='application/json'), 404
    # oject exist
    else:
-        res = Correlate_object.get_object_metadata(object_type, correlation_id, type_id=type_id)
+        res = ail_objects.get_object_meta(object_type, type_id, correlation_id, flask_context=True)
        return jsonify(res)
 
@correlation.route('/correlation/graph_node_json')
@@ -260,8 +259,8 @@ def graph_node_json():
    obj_type = request.args.get('object_type') #######################
    max_nodes = sanitise_nb_max_nodes(request.args.get('max_nodes'))
 
-    correlation_names = sanitise_correlation_names(request.args.get('correlation_names'))
-    correlation_objects = sanitise_correlation_objects(request.args.get('correlation_objects'))
+    correlation_names = ail_objects.sanitize_objs_types(request.args.get('correlation_names', '').split(','))
+    correlation_objects = ail_objects.sanitize_objs_types(request.args.get('correlation_objects', '').split(','))
 
    # # TODO: remove me, rename screenshot
    if obj_type == 'image':
@@ -19,19 +19,27 @@ import Flask_config
# Import Role_Manager
from Role_Manager import login_admin, login_analyst, login_read_only

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################


sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
import Tag

sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import crawlers
import Domain
import Language

sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib import crawlers
from lib import Language
from lib.objects import Domains

from lib import Domain # # # # # # # # # # # # # # # # TODO:

#import Config_DB

r_cache = Flask_config.r_cache
r_serv_db = Flask_config.r_serv_db
r_serv_tags = Flask_config.r_serv_tags
bootstrap_label = Flask_config.bootstrap_label

# ============ BLUEPRINT ============
@@ -145,19 +153,21 @@ def showDomain():
        if res:
            return res
 
-        domain = Domain.Domain(domain_name, port=port)
+        domain = Domains.Domain(domain_name)
+        dom = Domain.Domain(domain_name, port=port)
 
-        dict_domain = domain.get_domain_metadata()
+        dict_domain = dom.get_domain_metadata()
        dict_domain['domain'] = domain_name
-        if domain.domain_was_up():
-            dict_domain = {**dict_domain, **domain.get_domain_correlation()}
-            dict_domain['correlation_nb'] = Domain.get_domain_total_nb_correlation(dict_domain)
-            dict_domain['father'] = domain.get_domain_father()
-            dict_domain['languages'] = Language.get_languages_from_iso(domain.get_domain_languages(), sort=True)
-            dict_domain['tags'] = domain.get_domain_tags()
+        if dom.domain_was_up():
+            dict_domain = {**dict_domain, **domain.get_correlations()}
+            print(dict_domain)
+            dict_domain['correlation_nb'] = len(dict_domain['decoded']) + len(dict_domain['username']) + len(dict_domain['pgp']) + len(dict_domain['cryptocurrency']) + len(dict_domain['screenshot'])
+            dict_domain['father'] = dom.get_domain_father()
+            dict_domain['languages'] = Language.get_languages_from_iso(dom.get_domain_languages(), sort=True)
+            dict_domain['tags'] = dom.get_domain_tags()
            dict_domain['tags_safe'] = Tag.is_tags_safe(dict_domain['tags'])
-            dict_domain['history'] = domain.get_domain_history_with_status()
-            dict_domain['crawler_history'] = domain.get_domain_items_crawled(items_link=True, epoch=epoch, item_screenshot=True, item_tag=True) # # TODO: handle multiple port
+            dict_domain['history'] = dom.get_domain_history_with_status()
+            dict_domain['crawler_history'] = dom.get_domain_items_crawled(items_link=True, epoch=epoch, item_screenshot=True, item_tag=True) # # TODO: handle multiple port
            if dict_domain['crawler_history'].get('items', []):
                dict_domain['crawler_history']['random_item'] = random.choice(dict_domain['crawler_history']['items'])
 
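In the rewritten `showDomain()`, the merged correlation data holds one container per correlated object type, so the counter is simply the sum of their sizes. The long chain of `len()` calls above could equivalently be written as a sum over the keys; a sketch with a hypothetical correlations dict:

```python
# Hypothetical shape of the data merged into dict_domain by domain.get_correlations().
correlations = {
    'decoded': ['hash1'],
    'username': [],
    'pgp': ['key1', 'key2'],
    'cryptocurrency': [],
    'screenshot': ['shot1'],
}

# Equivalent to the chained len() additions in the hunk above.
correlation_nb = sum(len(correlations[obj_type])
                     for obj_type in ('decoded', 'username', 'pgp', 'cryptocurrency', 'screenshot'))
print(correlation_nb)  # 4
```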
@@ -179,8 +179,8 @@
                <label class="form-check-label" for="DomainCheck">Domain</label>
              </div>
              <div class="form-check">
-                <input class="form-check-input" type="checkbox" value="True" id="PasteCheck" name="PasteCheck" {%if "paste" in dict_object["correlation_objects"]%}checked{%endif%}>
-                <label class="form-check-label" for="PasteCheck">Paste</label>
+                <input class="form-check-input" type="checkbox" value="True" id="PasteCheck" name="PasteCheck" {%if "item" in dict_object["correlation_objects"]%}checked{%endif%}>
+                <label class="form-check-label" for="PasteCheck">Item</label>
              </div>
 
            </li>
@@ -495,7 +495,7 @@ if (d.popover) {
          desc = desc + "fa-times-circle\"></i>DOWN"
        }
        desc = desc + "</div></dd>"
-      } else if (key!="tags" && key!="id" && key!="img") {
+      } else if (key!="tags" && key!="id" && key!="img" && key!="icon" && key!="link" && key!="type") {
        desc = desc + "<dt class=\"col-sm-3 px-0\">" + sanitize_text(key) + "</dt><dd class=\"col-sm-9 px-0\">" + sanitize_text(data[key]) + "</dd>"
      }
    });
@@ -206,8 +206,7 @@
    </div>
    {% endif %}
 
-
-    {% if 'pgp' in dict_domain%}
+    {% if dict_domain['pgp']%}
    <div id="accordionpgp" class="mt-3">
      <div class="card">
        <div class="card-header" id="headingPgp">
@@ -215,7 +214,7 @@
          <div class="col-11">
            <div class="mt-2">
              <i class="fas fa-key"></i> PGP Dumps
-              <div class="badge badge-warning">{{dict_domain['pgp']['nb']}}</div>
+              <div class="badge badge-warning">{{dict_domain['pgp']|length}}</div>
            </div>
          </div>
          <div class="col-1">
@@ -263,8 +262,9 @@
    </div>
    {% endif %}
 
+    {{dict_domain['cryptocurrency']}}
 
-    {% if 'cryptocurrency' in dict_domain%}
+    {% if dict_domain['cryptocurrency']%}
    <div id="accordioncurrency" class="mt-3">
      <div class="card">
        <div class="card-header" id="headingcurrency">
@@ -272,7 +272,7 @@
          <div class="col-11">
            <div class="mt-2">
              <i class="fas fa-coins"></i> Cryptocurrencies
-              <div class="badge badge-warning">{{dict_domain['cryptocurrency']['nb']}}</div>
+              <div class="badge badge-warning">{{dict_domain['cryptocurrency']|length}}</div>
            </div>
          </div>
          <div class="col-1">
@@ -293,6 +293,7 @@
          </thead>
          <tbody>
          {% for dict_key in dict_domain['cryptocurrency']%}

            {% if dict_key != "nb" %}
              {% if dict_key=="bitcoin" %}
                {% set var_icon = "fab fa-bitcoin" %}
@@ -321,7 +322,7 @@
    {% endif %}
 
 
-    {% if 'screenshot' in dict_domain%}
+    {% if dict_domain['screenshot']%}
    <div id="accordionscreenshot" class="mt-3">
      <div class="card">
        <div class="card-header" id="headingScreenshot">