diff --git a/bin/DB_KVROCKS_MIGRATION.py b/bin/DB_KVROCKS_MIGRATION.py
index 19f152fb..0d87b624 100755
--- a/bin/DB_KVROCKS_MIGRATION.py
+++ b/bin/DB_KVROCKS_MIGRATION.py
@@ -252,6 +252,8 @@ def trackers_migration():
         for id in old_Tracker.get_retro_hunt_items_by_daterange(task_uuid, meta['date_from'], meta['date_to']):
             Tracker.save_retro_hunt_match(task_uuid, id)
 
+    Tracker._fix_db_custom_tags()
+
 def investigations_migration():
     print('INVESTIGATION MIGRATION...')
diff --git a/bin/Helper.py b/bin/Helper.py
index b23132f7..65a260d1 100755
--- a/bin/Helper.py
+++ b/bin/Helper.py
@@ -140,12 +140,6 @@ class Process(object):
                                         db=self.config.get('RedisPubSub', 'db'),
                                         decode_responses=True)
 
-        self.serv_statistics = redis.StrictRedis(
-            host=self.config.get('ARDB_Statistics', 'host'),
-            port=self.config.get('ARDB_Statistics', 'port'),
-            db=self.config.get('ARDB_Statistics', 'db'),
-            decode_responses=True)
-
         self.moduleNum = os.getpid()
 
     def populate_set_in(self):
@@ -181,9 +175,9 @@ class Process(object):
             try:
                 if '.gz' in message:
                     path = message.split(".")[-2].split("/")[-1]
-                    #find start of path with AIL_HOME
+                    # find start of path with AIL_HOME
                    index_s = message.find(os.environ['AIL_HOME'])
-                    #Stop when .gz
+                    # Stop when .gz
                    index_e = message.find(".gz")+3
                    if(index_s == -1):
                        complete_path = message[0:index_e]
@@ -200,7 +194,6 @@ class Process(object):
                 self.r_temp.sadd("MODULE_TYPE_"+self.subscriber_name, str(self.moduleNum))
                 curr_date = datetime.date.today()
-                self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_in:'+self.subscriber_name, 1)
                 return message
 
         except:
@@ -238,6 +231,3 @@ class Process(object):
                     continue
             self.pubsub.publish(message)
-
-    def incr_module_timeout_statistic(self):
-        curr_date = datetime.date.today()
-        self.serv_statistics.hincrby(curr_date.strftime("%Y%m%d"),'paste_by_modules_timeout:'+self.subscriber_name, 1)
diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh
index 476be2f6..480d203a 100755
--- a/bin/LAUNCH.sh
+++ b/bin/LAUNCH.sh
@@ -237,6 +237,8 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Telegram" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Telegram.py; read x"
     sleep 0.1
+    screen -S "Script_AIL" -X screen -t "Tools" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Tools.py; read x"
+    sleep 0.1
     screen -S "Script_AIL" -X screen -t "Hosts" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Hosts.py; read x"
     sleep 0.1
 
@@ -283,8 +285,6 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "Mixer" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Mixer.py; read x"
     sleep 0.1
-    screen -S "Script_AIL" -X screen -t "Tools" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Tools.py; read x"
-    sleep 0.1
     screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./MISP_The_Hive_feeder.py; read x"
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "IPAddress" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./IPAddress.py; read x"
diff --git a/bin/Release.py b/bin/Release.py
deleted file mode 100755
index 6a49089f..00000000
--- a/bin/Release.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-import time
-from lib.objects.Items import Item
-from pubsublogger import publisher
-from Helper import Process
-import re
-
-import signal
-
-class TimeoutException(Exception):
-    pass
-
-def timeout_handler(signum, frame):
-    raise TimeoutException
-
-signal.signal(signal.SIGALRM, timeout_handler)
-
-'''
-This module takes its input from the global module.
-It applies some regex and publish matched content
-'''
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-    config_section = "Release"
-    p = Process(config_section)
-    max_execution_time = p.config.getint("Curve", "max_execution_time")
-    publisher.info("Release scripts to find release names")
-
-    movie = "[a-zA-Z0-9.]+\.[0-9]{4}.[a-zA-Z0-9.]+\-[a-zA-Z]+"
-    tv = "[a-zA-Z0-9.]+\.S[0-9]{2}E[0-9]{2}.[a-zA-Z0-9.]+\.[a-zA-Z0-9.]+\-[a-zA-Z0-9]+"
-    xxx = "[a-zA-Z0-9._]+.XXX.[a-zA-Z0-9.]+\-[a-zA-Z0-9]+"
-
-    regexs = [movie, tv, xxx]
-
-    regex = '|'.join(regexs)
-    while True:
-        signal.alarm(max_execution_time)
-        filepath = p.get_from_set()
-        if filepath is None:
-            publisher.debug("Script Release is Idling 10s")
-            print('Sleeping')
-            time.sleep(10)
-            continue
-
-        item = Item(filepath)
-        content = item.get_content()
-
-        #signal.alarm(max_execution_time)
-        try:
-            releases = set(re.findall(regex, content))
-            if len(releases) == 0:
-                continue
-
-            to_print = f'Release;{item.get_source()};{item.get_date()};{item.get_basename()};{len(releases)} releases;{item.get_id()}'
-            print(to_print)
-            if len(releases) > 30:
-                publisher.warning(to_print)
-            else:
-                publisher.info(to_print)
-
-        except TimeoutException:
-            p.incr_module_timeout_statistic()
-            print(f"{item.get_id()} processing timeout")
-            continue
-        else:
-            signal.alarm(0)
diff --git a/bin/SourceCode.py b/bin/SourceCode.py
deleted file mode 100644
index 8ad1f269..00000000
--- a/bin/SourceCode.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-import time
-from lib.objects.Items import Item
-from pubsublogger import publisher
-from Helper import Process
-import re
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-    config_section = "SourceCode"
-    p = Process(config_section)
-    publisher.info("Finding Source Code")
-
-    critical = 0  # AS TO BE IMPORTANT, MIGHT BE REMOVED
-
-    # RELEVANT LANGUAGES
-    shell = r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+\:\~\$"
-    c = r"\#include\ \<[a-z\/]+.h\>"
-    php = r"\<\?php"
-    python = r"import\ [\w]+"
-    bash = r"#!\/[\w]*\/bash"
-    javascript = r"function\(\)"
-    ruby = r"require \ [\w]+"
-    adr = r"0x[a-f0-9]{2}"
-
-    # asm = r"\"((?s).{1}x[0-9a-f]{2}){3,}" ISSUES WITH FINDALL, pattern like \x54\xaf\x23\..
-    languages = [shell, c, php, bash, python, javascript, bash, ruby, adr]
-    regex = '|'.join(languages)
-    print(regex)
-
-    while True:
-        message = p.get_from_set()
-        if message is None:
-            publisher.debug("Script Source Code is Idling 10s")
-            print('Sleeping')
-            time.sleep(10)
-            continue
-
-        filepath, count = message.split()
-
-        item = Item(filepath)
-        content = item.get_content()
-        match_set = set(re.findall(regex, content))
-        if len(match_set) == 0:
-            continue
-
-        to_print = f'SourceCode;{item.get_source()};{item.get_date()};{item.get_basename()};{item.get_id()}'
-
-        if len(match_set) > critical:
-            publisher.warning(to_print)
-        else:
-            publisher.info(to_print)
diff --git a/bin/export/Export.py b/bin/export/Export.py
index c76e641c..82c946e8 100755
--- a/bin/export/Export.py
+++ b/bin/export/Export.py
@@ -72,6 +72,25 @@ def is_hive_connected():
 
 HIVE_CLIENT = get_hive_client()
 
+def sanitize_threat_level_hive(threat_level):
+    try:
+        threat_level = int(threat_level)
+        if 1 <= threat_level <= 3:
+            return threat_level
+        else:
+            return 2
+    except (TypeError, ValueError):
+        return 2
+
+def sanitize_tlp_hive(tlp):
+    try:
+        tlp = int(tlp)
+        if 0 <= tlp <= 3:
+            return tlp
+        else:
+            return 2
+    except (TypeError, ValueError):
+        return 2
 
 def create_thehive_alert(item_id, tag_trigger):
     item = Item(item_id)
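Note: the two sanitize_* helpers above are fed raw request.form strings by the new /thehive/objects/case/export route further down, so the int() coercion has to be assigned back (as done here) — comparing the raw string would always fall through to the default. A quick sketch of the contract they enforce (illustrative values; the import path is an assumption, with AIL_BIN on sys.path):

    from export import Export  # assumed AIL import path

    assert Export.sanitize_tlp_hive('3') == 3            # numeric string -> valid TLP
    assert Export.sanitize_tlp_hive(7) == 2              # out of range -> default 2
    assert Export.sanitize_threat_level_hive(None) == 2  # malformed -> default 2
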
diff --git a/bin/helper/dump_crawler_history.py b/bin/helper/dump_crawler_history.py
deleted file mode 100755
index 1ecbf0e3..00000000
--- a/bin/helper/dump_crawler_history.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-import os
-import sys
-import gzip
-import datetime
-import redis
-import json
-import time
-
-import shutil
-
-sys.path.append(os.environ['AIL_BIN'])
-sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
-from HiddenServices import HiddenServices
-from Helper import Process
-
-def substract_date(date_from, date_to):
-    date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8]))
-    date_to = datetime.date(int(date_to[0:4]), int(date_to[4:6]), int(date_to[6:8]))
-    delta = date_to - date_from # timedelta
-    l_date = []
-    for i in range(delta.days + 1):
-        date = date_from + datetime.timedelta(i)
-        l_date.append( date.strftime('%Y%m%d') )
-    return l_date
-
-config_section = 'Keys'
-p = Process(config_section)
-
-r_serv_onion = redis.StrictRedis(
-    host=p.config.get("ARDB_Onion", "host"),
-    port=p.config.getint("ARDB_Onion", "port"),
-    db=p.config.getint("ARDB_Onion", "db"),
-    decode_responses=True)
-
-date_from = '20190614'
-date_to = '20190615'
-service_type = 'onion'
-date_range = substract_date(date_from, date_to)
-
-dir_path = os.path.join(os.environ['AIL_HOME'], 'temp')
-
-domain_skipped = []
-
-for date in date_range:
-    domains_up = list(r_serv_onion.smembers('{}_up:{}'.format(service_type, date)))
-    if domains_up:
-        save_path = os.path.join(dir_path, date[0:4], date[4:6], date[6:8])
-        try:
-            os.makedirs(save_path)
-        except FileExistsError:
-            pass
-        for domain in domains_up:
-            print(domain)
-            h = HiddenServices(domain, 'onion')
-            item_core = h.get_domain_crawled_core_item()
-            if 'root_item' in item_core:
-                l_pastes = h.get_last_crawled_pastes(item_root=item_core['root_item'])
-                try:
-                    res = h.create_domain_basic_archive(l_pastes)
-                    filename = os.path.join(save_path, '{}'.format(domain))
-                    with open(filename, 'wb') as f:
-                        shutil.copyfileobj(res, f)
-                    print('done')
-                except Exception as e:
-                    print('skipped')
-                    domain_skipped.append(domain)
-                    pass
-
-print()
-print()
-print('DOMAINS SKIPPED: ')
-for domain in domain_skipped:
-    print(domain)
diff --git a/bin/helper/reprocess_bitcoin.py b/bin/helper/reprocess_bitcoin.py
deleted file mode 100755
index 5e609f1f..00000000
--- a/bin/helper/reprocess_bitcoin.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-import os
-import sys
-import gzip
-import base64
-import uuid
-import datetime
-import base64
-import redis
-import json
-import time
-
-sys.path.append(os.environ['AIL_BIN'])
-from Helper import Process
-
-def substract_date(date_from, date_to):
-    date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8]))
-    date_to = datetime.date(int(date_to[0:4]), int(date_to[4:6]), int(date_to[6:8]))
-    delta = date_to - date_from # timedelta
-    l_date = []
-    for i in range(delta.days + 1):
-        date = date_from + datetime.timedelta(i)
-        l_date.append( date.strftime('%Y%m%d') )
-    return l_date
-
-config_section = 'Global'
-p = Process(config_section)
-
-r_tags = redis.StrictRedis(
-    host=p.config.get("ARDB_Tags", "host"),
-    port=p.config.getint("ARDB_Tags", "port"),
-    db=p.config.getint("ARDB_Tags", "db"),
-    decode_responses=True)
-
-tag = 'infoleak:automatic-detection="bitcoin-address"'
-
-# get tag first/last seen
-first_seen = r_tags.hget('tag_metadata:{}'.format(tag), 'first_seen')
-last_seen = r_tags.hget('tag_metadata:{}'.format(tag), 'last_seen')
-
-l_dates = substract_date(first_seen, last_seen)
-
-# get all tagged items
-for date in l_dates:
-    daily_tagged_items = r_tags.smembers('{}:{}'.format(tag, date))
-
-    for item in daily_tagged_items:
-        p.populate_set_out(item)
diff --git a/bin/helper/reprocess_pgp_message.py b/bin/helper/reprocess_pgp_message.py
deleted file mode 100755
index bf8d78ee..00000000
--- a/bin/helper/reprocess_pgp_message.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-import os
-import sys
-import gzip
-import base64
-import uuid
-import datetime
-import base64
-import redis
-import json
-import time
-
-sys.path.append(os.environ['AIL_BIN'])
-from Helper import Process
-
-def substract_date(date_from, date_to):
-    date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8]))
-    date_to = datetime.date(int(date_to[0:4]), int(date_to[4:6]), int(date_to[6:8]))
-    delta = date_to - date_from # timedelta
-    l_date = []
-    for i in range(delta.days + 1):
-        date = date_from + datetime.timedelta(i)
-        l_date.append( date.strftime('%Y%m%d') )
-    return l_date
-
-config_section = 'Keys'
-p = Process(config_section)
-
-r_tags = redis.StrictRedis(
-    host=p.config.get("ARDB_Tags", "host"),
-    port=p.config.getint("ARDB_Tags", "port"),
-    db=p.config.getint("ARDB_Tags", "db"),
-    decode_responses=True)
-
-tag = 'infoleak:automatic-detection="pgp-message"'
-
-# get tag first/last seen
-first_seen = r_tags.hget('tag_metadata:{}'.format(tag), 'first_seen')
-last_seen = r_tags.hget('tag_metadata:{}'.format(tag), 'last_seen')
-
-l_dates = substract_date(first_seen, last_seen)
-
-# get all tagged items
-for date in l_dates:
-    daily_tagged_items = r_tags.smembers('{}:{}'.format(tag, date))
-
-    for item in daily_tagged_items:
-        p.populate_set_out(item, 'PgpDump')
diff --git a/bin/import/importer.py b/bin/import/importer.py
index 41ce1e4f..841860a5 100755
--- a/bin/import/importer.py
+++ b/bin/import/importer.py
@@ -10,7 +10,6 @@ Recieve Json Items (example: Twitter feeder)
 import os
 import importlib
 import json
-import redis
 import sys
 import time
diff --git a/bin/lib/Config_DB.py b/bin/lib/Config_DB.py
index 05be205e..acfd496b 100755
--- a/bin/lib/Config_DB.py
+++ b/bin/lib/Config_DB.py
@@ -15,7 +15,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
 from lib import ConfigLoader
 
 config_loader = ConfigLoader.ConfigLoader()
-r_serv_db = config_loader.get_redis_conn("ARDB_DB")
+r_serv_db = config_loader.get_redis_conn("_DB")
 config_loader = None
 
 #### TO PUT IN CONFIG
diff --git a/bin/lib/MispModules.py b/bin/lib/MispModules.py
index f3361a59..4227e03d 100755
--- a/bin/lib/MispModules.py
+++ b/bin/lib/MispModules.py
@@ -3,6 +3,7 @@
 import os
 import json
 import requests
+import sys
 import configparser
 
 misp_module_url = 'http://localhost:6666'
@@ -16,7 +17,7 @@ sys.path.append(os.environ['AIL_BIN'])
 from lib import ConfigLoader
 
 config_loader = ConfigLoader.ConfigLoader()
-r_serv = config_loader.get_redis_conn("ARDB_DB")
+r_serv = config_loader.get_redis_conn("_DB")
 config_loader = None
 
 def init_config(config_path=default_config_path):
@@ -114,6 +115,7 @@ def parse_module_enrichment_response(misp_module_response):
     #print(response_types)
     return response_values
 
+
 if __name__ == "__main__":
     load_modules_list()
diff --git a/bin/lib/Statistics.py b/bin/lib/Statistics.py
index 9b570eaa..f8c17d83 100755
--- a/bin/lib/Statistics.py
+++ b/bin/lib/Statistics.py
@@ -12,8 +12,7 @@ sys.path.append(os.environ['AIL_BIN'])
 from lib import ConfigLoader
 
 config_loader = ConfigLoader.ConfigLoader()
-r_statistics = config_loader.get_redis_conn("ARDB_Statistics")
-# r_serv_trend = ConfigLoader().get_redis_conn("ARDB_Trending")
+r_statistics = config_loader.get_db_conn("Kvrocks_Stats")
 config_loader = None
 
 PIE_CHART_MAX_CARDINALITY = 8
@@ -113,11 +112,11 @@ def update_module_stats(module_name, num, keyword, date):
 
     if r_statistics.zcard(f'top_{module_name}_set_{date}') < PIE_CHART_MAX_CARDINALITY:
         r_statistics.zadd(f'top_{module_name}_set_{date}', {keyword: float(keyword_total_sum)})
-    else: # zset at full capacity
+    else:  # zset at full capacity
         member_set = r_statistics.zrangebyscore(f'top_{module_name}_set_{date}', '-inf', '+inf', withscores=True, start=0, num=1)
         # Member set is a list of (value, score) pairs
         if int(member_set[0][1]) < keyword_total_sum:
-            #remove min from set and add the new one
+            # remove min from set and add the new one
             r_statistics.zrem(f'top_{module_name}_set_{date}', member_set[0][0])
             r_statistics.zadd(f'top_{module_name}_set_{date}', {keyword: float(keyword_total_sum)})
diff --git a/bin/lib/Tag.py b/bin/lib/Tag.py
index 10281596..210bf94c 100755
--- a/bin/lib/Tag.py
+++ b/bin/lib/Tag.py
@@ -620,7 +620,7 @@ def update_tag_metadata(tag, date, delete=False):
 # # TODO: delete Tags
 # old
 #     r_tags.smembers(f'{tag}:{date}')
 #     r_tags.smembers(f'{obj_type}:{tag}')
-def get_tag_objects(obj_type, subtype='', date=''):
+def get_tag_objects(tag, obj_type, subtype='', date=''):
     if obj_type == 'item':
         return r_tags.smembers(f'{obj_type}:{subtype}:{tag}:{date}')
     else:
@@ -1079,6 +1079,10 @@ def get_modal_add_tags(object_id, object_type='item', object_subtype=''):
     return {"active_taxonomies": get_active_taxonomies(), "active_galaxies": get_active_galaxies(),
             "object_id": object_id, "object_type": object_type, "object_subtype": object_subtype}
 
+#####################################################################################
+#####################################################################################
+#####################################################################################
+
 ######## NEW VERSION ########
 def create_custom_tag(tag):
     r_tags.sadd('tags:custom', tag)
@@ -1137,28 +1141,45 @@ def get_enabled_tags_with_synonyms_ui():
 
 ## Objects tags ##
-###################################################################################
-###################################################################################
-###################################################################################
-###################################################################################
 ###################################################################################
 ###################################################################################
 ###################################################################################
 ###################################################################################
 ###################################################################################
 
-def add_global_tag(tag, object_type=None):
-    '''
-    Create a set of all tags used in AIL (all + by object)
+# TYPE -> taxonomy/galaxy/custom
 
-    :param tag: tag
-    :type tag: str
-    :param object_type: object type
-    :type object_type: str
-    '''
-    r_tags.sadd('list_tags', tag)
-    if object_type:
-        r_tags.sadd('list_tags:{}'.format(object_type), tag)
+class Tag:
+
+    def __init__(self, t_type, t_id, obj='item'):
+        self.type = t_type
+        self.id = t_id
+        self.obj = obj
+
+    def get_first_seen(self):
+        pass
+
+    def get_last_seen(self):
+        pass
+
+    def get_color(self):
+        pass
+
+    def is_enabled(self):
+        pass
+
+    def get_meta(self):
+        meta = {'first_seen': self.get_first_seen(),
+                'last_seen': self.get_last_seen(),
+                'obj': self.obj,
+                'tag': self.id,
+                'type': self.type}
+        return meta
+
+
+###################################################################################
+###################################################################################
+###################################################################################
+###################################################################################
 
 def add_obj_tags(object_id, object_subtype, object_type, tags=[], galaxy_tags=[]):
     for tag in tags:
@@ -1201,23 +1222,6 @@ def api_add_obj_tags(tags=[], galaxy_tags=[], object_id=None, object_type="item"
     res_dict['type'] = object_type
     return res_dict, 200
 
-# def add_tag(object_type, tag, object_id, obj_date=None):
-#     # new tag
-#     if not is_obj_tagged(object_id, tag):
-#         # # TODO: # FIXME: sanitize object_type
-#         if obj_date:
-#             try:
-#                 obj_date = int(obj_date)
-#             except:
-#                 obj_date = None
-#         if not obj_date:
-#             obj_date = get_obj_date(object_type, object_id)
-#         add_global_tag(tag, object_type=object_type)
-#         add_obj_tag(object_type, object_id, tag, obj_date=obj_date)
-#         update_tag_metadata(tag, obj_date, object_type=object_type)
-#
-#     # create tags stats # # TODO: put me in cache
-#     r_tags.hincrby('daily_tags:{}'.format(datetime.date.today().strftime("%Y%m%d")), tag, 1)
 
 # def delete_obj_tag(object_type, object_id, tag, obj_date):
 #     if object_type=="item":
 #         # TODO: # FIXME: # REVIEW: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
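Note: the new Tag class is only a skeleton at this point (all getters pass), and the original __int__ typo is corrected to __init__ above. The TYPE comment hints at the intended taxonomy/galaxy/custom dispatch; a hedged sketch of how the type could be resolved inside Tag.py, reusing the helpers this patch already calls elsewhere (is_taxonomie_tag / is_galaxy_tag in Tracker._fix_db_custom_tags):

    # hypothetical helper, not part of the patch
    def resolve_tag_type(tag):
        if is_taxonomie_tag(tag):
            return 'taxonomy'
        if is_galaxy_tag(tag):
            return 'galaxy'
        return 'custom'  # anything else is a user-defined tag (the tags:custom set)
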
diff --git a/bin/lib/Tracker.py b/bin/lib/Tracker.py
index 57da2755..49102249 100755
--- a/bin/lib/Tracker.py
+++ b/bin/lib/Tracker.py
@@ -551,9 +551,11 @@ def create_tracker(tracker, tracker_type, user_id, level, tags, mails, descripti
 
     # create tracker tags list
     for tag in tags:
-        r_serv_tracker.sadd(f'tracker:tags:{tracker_uuid}', escape(tag))
+        tag = escape(tag)
+        r_serv_tracker.sadd(f'tracker:tags:{tracker_uuid}', tag)
+        Tag.create_custom_tag(tag)
 
-    # create tracker tags mail notification list
+    # create tracker mail notification list
     for mail in mails:
         r_serv_tracker.sadd(f'tracker:mail:{tracker_uuid}', escape(mail))
 
@@ -1030,7 +1032,9 @@ def create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=[],
     for source in sources:
         r_serv_tracker.sadd(f'tracker:retro_hunt:task:sources:{task_uuid}', escape(source))
     for tag in tags:
-        r_serv_tracker.sadd(f'tracker:retro_hunt:task:tags:{task_uuid}', escape(tag))
+        tag = escape(tag)
+        r_serv_tracker.sadd(f'tracker:retro_hunt:task:tags:{task_uuid}', tag)
+        Tag.create_custom_tag(tag)
     for mail in mails:
         r_serv_tracker.sadd(f'tracker:retro_hunt:task:mails:{task_uuid}', escape(mail))
 
@@ -1364,14 +1368,13 @@ def get_trackers_tags():
 
 def _fix_db_custom_tags():
     for tag in get_trackers_tags():
         if not Tag.is_taxonomie_tag(tag) and not Tag.is_galaxy_tag(tag):
-            print(tag)
             Tag.create_custom_tag(tag)
 
 #### -- ####
 
-#if __name__ == '__main__':
+if __name__ == '__main__':
 
-    #_fix_db_custom_tags()
+    _fix_db_custom_tags()
     # fix_all_tracker_uuid_list()
     # res = get_all_tracker_uuid()
     # print(len(res))
diff --git a/bin/lib/Users.py b/bin/lib/Users.py
index 52056483..668e343e 100755
--- a/bin/lib/Users.py
+++ b/bin/lib/Users.py
@@ -17,7 +17,6 @@ from lib.ConfigLoader import ConfigLoader
 
 # Config
 config_loader = ConfigLoader()
-# r_serv_db = config_loader.get_redis_conn("ARDB_DB")
 r_serv_db = config_loader.get_db_conn("Kvrocks_DB")
 config_loader = None
diff --git a/bin/lib/ail_updates.py b/bin/lib/ail_updates.py
index 0f2bb568..bf1b72a1 100755
--- a/bin/lib/ail_updates.py
+++ b/bin/lib/ail_updates.py
@@ -10,42 +10,42 @@ sys.path.append(os.environ['AIL_BIN'])
 # Import Project packages
 ##################################
 from lib.ConfigLoader import ConfigLoader
-from lib import Users
 
 config_loader = ConfigLoader()
-#r_serv_db = config_loader.get_redis_conn("Kvrocks_DB")
-r_serv = config_loader.get_redis_conn("ARDB_DB")
+r_db = config_loader.get_redis_conn("Kvrocks_DB")
 config_loader = None
 
 BACKGROUND_UPDATES = {
-    'v1.5':{
-        'nb_updates': 5,
-        'message': 'Tags and Screenshots'
-    },
-    'v2.4':{
-        'nb_updates': 1,
-        'message': ' Domains Tags and Correlations'
-    },
-    'v2.6':{
-        'nb_updates': 1,
-        'message': 'Domains Tags and Correlations'
-    },
-    'v2.7':{
-        'nb_updates': 1,
-        'message': 'Domains Tags'
-    },
-    'v3.4':{
-        'nb_updates': 1,
-        'message': 'Domains Languages'
-    },
-    'v3.7':{
-        'nb_updates': 1,
-        'message': 'Trackers first_seen/last_seen'
-    }
+    'v1.5': {
+        'nb_updates': 5,
+        'message': 'Tags and Screenshots'
+    },
+    'v2.4': {
+        'nb_updates': 1,
+        'message': ' Domains Tags and Correlations'
+    },
+    'v2.6': {
+        'nb_updates': 1,
+        'message': 'Domains Tags and Correlations'
+    },
+    'v2.7': {
+        'nb_updates': 1,
+        'message': 'Domains Tags'
+    },
+    'v3.4': {
+        'nb_updates': 1,
+        'message': 'Domains Languages'
+    },
+    'v3.7': {
+        'nb_updates': 1,
+        'message': 'Trackers first_seen/last_seen'
+    }
 }
 
+
 def get_ail_version():
-    return r_serv.get('ail:version')
+    return r_db.get('ail:version')
+
 
@@ -55,20 +55,23 @@ def get_ail_float_version():
 def get_ail_float_version():
     version = 0
     return version
 
+
 def get_ail_all_updates(date_separator='-'):
-    dict_update = r_serv.hgetall('ail:update_date')
+    dict_update = r_db.hgetall('ail:update_date')
     if date_separator:
         for version in dict_update:
             u_date = dict_update[version]
             dict_update[version] = f'{u_date[0:4]}{date_separator}{u_date[4:6]}{date_separator}{u_date[6:8]}'
     return dict_update
 
+
 def add_ail_update(version):
-    #Add new AIL version
-    r_serv.hset('ail:update_date', version, datetime.datetime.now().strftime("%Y%m%d"))
-    #Set current ail version
+    # Add new AIL version
+    r_db.hset('ail:update_date', version, datetime.datetime.now().strftime("%Y%m%d"))
+    # Set current ail version
     if float(version[1:]) > get_ail_float_version():
-        r_serv.set('ail:version', version)
+        r_db.set('ail:version', version)
+
 
 def check_version(version):
     if version[0] != 'v':
@@ -83,75 +86,94 @@ def check_version(version):
         return False
     return True
 
+
 #### UPDATE BACKGROUND ####
 
 def exits_background_update_to_launch():
-    return r_serv.scard('ail:update:to_update') != 0
+    return r_db.scard('ail:update:to_update') != 0
+
 
 def is_version_in_background_update(version):
-    return r_serv.sismember('ail:update:to_update', version)
+    return r_db.sismember('ail:update:to_update', version)
+
 
 def get_all_background_updates_to_launch():
-    return r_serv.smembers('ail:update:to_update')
+    return r_db.smembers('ail:update:to_update')
+
 
 def get_current_background_update():
-    return r_serv.get('ail:update:update_in_progress')
+    return r_db.get('ail:update:update_in_progress')
+
 
 def get_current_background_update_script():
-    return r_serv.get('ail:update:current_background_script')
+    return r_db.get('ail:update:current_background_script')
+
 
 def get_current_background_update_script_path(version, script_name):
     return os.path.join(os.environ['AIL_HOME'], 'update', version, script_name)
+
 
 def get_current_background_nb_update_completed():
-    return r_serv.scard('ail:update:update_in_progress:completed')
+    return r_db.scard('ail:update:update_in_progress:completed')
+
 
 def get_current_background_update_progress():
-    progress = r_serv.get('ail:update:current_background_script_stat')
+    progress = r_db.get('ail:update:current_background_script_stat')
     if not progress:
         progress = 0
     return int(progress)
+
 
 def get_background_update_error():
-    return r_serv.get('ail:update:error')
+    return r_db.get('ail:update:error')
+
 
 def add_background_updates_to_launch(version):
-    return r_serv.sadd('ail:update:to_update', version)
+    return r_db.sadd('ail:update:to_update', version)
+
 
 def start_background_update(version):
-    r_serv.delete('ail:update:error')
-    r_serv.set('ail:update:update_in_progress', version)
+    r_db.delete('ail:update:error')
+    r_db.set('ail:update:update_in_progress', version)
+
 
 def set_current_background_update_script(script_name):
-    r_serv.set('ail:update:current_background_script', script_name)
-    r_serv.set('ail:update:current_background_script_stat', 0)
+    r_db.set('ail:update:current_background_script', script_name)
+    r_db.set('ail:update:current_background_script_stat', 0)
+
 
 def set_current_background_update_progress(progress):
-    r_serv.set('ail:update:current_background_script_stat', progress)
+    r_db.set('ail:update:current_background_script_stat', progress)
+
 
 def set_background_update_error(error):
-    r_serv.set('ail:update:error', error)
+    r_db.set('ail:update:error', error)
+
 
 def end_background_update_script():
-    r_serv.sadd('ail:update:update_in_progress:completed')
+    r_db.sadd('ail:update:update_in_progress:completed')
+
+
 def end_background_update(version):
-    r_serv.delete('ail:update:update_in_progress')
-    r_serv.delete('ail:update:current_background_script')
-    r_serv.delete('ail:update:current_background_script_stat')
-    r_serv.delete('ail:update:update_in_progress:completed')
-    r_serv.srem('ail:update:to_update', version)
+    r_db.delete('ail:update:update_in_progress')
+    r_db.delete('ail:update:current_background_script')
+    r_db.delete('ail:update:current_background_script_stat')
+    r_db.delete('ail:update:update_in_progress:completed')
+    r_db.srem('ail:update:to_update', version)
+
 
 def clear_background_update():
-    r_serv.delete('ail:update:error')
-    r_serv.delete('ail:update:update_in_progress')
-    r_serv.delete('ail:update:current_background_script')
-    r_serv.delete('ail:update:current_background_script_stat')
-    r_serv.delete('ail:update:update_in_progress:completed')
+    r_db.delete('ail:update:error')
+    r_db.delete('ail:update:update_in_progress')
+    r_db.delete('ail:update:current_background_script')
+    r_db.delete('ail:update:current_background_script_stat')
+    r_db.delete('ail:update:update_in_progress:completed')
+
 
 def get_update_background_message(version):
     return BACKGROUND_UPDATES[version]['message']
 
+
 # TODO: Detect error in subprocess
 def get_update_background_metadata():
     dict_update = {}
@@ -162,7 +184,7 @@ def get_update_background_metadata():
         dict_update['script_progress'] = get_current_background_update_progress()
         dict_update['nb_update'] = BACKGROUND_UPDATES[dict_update['version']]['nb_updates']
         dict_update['nb_completed'] = get_current_background_nb_update_completed()
-        dict_update['progress'] = int(dict_update['nb_completed']*100/dict_update['nb_update'])
+        dict_update['progress'] = int(dict_update['nb_completed'] * 100 / dict_update['nb_update'])
         dict_update['error'] = get_background_update_error()
 
     return dict_update
@@ -170,14 +192,6 @@ def get_update_background_metadata():
 
 ##-- UPDATE BACKGROUND --##
 
-
-
-
-
-
-
-##########################
-
 if __name__ == '__main__':
     res = check_version('v3.1..1')
     print(res)
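Note: the renamed r_db helpers form a small state machine for background updates: a version is queued in ail:update:to_update, marked in progress, each update script reports its progress, and the keys are cleared at the end. A sketch of the runner side under those assumptions (be aware that end_background_update_script() still calls sadd() without a member, which redis-py will reject; the completed script's name is presumably the intended value):

    import os
    import sys
    sys.path.append(os.environ['AIL_BIN'])
    from lib import ail_updates

    version = 'v3.7'  # illustrative version
    ail_updates.start_background_update(version)
    ail_updates.set_current_background_update_script('Update_trackers.py')  # hypothetical script name
    for pct in (25, 50, 75, 100):  # a real script reports progress as it works
        ail_updates.set_current_background_update_progress(pct)
    ail_updates.end_background_update(version)
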
diff --git a/bin/lib/objects/Decodeds.py b/bin/lib/objects/Decodeds.py
index 8d0bf2de..e826b48c 100755
--- a/bin/lib/objects/Decodeds.py
+++ b/bin/lib/objects/Decodeds.py
@@ -36,7 +36,6 @@ except:
 
 config_loader = ConfigLoader()
 r_objects = config_loader.get_db_conn("Kvrocks_Objects")
-r_metadata = config_loader.get_redis_conn("ARDB_Metadata")
 HASH_DIR = config_loader.get_config_str('Directories', 'hash')
 baseurl = config_loader.get_config_str("Notifications", "ail_domain")
 config_loader = None
diff --git a/bin/lib/objects/abstract_subtype_object.py b/bin/lib/objects/abstract_subtype_object.py
index b501d0f3..6a8122c1 100755
--- a/bin/lib/objects/abstract_subtype_object.py
+++ b/bin/lib/objects/abstract_subtype_object.py
@@ -8,7 +8,6 @@ Base Class for AIL Objects
 ##################################
 import os
 import sys
-from abc import abstractmethod
 
 # from flask import url_for
 
@@ -25,7 +24,6 @@ from packages import Date
 
 # LOAD CONFIG
 config_loader = ConfigLoader()
-r_metadata = config_loader.get_redis_conn("ARDB_Metadata")
 r_object = config_loader.get_db_conn("Kvrocks_Objects")
 config_loader = None
 
@@ -51,13 +49,6 @@ class AbstractSubtypeObject(AbstractObject):
     def exists(self):
         return r_object.exists(f'meta:{self.type}:{self.subtype}:{self.id}')
 
-    # def exists(self):
-    #     res = r_metadata.zscore(f'{self.type}_all:{self.subtype}', self.id)
-    #     if res is not None:
-    #         return True
-    #     else:
-    #         return False
-
     def get_first_seen(self, r_int=False):
         first_seen = r_object.hget(f'meta:{self.type}:{self.subtype}:{self.id}', 'first_seen')
         if r_int:
@@ -79,11 +70,11 @@ class AbstractSubtypeObject(AbstractObject):
         return last_seen
 
     def get_nb_seen(self):
-        return int(r_metadata.zscore(f'{self.type}_all:{self.subtype}', self.id))
+        return int(r_object.zscore(f'{self.type}_all:{self.subtype}', self.id))
 
     # # TODO: CHECK RESULT
     def get_nb_seen_by_date(self, date_day):
-        nb = r_metadata.hget(f'{self.type}:{self.subtype}:{date_day}', self.id)
+        nb = r_object.hget(f'{self.type}:{self.subtype}:{date_day}', self.id)
         if nb is None:
             return 0
         else:
@@ -134,9 +125,9 @@ class AbstractSubtypeObject(AbstractObject):
         self.update_daterange(date)
         update_obj_date(date, self.type, self.subtype)
         # daily
-        r_metadata.hincrby(f'{self.type}:{self.subtype}:{date}', self.id, 1)
+        r_object.hincrby(f'{self.type}:{self.subtype}:{date}', self.id, 1)
         # all subtypes
-        r_metadata.zincrby(f'{self.type}_all:{self.subtype}', 1, self.id)
+        r_object.zincrby(f'{self.type}_all:{self.subtype}', 1, self.id)
 
     #######################################################################
     #######################################################################
@@ -158,14 +149,5 @@ class AbstractSubtypeObject(AbstractObject):
     def _delete(self):
         pass
 
-
-    ####################################
-    #
-    #   _get_items
-    #   get_metadata
-    #
-    #
-
 def get_all_id(obj_type, subtype):
-    return r_metadata.zrange(f'{obj_type}_all:{subtype}', 0, -1)
+    return r_object.zrange(f'{obj_type}_all:{subtype}', 0, -1)
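Note: the subtype counters keep the old two-structure layout, just moved from ARDB_Metadata to Kvrocks_Objects: a per-day hash <type>:<subtype>:<YYYYMMDD> mapping object id to a daily count, plus an all-time sorted set <type>_all:<subtype> scored by total count. A sketch of the access pattern with plain redis-py (key names are from the patch; the connection parameters and example values are assumptions):

    import redis

    r_object = redis.Redis(port=6383, decode_responses=True)  # assumed Kvrocks endpoint

    obj_type, subtype, obj_id, date = 'username', 'telegram', 'alice', '20220901'
    # one 'seen' event, as in AbstractSubtypeObject.add():
    r_object.hincrby(f'{obj_type}:{subtype}:{date}', obj_id, 1)  # daily counter
    r_object.zincrby(f'{obj_type}_all:{subtype}', 1, obj_id)     # all-time counter
    # what get_nb_seen_by_date() / get_nb_seen() read back:
    nb_day = int(r_object.hget(f'{obj_type}:{subtype}:{date}', obj_id) or 0)
    nb_total = int(r_object.zscore(f'{obj_type}_all:{subtype}', obj_id) or 0)
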
diff --git a/bin/modules/Credential.py b/bin/modules/Credential.py
index e7e311e8..cf2f4f38 100755
--- a/bin/modules/Credential.py
+++ b/bin/modules/Credential.py
@@ -69,8 +69,7 @@ class Credential(AbstractModule):
 
         # Database
         config_loader = ConfigLoader.ConfigLoader()
-        # self.server_cred = config_loader.get_redis_conn("ARDB_TermCred")
-        self.server_statistics = config_loader.get_redis_conn("ARDB_Statistics")
+        # self.server_cred = config_loader.get_redis_conn("_TermCred")
 
         # Config values
         self.minimumLengthThreshold = config_loader.get_config_int("Credential", "minimumLengthThreshold")
diff --git a/bin/modules/Global.py b/bin/modules/Global.py
index f1567f48..4edb35b2 100755
--- a/bin/modules/Global.py
+++ b/bin/modules/Global.py
@@ -43,6 +43,7 @@ from modules.abstract_module import AbstractModule
 from lib.ConfigLoader import ConfigLoader
 from lib.data_retention_engine import update_obj_date
 from lib import item_basic
+# from lib import Statistics
 
 class Global(AbstractModule):
     """
@@ -52,8 +53,6 @@ class Global(AbstractModule):
     def __init__(self):
         super(Global, self).__init__()
 
-        self.r_stats = ConfigLoader().get_redis_conn("ARDB_Statistics")
-
         self.processed_item = 0
         self.time_last_stats = time.time()
 
@@ -197,12 +196,13 @@ class Global(AbstractModule):
                     self.redis_logger.warning(f'Global; Incomplete file: {filename}')
                     print(f'Global; Incomplete file: {filename}')
                     # save daily stats
-                    self.r_stats.zincrby('module:Global:incomplete_file', 1, datetime.datetime.now().strftime('%Y%m%d'))
+                    # self.r_stats.zincrby('module:Global:incomplete_file', 1, datetime.datetime.now().strftime('%Y%m%d'))
+                    # Statistics.
                 except OSError:
                     self.redis_logger.warning(f'Global; Not a gzipped file: {filename}')
                     print(f'Global; Not a gzipped file: {filename}')
                     # save daily stats
-                    self.r_stats.zincrby('module:Global:invalid_file', 1, datetime.datetime.now().strftime('%Y%m%d'))
+                    # self.r_stats.zincrby('module:Global:invalid_file', 1, datetime.datetime.now().strftime('%Y%m%d'))
 
         return curr_file_content
diff --git a/bin/modules/LibInjection.py b/bin/modules/LibInjection.py
index 883e2e46..24c7ca23 100755
--- a/bin/modules/LibInjection.py
+++ b/bin/modules/LibInjection.py
@@ -13,7 +13,6 @@ It tries to identify SQL Injections with libinjection.
 
 import os
 import sys
-import urllib.request
 import pylibinjection
 
 from datetime import datetime
@@ -28,6 +27,7 @@ sys.path.append(os.environ['AIL_BIN'])
 from modules.abstract_module import AbstractModule
 from lib.ConfigLoader import ConfigLoader
 from lib.objects.Items import Item
+from lib import Statistics
 
 class LibInjection(AbstractModule):
     """docstring for LibInjection module."""
@@ -37,9 +37,6 @@ class LibInjection(AbstractModule):
 
         self.faup = Faup()
 
-        config_loader = ConfigLoader()
-        self.server_statistics = config_loader.get_redis_conn("ARDB_Statistics")
-
         self.redis_logger.info(f"Module: {self.module_name} Launched")
 
     def compute(self, message):
@@ -94,7 +91,7 @@ class LibInjection(AbstractModule):
         tld = url_parsed['tld']
         if tld is not None:
             date = datetime.now().strftime("%Y%m")
-            self.server_statistics.hincrby(f'SQLInjection_by_tld:{date}', tld, 1)
+            Statistics.add_module_tld_stats_by_date(self.module_name, date, tld, 1)
 
 if __name__ == "__main__":
diff --git a/bin/modules/Onion.py b/bin/modules/Onion.py
index 4a7ce415..1bfb4198 100755
--- a/bin/modules/Onion.py
+++ b/bin/modules/Onion.py
@@ -34,7 +34,6 @@ class Onion(AbstractModule):
 
         config_loader = ConfigLoader()
         self.r_cache = config_loader.get_redis_conn("Redis_Cache")
-        self.r_onion = config_loader.get_redis_conn("ARDB_Onion")
 
         self.pending_seconds = config_loader.get_config_int("Onion", "max_execution_time")
 
         # regex timeout
@@ -91,7 +90,7 @@ class Onion(AbstractModule):
 
         if onion_urls:
             if crawlers.is_crawler_activated():
-                for domain in domains:# TODO LOAD DEFAULT SCREENSHOT + HAR
+                for domain in domains:  # TODO LOAD DEFAULT SCREENSHOT + HAR
                     task_uuid = crawlers.add_crawler_task(domain, parent=item.get_id())
                     if task_uuid:
                         print(f'{domain} added to crawler queue: {task_uuid}')
@@ -109,6 +108,3 @@ if __name__ == "__main__":
     module = Onion()
     # module.compute('submitted/2022/10/10/submitted_705d1d92-7e9a-4a44-8c21-ccd167bfb7db.gz 9')
     module.run()
-
-
-# 5ajw6aqf3ep7sijnscdzw77t7xq4xjpsy335yb2wiwgouo7yfxtjlmid.onion to debian.org
\ No newline at end of file
diff --git a/bin/modules/SQLInjectionDetection.py b/bin/modules/SQLInjectionDetection.py
index 17d52741..2cc11066 100755
--- a/bin/modules/SQLInjectionDetection.py
+++ b/bin/modules/SQLInjectionDetection.py
@@ -27,6 +27,7 @@ sys.path.append(os.environ['AIL_BIN'])
 from modules.abstract_module import AbstractModule
 from lib.ConfigLoader import ConfigLoader
 from lib.objects.Items import Item
+from lib import Statistics
 
 class SQLInjectionDetection(AbstractModule):
     """docstring for SQLInjectionDetection module."""
@@ -40,9 +41,6 @@ class SQLInjectionDetection(AbstractModule):
 
         self.faup = Faup()
 
-        config_loader = ConfigLoader()
-        self.server_statistics = config_loader.get_redis_conn("ARDB_Statistics")
-
         self.redis_logger.info(f"Module: {self.module_name} Launched")
 
     def compute(self, message):
@@ -75,7 +73,7 @@ class SQLInjectionDetection(AbstractModule):
             except:
                 pass
             date = datetime.now().strftime("%Y%m")
-            self.server_statistics.hincrby(f'SQLInjection_by_tld:{date}', tld, 1)
+            Statistics.add_module_tld_stats_by_date(self.module_name, date, tld, 1)
 
     # Try to detect if the url passed might be an sql injection by applying the regex
     # defined above on it.
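Note: both injection modules now delegate to Statistics.add_module_tld_stats_by_date() instead of writing to ARDB_Statistics directly. The helper's body is not shown in this patch; from the two call sites (module_name, date, tld, nb) and the old hincrby(f'SQLInjection_by_tld:{date}', tld, 1) pattern, it presumably looks roughly like this in bin/lib/Statistics.py (a sketch, assuming the old key layout is kept per module and r_statistics is the Kvrocks_Stats connection defined above):

    def add_module_tld_stats_by_date(module, date, tld, nb):
        # assumed: per-module monthly hash of TLD counters
        r_statistics.hincrby(f'{module}_by_tld:{date}', tld, int(nb))
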
-threshold_duplicate_tlsh = 52
-#Minimum size of the paste considered
-min_paste_size = 0.3
-
-[Module_ModuleInformation]
-#Threshold to deduce if a module is stuck or not, in seconds.
-threshold_stucked_module=600
-
-[Module_Mixer]
-#Define the configuration of the mixer, possible value: 1, 2 or 3
-operation_mode = 3
-#Define the time that a paste will be considerate duplicate. in seconds (1day = 86400)
-ttl_duplicate = 86400
-default_unnamed_feed_name = unnamed_feeder
-
-[RegexForTermsFrequency]
-max_execution_time = 60
-
-##### Redis #####
-[Redis_Cache]
-host = localhost
-port = 6379
-db = 0
-
-[Redis_Log]
-host = localhost
-port = 6380
-db = 0
-
-[Redis_Log_submit]
-host = localhost
-port = 6380
-db = 1
-
-[Redis_Queues]
-host = localhost
-port = 6381
-db = 0
-
-[Redis_Data_Merging]
-host = localhost
-port = 6379
-db = 1
-
-[Redis_Paste_Name]
-host = localhost
-port = 6379
-db = 2
-
-[Redis_Mixer_Cache]
-host = localhost
-port = 6381
-db = 1
-
-##### ARDB #####
-[ARDB_Curve]
-host = localhost
-port = 6382
-db = 1
-
-[ARDB_Sentiment]
-host = localhost
-port = 6382
-db = 4
-
-[ARDB_TermFreq]
-host = localhost
-port = 6382
-db = 2
-
-[ARDB_TermCred]
-host = localhost
-port = 6382
-db = 5
-
-[ARDB_DB]
-host = localhost
-port = 6382
-db = 0
-
-[ARDB_Trending]
-host = localhost
-port = 6382
-db = 3
-
-[ARDB_Hashs]
-host = localhost
-db = 1
-
-[ARDB_Tags]
-host = localhost
-port = 6382
-db = 6
-
-[ARDB_Metadata]
-host = localhost
-port = 6382
-db = 7
-
-[ARDB_Statistics]
-host = localhost
-port = 6382
-db = 8
-
-[ARDB_Onion]
-host = localhost
-port = 6382
-db = 9
-
-[Url]
-cc_critical = DE
-
-[DomClassifier]
-cc = DE
-cc_tld = r'\.de$'
-dns = 8.8.8.8
-
-[Mail]
-dns = 8.8.8.8
-
-[Web]
-dns = 149.13.33.69
-
-# Indexer configuration
-[Indexer]
-type = whoosh
-path = indexdir
-register = indexdir/all_index.txt
-#size in Mb
-index_max_size = 2000
-
-[ailleakObject]
-maxDuplicateToPushToMISP=10
-
-###############################################################################
-
-# For multiple feed, add them with "," without space
-# e.g.: tcp://127.0.0.1:5556,tcp://127.0.0.1:5557
-[ZMQ_Global]
-#address = tcp://crf.circl.lu:5556
-address = tcp://127.0.0.1:5556,tcp://crf.circl.lu:5556
-channel = 102
-bind = tcp://127.0.0.1:5556
-
-[ZMQ_Url]
-address = tcp://127.0.0.1:5004
-channel = urls
-
-[ZMQ_FetchedOnion]
-address = tcp://127.0.0.1:5005
-channel = FetchedOnion
-
-[RedisPubSub]
-host = localhost
-port = 6381
-db = 0
-
-[Crawler]
-activate_crawler = False
-crawler_depth_limit = 1
-splash_url_onion = http://172.17.0.1
-splash_onion_port = 8050
diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg
index 7810680d..0f1d7547 100644
--- a/bin/packages/modules.cfg
+++ b/bin/packages/modules.cfg
@@ -12,10 +12,6 @@ publish = Redis_Mixer,Redis_Tags
 subscribe = Redis_Mixer
 publish = Redis_Global,Redis_ModuleStats
 
-#[PreProcessFeed]
-#subscribe = Redis_preProcess1
-#publish = Redis_Mixer
-
 [Duplicates]
 subscribe = Redis_Duplicate
 
@@ -62,7 +58,7 @@ subscribe = Redis_Global
 
 [Categ]
 subscribe = Redis_Global
-publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Urls,Redis_Credential,Redis_SourceCode,Redis_Cve,Redis_ApiKey,Redis_SyncModule
+publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Urls,Redis_Credential,Redis_Cve,Redis_ApiKey,Redis_SyncModule
 
 [CreditCards]
 subscribe = Redis_CreditCards
@@ -78,13 +74,9 @@ publish = Redis_ModuleStats,Redis_Tags
 
 [Onion]
 subscribe = Redis_Onion
-publish = Redis_ValidOnion,Redis_Tags,Redis_Crawler
+publish = Redis_Tags,Redis_Crawler
 #publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_Tags,Redis_Crawler
 
-# TODO remove me
-[DumpValidOnion]
-subscribe = Redis_ValidOnion
-
 [Urls]
 subscribe = Redis_Urls
 publish = Redis_Url
@@ -112,15 +104,8 @@ subscribe = Redis_SyncModule
 [MISP_The_hive_feeder]
 subscribe = Redis_Tags_feed
 
-#[send_to_queue]
-#subscribe = Redis_Cve
-#publish = Redis_Tags
-
-[SentimentAnalysis]
-subscribe = Redis_Global
-
-[Release]
-subscribe = Redis_Global
+#[SentimentAnalysis]
+#subscribe = Redis_Global
 
 [Credential]
 subscribe = Redis_Credential
@@ -130,9 +115,10 @@ publish = Redis_Duplicate,Redis_ModuleStats,Redis_Tags
 subscribe = Redis_Cve
 publish = Redis_Tags
 
-[Phone]
-subscribe = Redis_Global
-publish = Redis_Tags
+# Disabled
+#[Phone]
+#subscribe = Redis_Global
+#publish = Redis_Tags
 
 [Keys]
 subscribe = Redis_Global
@@ -159,7 +145,6 @@ subscribe = Redis
 publish = Redis_Mixer
 
 [Crawler]
-subscribe = Redis_Crawler
 publish = Redis_Mixer,Redis_Tags
 
 [IP]
@@ -169,3 +154,11 @@ publish = Redis_Tags
 
 [Zerobins]
 subscribe = Redis_Url
 
+#[PreProcessFeed]
+#subscribe = Redis_preProcess1
+#publish = Redis_Mixer
+
+# [My_Module]
+# subscribe = Redis_Global
+# publish = Redis_Tags
+
diff --git a/bin/template.py b/bin/template.py
index 50714d63..bb66e86a 100755
--- a/bin/template.py
+++ b/bin/template.py
@@ -36,11 +36,11 @@ class Template(AbstractModule):
         # Send module state to logs
         self.redis_logger.info(f'Module {self.module_name} initialized')
 
-    def computeNone(self):
-        """
-        Do something when there is no message in the queue
-        """
-        self.redis_logger.debug("No message in queue")
+    # def computeNone(self):
+    #     """
+    #     Do something when there is no message in the queue
+    #     """
+    #     self.redis_logger.debug("No message in queue")
 
     def compute(self, message):
         """
diff --git a/var/www/blueprints/import_export.py b/var/www/blueprints/import_export.py
index 1eb58be6..d05c3b22 100644
--- a/var/www/blueprints/import_export.py
+++ b/var/www/blueprints/import_export.py
@@ -81,7 +81,7 @@ def import_object_file():
 
     return render_template("import_object.html", all_imported_obj=all_imported_obj, error=error)
 
-@import_export.route("/objects/misp/export", methods=['GET'])
+@import_export.route("/misp/objects/export", methods=['GET'])
 @login_required
 @login_analyst
 def objects_misp_export():
@@ -91,7 +91,7 @@ def objects_misp_export():
 
     return render_template("export_object.html", object_types=object_types, to_export=to_export)
 
-@import_export.route("/objects/misp/export/post", methods=['POST'])
+@import_export.route("/misp/objects/export/post", methods=['POST'])
 @login_required
 @login_analyst
 def objects_misp_export_post():
@@ -159,7 +159,7 @@ def objects_misp_export_post():
                             misp_url=event['url'])
 
-@import_export.route("/objects/misp/export/add", methods=['GET'])
+@import_export.route("/misp/objects/export/add", methods=['GET'])
 @login_required
 @login_analyst
 def add_object_id_to_export():
@@ -181,7 +181,7 @@ def add_object_id_to_export():
 
     return redirect(url_for('import_export.objects_misp_export'))
 
-@import_export.route("/objects/misp/export/delete", methods=['GET'])
+@import_export.route("/misp/objects/export/delete", methods=['GET'])
 @login_required
 @login_analyst
 def delete_object_id_to_export():
@@ -194,7 +194,7 @@ def delete_object_id_to_export():
 
     return jsonify(success=True)
 
-@import_export.route("/import_export/investigation", methods=['GET'])
+@import_export.route("/investigation/misp/export", methods=['GET'])
 @login_required
 @login_analyst
 def export_investigation():
@@ -206,3 +206,24 @@ def export_investigation():
             return Response(json.dumps({"error": "Can't reach MISP Instance"}, indent=2, sort_keys=True), mimetype='application/json'), 400
 
     return redirect(url_for('investigations_b.show_investigation', uuid=investigation_uuid))
+
+
+@import_export.route("/thehive/objects/case/export", methods=['POST'])
+@login_required
+@login_analyst
+def create_thehive_case():
+    description = request.form['hive_description']
+    title = request.form['hive_case_title']
+    threat_level = Export.sanitize_threat_level_hive(request.form['threat_level_hive'])
+    tlp = Export.sanitize_tlp_hive(request.form['hive_tlp'])
+    item_id = request.form['obj_id']
+
+    item = Item(item_id)
+    if not item.exists():
+        abort(404)
+
+    case_id = Export.create_thehive_case(item_id, title=title, tlp=tlp, threat_level=threat_level, description=description)
+    if case_id:
+        return redirect(Export.get_case_url(case_id))
+    else:
+        return 'error'
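Note: the TheHive case export, previously /PasteSubmit/create_hive_case, now lives here and runs its form input through the Export sanitizers. A minimal smoke test of the relocated endpoint (field names from the route; the URL, values, and session handling are illustrative assumptions):

    import requests

    session = requests.Session()  # assumed: already authenticated against the AIL UI
    session.post('https://localhost:7000/thehive/objects/case/export',
                 data={'hive_case_title': 'Leaked credentials',
                       'hive_description': 'Case created from an AIL item',
                       'threat_level_hive': '2',
                       'hive_tlp': '2',
                       'obj_id': 'submitted/2022/10/10/example.gz'},  # hypothetical item id
                 verify=False)  # default AIL install uses a self-signed cert
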
diff --git a/var/www/modules/Flask_config.py b/var/www/modules/Flask_config.py
index 6468d2a4..ed685fc6 100644
--- a/var/www/modules/Flask_config.py
+++ b/var/www/modules/Flask_config.py
@@ -30,15 +30,9 @@ r_cache = config_loader.get_redis_conn("Redis_Cache")
 r_serv_log = config_loader.get_redis_conn("Redis_Log")
 r_serv_log_submit = config_loader.get_redis_conn("Redis_Log_submit")
 
-
-
-r_serv_charts = config_loader.get_redis_conn("ARDB_Trending")
-r_serv_sentiment = config_loader.get_redis_conn("ARDB_Sentiment")
-r_serv_term = config_loader.get_redis_conn("ARDB_Tracker")
-r_serv_cred = config_loader.get_redis_conn("ARDB_TermCred")
-r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
-r_serv_statistics = config_loader.get_redis_conn("ARDB_Statistics")
-r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
+r_serv_charts = config_loader.get_redis_conn("ARDB_Trending")  # -> TODO MIGRATE Stats Graphs
+r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")  # -> TODO MIGRATE /correlation/ subtypes objects
+r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")  # -> TODO MIGRATE AUTO CRAWLER
 
 # # # # # # #
 r_serv_db = config_loader.get_db_conn("Kvrocks_DB")
@@ -51,72 +45,45 @@ redis_logger.port = 6380
 # Channel name to publish logs
 redis_logger.channel = 'Flask'
 
-
 sys.path.append('../../configs/keys')
-# MISP #
-try:
-    from pymisp import PyMISP
-    from mispKEYS import misp_url, misp_key, misp_verifycert
-    pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
-    misp_event_url = misp_url + '/events/view/'
-    print('Misp connected')
-except:
-    print('Misp not connected')
-    pymisp = False
-    misp_event_url = '#'
-# The Hive #
-try:
-    from thehive4py.api import TheHiveApi
-    import thehive4py.exceptions
-    from theHiveKEYS import the_hive_url, the_hive_key, the_hive_verifycert
-    if the_hive_url == '':
-        HiveApi = False
-        hive_case_url = '#'
-        print('The HIVE not connected')
-    else:
-        HiveApi = TheHiveApi(the_hive_url, the_hive_key, cert=the_hive_verifycert)
-        hive_case_url = the_hive_url+'/index.html#/case/id_here/details'
-except:
-    print('The HIVE not connected')
-    HiveApi = False
-    hive_case_url = '#'
-
-if HiveApi != False:
-    try:
-        HiveApi.get_alert(0)
-        print('The Hive connected')
-    except thehive4py.exceptions.AlertException:
-        HiveApi = False
-        print('The Hive not connected')
 
 #### VARIABLES ####
 baseUrl = config_loader.get_config_str("Flask", "baseurl")
 baseUrl = baseUrl.replace('/', '')
 if baseUrl != '':
-    baseUrl = '/'+baseUrl
+    baseUrl = '/' + baseUrl
 
-max_preview_char = int(config_loader.get_config_str("Flask", "max_preview_char")) # Maximum number of character to display in the tooltip
-max_preview_modal = int(config_loader.get_config_str("Flask", "max_preview_modal")) # Maximum number of character to display in the modal
+max_preview_char = int(
+    config_loader.get_config_str("Flask", "max_preview_char"))  # Maximum number of character to display in the tooltip
+max_preview_modal = int(
+    config_loader.get_config_str("Flask", "max_preview_modal"))  # Maximum number of character to display in the modal
 
 max_tags_result = 50
 
-DiffMaxLineLength = int(config_loader.get_config_str("Flask", "DiffMaxLineLength"))#Use to display the estimated percentage instead of a raw value
+DiffMaxLineLength = int(config_loader.get_config_str("Flask",
                                                     "DiffMaxLineLength"))  # Use to display the estimated percentage instead of a raw value
 
 bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info']
 
-dict_update_description = {'v1.5':{'nb_background_update': 5, 'update_warning_message': 'An Update is running on the background. Some informations like Tags, screenshot can be',
-                                'update_warning_message_notice_me': 'missing from the UI.'},
-                            'v2.4':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags/Correlation can be',
-                                'update_warning_message_notice_me': 'missing from the UI.'},
-                            'v2.6':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags/Correlation can be',
-                                'update_warning_message_notice_me': 'missing from the UI.'},
-                            'v2.7':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags can be',
-                                'update_warning_message_notice_me': 'missing from the UI.'},
-                            'v3.4':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Languages can be',
-                                'update_warning_message_notice_me': 'missing from the UI.'},
-                            'v3.7':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Tracker first_seen/last_seen can be',
-                                'update_warning_message_notice_me': 'missing from the UI.'}
-                            }
+dict_update_description = {'v1.5': {'nb_background_update': 5,
+                                    'update_warning_message': 'An Update is running on the background. Some informations like Tags, screenshot can be',
+                                    'update_warning_message_notice_me': 'missing from the UI.'},
+                           'v2.4': {'nb_background_update': 1,
+                                    'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags/Correlation can be',
+                                    'update_warning_message_notice_me': 'missing from the UI.'},
+                           'v2.6': {'nb_background_update': 1,
+                                    'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags/Correlation can be',
+                                    'update_warning_message_notice_me': 'missing from the UI.'},
+                           'v2.7': {'nb_background_update': 1,
+                                    'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags can be',
+                                    'update_warning_message_notice_me': 'missing from the UI.'},
+                           'v3.4': {'nb_background_update': 1,
+                                    'update_warning_message': 'An Update is running on the background. Some informations like Domain Languages can be',
+                                    'update_warning_message_notice_me': 'missing from the UI.'},
+                           'v3.7': {'nb_background_update': 1,
+                                    'update_warning_message': 'An Update is running on the background. Some informations like Tracker first_seen/last_seen can be',
+                                    'update_warning_message_notice_me': 'missing from the UI.'}
+                           }
 
 UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted')
 
@@ -137,10 +104,10 @@ SUBMIT_PASTE_TEXT_MAX_SIZE = int(config_loader.get_config_str("SubmitPaste", "TE
 SUBMIT_PASTE_FILE_MAX_SIZE = int(config_loader.get_config_str("SubmitPaste", "FILE_MAX_SIZE"))
 SUBMIT_PASTE_FILE_ALLOWED_EXTENSIONS = [item.strip() for item in config_loader.get_config_str("SubmitPaste", "FILE_ALLOWED_EXTENSIONS").split(',')]
 
-
 # VT
 try:
     from virusTotalKEYS import vt_key
+
     if vt_key != '':
         vt_auth = vt_key
         vt_enabled = True
diff --git a/var/www/modules/PasteSubmit/Flask_PasteSubmit.py b/var/www/modules/PasteSubmit/Flask_PasteSubmit.py
index 1dd0c470..0551119c 100644
--- a/var/www/modules/PasteSubmit/Flask_PasteSubmit.py
+++ b/var/www/modules/PasteSubmit/Flask_PasteSubmit.py
@@ -11,10 +11,8 @@ import re
 import os
 import sys
 import string
-import datetime
 import unicodedata
 import uuid
-from io import BytesIO
 
 from functools import wraps
@@ -42,9 +40,6 @@ import Flask_config
 
 app = Flask_config.app
 baseUrl = Flask_config.baseUrl
-
-
-r_serv_metadata = Flask_config.r_serv_metadata  # TODO REMOVE ME
 r_serv_db = Flask_config.r_serv_db  # TODO REMOVE ME
 r_serv_log_submit = Flask_config.r_serv_log_submit  # TODO REMOVE ME
@@ -317,35 +312,16 @@ def submit_status():
     else:
         return 'INVALID UUID'
 
-# TODO MOVE ME IN import_export blueprint
-@PasteSubmit.route("/PasteSubmit/create_hive_case", methods=['POST'])
-@login_required
-@login_analyst
-def create_hive_case():
-    tlp = request.form['hive_tlp']
-    if tlp:
-        tlp = int(tlp)
-    else:
-        tlp = 2
-    threat_level = request.form['threat_level_hive']
-    if threat_level:
-        threat_level = int(threat_level)
-    else:
-        threat_level = 2
-    description = request.form['hive_description']
-    title = request.form['hive_case_title']
-    item_id = request.form['obj_id']
+######################################################################################################
+######################################################################################################
+######################################################################################################
+######################################################################################################
 
-    if (0 <= tlp <= 3) and (1 <= threat_level <= 4):
+# TODO MIGRATE TAGS PUSH
 
-        case_id = Export.create_thehive_case(item_id, title=title, tlp=tlp, threat_level=threat_level, description=description)
-        if case_id:
-            return redirect(get_case_url(case_id))
-        else:
-            return 'error'
+# TEMPORARY: 2 SET OF CUSTOM + infoleak tags ?????????
 
-    return 'error'
 
 @PasteSubmit.route("/PasteSubmit/edit_tag_export")
 @login_required
@@ -410,8 +386,8 @@ def edit_tag_export():
                             status_hive=status_hive,
                             nb_tags_whitelist_misp=nb_tags_whitelist_misp,
                             nb_tags_whitelist_hive=nb_tags_whitelist_hive,
-                            flag_misp=flag_misp,
-                            flag_hive=flag_hive)
+                            flag_misp=True,
+                            flag_hive=True)
 
 @PasteSubmit.route("/PasteSubmit/tag_export_edited", methods=['POST'])
 @login_required
diff --git a/var/www/modules/hiddenServices/Flask_hiddenServices.py b/var/www/modules/hiddenServices/Flask_hiddenServices.py
index c8d099ad..8d19cc24 100644
--- a/var/www/modules/hiddenServices/Flask_hiddenServices.py
+++ b/var/www/modules/hiddenServices/Flask_hiddenServices.py
@@ -27,7 +27,6 @@ app = Flask_config.app
 baseUrl = Flask_config.baseUrl
 r_cache = Flask_config.r_cache
 r_serv_onion = Flask_config.r_serv_onion
-r_serv_metadata = Flask_config.r_serv_metadata
 bootstrap_label = Flask_config.bootstrap_label
 
 hiddenServices = Blueprint('hiddenServices', __name__, template_folder='templates')
diff --git a/var/www/modules/restApi/Flask_restApi.py b/var/www/modules/restApi/Flask_restApi.py
index 85484000..81fa47eb 100644
--- a/var/www/modules/restApi/Flask_restApi.py
+++ b/var/www/modules/restApi/Flask_restApi.py
@@ -41,8 +41,6 @@ app = Flask_config.app
 baseUrl = Flask_config.baseUrl
 r_cache = Flask_config.r_cache
 r_serv_db = Flask_config.r_serv_db
-r_serv_onion = Flask_config.r_serv_onion
-r_serv_metadata = Flask_config.r_serv_metadata
 
 restApi = Blueprint('restApi', __name__, template_folder='templates')
@@ -258,7 +256,8 @@ def add_item_tags():
         tags = data.get('tags', [])
         galaxy = data.get('galaxy', [])
 
-        res = Tag.api_add_obj_tags(tags=tags, galaxy_tags=galaxy, object_id=object_id, object_type="item")
+        # res = Tag.api_add_obj_tags(tags=tags, galaxy_tags=galaxy, object_id=object_id, object_type="item")
+        res = {'error': 'disabled endpoint'}, 500
         return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
 
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
diff --git a/var/www/modules/sentiment/Flask_sentiment.py b/var/www/modules/sentiment/Flask_sentiment.py
index 663f6f02..a11c7f23 100644
--- a/var/www/modules/sentiment/Flask_sentiment.py
+++ b/var/www/modules/sentiment/Flask_sentiment.py
@@ -1,160 +1,160 @@
 #!/usr/bin/env python3
 # -*-coding:UTF-8 -*
-'''
-    Flask functions and routes for the trending modules page
-'''
-import os
-import sys
-import datetime
-import calendar
-import flask
-from flask import Flask, render_template, jsonify, request, Blueprint
-
-from Role_Manager import login_admin, login_analyst, login_read_only
-from flask_login import login_required
-
-sys.path.append(os.environ['AIL_BIN'])
-##################################
-# Import Project packages
-##################################
-from packages.Date import Date
-
-# ============ VARIABLES ============
-import Flask_config
-
-app = Flask_config.app
-baseUrl = Flask_config.baseUrl
-r_serv_charts = Flask_config.r_serv_charts
-r_serv_sentiment = Flask_config.r_serv_sentiment
-
-sentiments = Blueprint('sentiments', __name__, template_folder='templates')
-
-# ============ FUNCTIONS ============
-
-def get_date_range(num_day):
-    curr_date = datetime.date.today()
-    date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2))
-    date_list = []
-
-    for i in range(0, num_day+1):
-        date_list.append(date.substract_day(i))
-    return date_list
-
-
-# ============ ROUTES ============
-@sentiments.route("/sentiment_analysis_trending/") -@login_required -@login_read_only -def sentiment_analysis_trending(): - return render_template("sentiment_analysis_trending.html") - - -@sentiments.route("/sentiment_analysis_getplotdata/", methods=['GET']) -@login_required -@login_read_only -def sentiment_analysis_getplotdata(): - # Get the top providers based on number of pastes - oneHour = 60*60 - sevenDays = oneHour*24*7 - dateStart = datetime.datetime.now() - dateStart = dateStart.replace(minute=0, second=0, microsecond=0) - dateStart_timestamp = calendar.timegm(dateStart.timetuple()) - - getAllProviders = request.args.get('getProviders') - provider = request.args.get('provider') - allProvider = request.args.get('all') - if getAllProviders == 'True': - if allProvider == "True": - range_providers = r_serv_charts.smembers('all_provider_set') - - return jsonify(list(range_providers)) - else: - range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(0)[0], '+inf', '-inf', start=0, num=8) - # if empty, get yesterday top providers - range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(1)[1], '+inf', '-inf', start=0, num=8) if range_providers == [] else range_providers - - - # if still empty, takes from all providers - if range_providers == []: - print('today provider empty') - range_providers = r_serv_charts.smembers('all_provider_set') - - return jsonify(list(range_providers)) - - elif provider is not None: - to_return = {} - - cur_provider_name = provider + '_' - list_date = {} - for cur_timestamp in range(int(dateStart_timestamp), int(dateStart_timestamp)-sevenDays-oneHour, -oneHour): - cur_set_name = cur_provider_name + str(cur_timestamp) - - list_value = [] - for cur_id in r_serv_sentiment.smembers(cur_set_name): - cur_value = (r_serv_sentiment.get(cur_id)) - list_value.append(cur_value) - list_date[cur_timestamp] = list_value - to_return[provider] = list_date - - return jsonify(to_return) - return "Bad request" - - - -@sentiments.route("/sentiment_analysis_plot_tool/") -@login_required -@login_read_only -def sentiment_analysis_plot_tool(): - return render_template("sentiment_analysis_plot_tool.html") - - - -@sentiments.route("/sentiment_analysis_plot_tool_getdata/", methods=['GET']) -@login_required -@login_read_only -def sentiment_analysis_plot_tool_getdata(): - getProviders = request.args.get('getProviders') - - if getProviders == 'True': - providers = [] - for cur_provider in r_serv_charts.smembers('all_provider_set'): - providers.append(cur_provider) - return jsonify(providers) - - else: - query = request.args.get('query') - query = query.split(',') - Qdate = request.args.get('Qdate') - - date1 = (Qdate.split('-')[0]).split('/') - date1 = datetime.date(int(date1[2]), int(date1[0]), int(date1[1])) - - date2 = (Qdate.split('-')[1]).split('/') - date2 = datetime.date(int(date2[2]), int(date2[0]), int(date2[1])) - - timestamp1 = calendar.timegm(date1.timetuple()) - timestamp2 = calendar.timegm(date2.timetuple()) - - oneHour = 60*60 - oneDay = oneHour*24 - - to_return = {} - for cur_provider in query: - list_date = {} - cur_provider_name = cur_provider + '_' - for cur_timestamp in range(int(timestamp1), int(timestamp2)+oneDay, oneHour): - cur_set_name = cur_provider_name + str(cur_timestamp) - - list_value = [] - for cur_id in r_serv_sentiment.smembers(cur_set_name): - cur_value = (r_serv_sentiment.get(cur_id)) - list_value.append(cur_value) - list_date[cur_timestamp] = list_value - to_return[cur_provider] = list_date - - return 
jsonify(to_return) - -# ========= REGISTRATION ========= -app.register_blueprint(sentiments, url_prefix=baseUrl) +# ''' +# Flask functions and routes for the trending modules page +# ''' +# import os +# import sys +# import datetime +# import calendar +# import flask +# from flask import Flask, render_template, jsonify, request, Blueprint +# +# from Role_Manager import login_admin, login_analyst, login_read_only +# from flask_login import login_required +# +# sys.path.append(os.environ['AIL_BIN']) +# ################################## +# # Import Project packages +# ################################## +# from packages.Date import Date +# +# # ============ VARIABLES ============ +# import Flask_config +# +# app = Flask_config.app +# baseUrl = Flask_config.baseUrl +# r_serv_charts = Flask_config.r_serv_charts +# r_serv_sentiment = Flask_config.r_serv_sentiment +# +# sentiments = Blueprint('sentiments', __name__, template_folder='templates') +# +# # ============ FUNCTIONS ============ +# +# def get_date_range(num_day): +# curr_date = datetime.date.today() +# date = Date(str(curr_date.year)+str(curr_date.month).zfill(2)+str(curr_date.day).zfill(2)) +# date_list = [] +# +# for i in range(0, num_day+1): +# date_list.append(date.substract_day(i)) +# return date_list +# +# +# # ============ ROUTES ============ +# +# @sentiments.route("/sentiment_analysis_trending/") +# @login_required +# @login_read_only +# def sentiment_analysis_trending(): +# return render_template("sentiment_analysis_trending.html") +# +# +# @sentiments.route("/sentiment_analysis_getplotdata/", methods=['GET']) +# @login_required +# @login_read_only +# def sentiment_analysis_getplotdata(): +# # Get the top providers based on number of pastes +# oneHour = 60*60 +# sevenDays = oneHour*24*7 +# dateStart = datetime.datetime.now() +# dateStart = dateStart.replace(minute=0, second=0, microsecond=0) +# dateStart_timestamp = calendar.timegm(dateStart.timetuple()) +# +# getAllProviders = request.args.get('getProviders') +# provider = request.args.get('provider') +# allProvider = request.args.get('all') +# if getAllProviders == 'True': +# if allProvider == "True": +# range_providers = r_serv_charts.smembers('all_provider_set') +# +# return jsonify(list(range_providers)) +# else: +# range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(0)[0], '+inf', '-inf', start=0, num=8) +# # if empty, get yesterday top providers +# range_providers = r_serv_charts.zrevrangebyscore('providers_set_'+ get_date_range(1)[1], '+inf', '-inf', start=0, num=8) if range_providers == [] else range_providers +# +# +# # if still empty, takes from all providers +# if range_providers == []: +# print('today provider empty') +# range_providers = r_serv_charts.smembers('all_provider_set') +# +# return jsonify(list(range_providers)) +# +# elif provider is not None: +# to_return = {} +# +# cur_provider_name = provider + '_' +# list_date = {} +# for cur_timestamp in range(int(dateStart_timestamp), int(dateStart_timestamp)-sevenDays-oneHour, -oneHour): +# cur_set_name = cur_provider_name + str(cur_timestamp) +# +# list_value = [] +# for cur_id in r_serv_sentiment.smembers(cur_set_name): +# cur_value = (r_serv_sentiment.get(cur_id)) +# list_value.append(cur_value) +# list_date[cur_timestamp] = list_value +# to_return[provider] = list_date +# +# return jsonify(to_return) +# return "Bad request" +# +# +# +# @sentiments.route("/sentiment_analysis_plot_tool/") +# @login_required +# @login_read_only +# def sentiment_analysis_plot_tool(): +# return 
render_template("sentiment_analysis_plot_tool.html") +# +# +# +# @sentiments.route("/sentiment_analysis_plot_tool_getdata/", methods=['GET']) +# @login_required +# @login_read_only +# def sentiment_analysis_plot_tool_getdata(): +# getProviders = request.args.get('getProviders') +# +# if getProviders == 'True': +# providers = [] +# for cur_provider in r_serv_charts.smembers('all_provider_set'): +# providers.append(cur_provider) +# return jsonify(providers) +# +# else: +# query = request.args.get('query') +# query = query.split(',') +# Qdate = request.args.get('Qdate') +# +# date1 = (Qdate.split('-')[0]).split('/') +# date1 = datetime.date(int(date1[2]), int(date1[0]), int(date1[1])) +# +# date2 = (Qdate.split('-')[1]).split('/') +# date2 = datetime.date(int(date2[2]), int(date2[0]), int(date2[1])) +# +# timestamp1 = calendar.timegm(date1.timetuple()) +# timestamp2 = calendar.timegm(date2.timetuple()) +# +# oneHour = 60*60 +# oneDay = oneHour*24 +# +# to_return = {} +# for cur_provider in query: +# list_date = {} +# cur_provider_name = cur_provider + '_' +# for cur_timestamp in range(int(timestamp1), int(timestamp2)+oneDay, oneHour): +# cur_set_name = cur_provider_name + str(cur_timestamp) +# +# list_value = [] +# for cur_id in r_serv_sentiment.smembers(cur_set_name): +# cur_value = (r_serv_sentiment.get(cur_id)) +# list_value.append(cur_value) +# list_date[cur_timestamp] = list_value +# to_return[cur_provider] = list_date +# +# return jsonify(to_return) +# +# # ========= REGISTRATION ========= +# app.register_blueprint(sentiments, url_prefix=baseUrl) diff --git a/var/www/modules/sentiment/templates/header_sentiment.html b/var/www/modules/sentiment/templates/header_sentiment.html index 7e757604..7696c9a7 100644 --- a/var/www/modules/sentiment/templates/header_sentiment.html +++ b/var/www/modules/sentiment/templates/header_sentiment.html @@ -1,7 +1,7 @@ -
[Extraction residue: the body of the header_sentiment.html hunk and the diff headers of the removed term-tracker templates were lost here; the deleted HTML markup survives only as flattened cell text and Jinja fragments. Recoverable structure of the removed templates:
- a duplicates table (columns: Username, Similarity, # concerned paste(s), Action);
- a syntax help table: a regex is surrounded by '/' (example: /([a-z])\w+([a-z])\n/); a set of terms is surrounded by '\' (example: \[term1, term2, ...]\); a custom matching threshold (default 50) can be appended as an inner list (example: \[term1, term2, ..., [75]]\);
- three parallel tracked-term tables (term sets, regexes, plain terms) with columns Term, Added date, Day occurrence, Week occurrence, Month occurrence, # tracked paste, Action, Notification E-Mails; each row loops over notificationTagsTermMapping[...] and notificationEMailTermMapping[...] and reads its counters from trackSet_list_values / trackReg_list_values / track_list_values plus the matching *_num_of_paste lists, incrementing uniq_id.modal_id per tag modal;
- a blacklisted-terms table (columns: Term, Added date, Action);
- six identical table headers (columns: Term, Value, Action, Show, Position).]
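Not from the patch itself: a minimal, self-contained sketch of the (payload, status) response convention that the disabled add_item_tags endpoint in the Flask_restApi.py hunk above now follows. The bare Flask app and the '/api/v1/import/item/tag' route path are illustrative assumptions; the real handler sits on the restApi blueprint behind AIL's authentication decorators, omitted here.

#!/usr/bin/env python3
# -*-coding:UTF-8 -*
# Sketch only: route path and standalone app are hypothetical stand-ins.
import json
from flask import Flask, Response

app = Flask(__name__)

@app.route('/api/v1/import/item/tag', methods=['POST'])
def add_item_tags():
    # Handlers build a (dict, http_status) tuple, serialize the dict
    # themselves, and let Flask apply the status code from res[1].
    res = {'error': 'disabled endpoint'}, 500
    return Response(json.dumps(res[0], indent=2, sort_keys=True),
                    mimetype='application/json'), res[1]

if __name__ == '__main__':
    app.run(port=7000)  # arbitrary port for the sketch

Keeping the tuple shape means re-enabling the endpoint later is a one-line swap back to Tag.api_add_obj_tags(...), which, per the commented-out call in the hunk, returns the same (payload, status) pair.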