diff --git a/bin/MISP_The_Hive_feeder.py b/bin/MISP_The_Hive_feeder.py
index 07c121c9..2bc33431 100755
--- a/bin/MISP_The_Hive_feeder.py
+++ b/bin/MISP_The_Hive_feeder.py
@@ -55,21 +55,22 @@ from thehive4py.models import Case, CaseTask, CustomFieldHelper
 
 def create_the_hive_alert(source, path, tag):
-    tags = list(r_serv_metadata.smembers('tag:'+path))
+    # # TODO: check items status (processed by all modules)
+    # # TODO: add item metadata: decoded content, link to auto crawled content, pgp correlation, cryptocurrency correlation...
+    # # # TODO: description, add AIL link:show items ?
+    tags = list( r_serv_metadata.smembers('tag:{}'.format(path)) )
 
     artifacts = [
         AlertArtifact( dataType='uuid-ail', data=r_serv_db.get('ail:uuid') ),
         AlertArtifact( dataType='file', data=path, tags=tags )
     ]
 
-    l_tags = tag.split(',')
-
     # Prepare the sample Alert
     sourceRef = str(uuid.uuid4())[0:6]
     alert = Alert(title='AIL Leak',
                   tlp=3,
-                  tags=l_tags,
-                  description='infoleak',
+                  tags=tags,
+                  description='AIL Leak, triggered by {}'.format(tag),
                   type='ail',
                   source=source,
                   sourceRef=sourceRef,
diff --git a/bin/PgpDump.py b/bin/PgpDump.py
index 21ffd263..4b7ec629 100755
--- a/bin/PgpDump.py
+++ b/bin/PgpDump.py
@@ -82,7 +82,12 @@ def get_pgp_packet(message, save_path):
     process1 = subprocess.Popen([ 'echo', '-e', save_path], stdout=subprocess.PIPE)
     process2 = subprocess.Popen([ 'pgpdump'], stdin=process1.stdout, stdout=subprocess.PIPE)
     process1.stdout.close()
-    output = process2.communicate()[0].decode()
+    output = process2.communicate()[0]
+    try:
+        output = output.decode()
+    except UnicodeDecodeError:
+        publisher.error('Error PgpDump UnicodeDecodeError: {}'.format(message))
+        output = ''
     return output
 
 def get_pgp_packet_file(file):
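The PgpDump.py hunk above replaces a bare .decode() on the pgpdump output: pgpdump can emit bytes that are not valid UTF-8 (user IDs in key packets may use arbitrary encodings), and the old code crashed the module with UnicodeDecodeError. The same guard pattern in isolation, as a minimal sketch (it assumes a pgpdump binary on PATH; the helper name is hypothetical):

    import subprocess

    def safe_pgpdump(save_path):
        # Feed the saved key material to pgpdump via echo -e, mirroring the
        # patched get_pgp_packet(); capture raw bytes, not text.
        p1 = subprocess.Popen(['echo', '-e', save_path], stdout=subprocess.PIPE)
        p2 = subprocess.Popen(['pgpdump'], stdin=p1.stdout, stdout=subprocess.PIPE)
        p1.stdout.close()
        raw = p2.communicate()[0]
        # Decode defensively: a single packet with a non-UTF-8 user ID
        # should not crash the whole worker.
        try:
            return raw.decode()
        except UnicodeDecodeError:
            return ''

raw.decode(errors='replace') would preserve partial output instead of dropping it; the patch opts to log the failure through publisher.error() and return an empty string.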
diff --git a/bin/packages/HiddenServices.py b/bin/packages/HiddenServices.py
index f1ed0767..b60c6d99 100755
--- a/bin/packages/HiddenServices.py
+++ b/bin/packages/HiddenServices.py
@@ -17,10 +17,14 @@ Conditions to fulfill to be able to use this class correctly:
 """
 
 import os
+import time
 import gzip
 import redis
 import random
 
+from io import BytesIO
+import zipfile
+
 import configparser
 import sys
 sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
@@ -71,6 +75,7 @@ class HiddenServices(object):
             self.paste_crawled_directory = os.path.join(self.paste_directory, cfg.get("Directories", "crawled"))
             self.paste_crawled_directory_name = cfg.get("Directories", "crawled")
             self.screenshot_directory = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "crawled_screenshot"))
+            self.screenshot_directory_screenshot = os.path.join(self.screenshot_directory, 'screenshot')
         elif type == 'i2p':
             self.paste_directory = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "crawled_screenshot"))
             self.screenshot_directory = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "crawled_screenshot"))
@@ -120,6 +125,26 @@ class HiddenServices(object):
         for tag in p_tags:
             self.tags[tag] = self.tags.get(tag, 0) + 1
 
+    def extract_epoch_from_history(self, crawled_history):
+        epoch_list = []
+        for res, epoch_val in crawled_history:
+            epoch_val = int(epoch_val) # force int
+            try:
+                # domain down
+                if int(res) == epoch_val:
+                    status = False
+                # domain up
+                else:
+                    status = True
+            except ValueError:
+                status = True
+                epoch_val = int(epoch_val) # force int
+            epoch_list.append((epoch_val, time.strftime('%Y/%m/%d - %H:%M.%S', time.gmtime(epoch_val)), status))
+        return epoch_list
+
+    def get_domain_crawled_history(self):
+        return self.r_serv_onion.zrange('crawler_history_{}:{}:{}'.format(self.type, self.domain, self.port), 0, -1, withscores=True)
+
     def get_first_crawled(self):
         res = self.r_serv_onion.zrange('crawler_history_{}:{}:{}'.format(self.type, self.domain, self.port), 0, 0, withscores=True)
         if res:
@@ -230,6 +255,13 @@ class HiddenServices(object):
         return l_crawled_pastes
     '''
 
+    def get_item_screenshot(self, item):
+        screenshot = self.r_serv_metadata.hget('paste_metadata:{}'.format(item), 'screenshot')
+        if screenshot:
+            screenshot = os.path.join(screenshot[0:2], screenshot[2:4], screenshot[4:6], screenshot[6:8], screenshot[8:10], screenshot[10:12], screenshot[12:])
+            return screenshot
+        return ''
+
     def get_domain_random_screenshot(self, l_crawled_pastes, num_screenshot = 1):
         l_screenshot_paste = []
         for paste in l_crawled_pastes:
@@ -237,9 +269,8 @@
             origin_paste = paste
             paste= paste.replace(self.paste_directory+'/', '')
 
-            screenshot = self.r_serv_metadata.hget('paste_metadata:{}'.format(paste), 'screenshot')
+            screenshot = self.get_item_screenshot(paste)
             if screenshot:
-                screenshot = os.path.join(screenshot[0:2], screenshot[2:4], screenshot[4:6], screenshot[6:8], screenshot[8:10], screenshot[10:12], screenshot[12:])
                 l_screenshot_paste.append({'screenshot': screenshot, 'item': origin_paste})
 
         if len(l_screenshot_paste) > num_screenshot:
@@ -250,6 +281,35 @@
         else:
             return l_screenshot_paste
 
+    def get_all_domain_screenshot(self, l_crawled_pastes, filename=False):
+        l_screenshot_paste = []
+        for paste in l_crawled_pastes:
+            ## FIXME: # TODO: remove me
+            origin_paste = paste
+            paste= paste.replace(self.paste_directory+'/', '')
+
+            screenshot = self.get_item_screenshot(paste)
+            if screenshot:
+                screenshot = screenshot + '.png'
+                screenshot_full_path = os.path.join(self.screenshot_directory_screenshot, screenshot)
+                if filename:
+                    screen_file_name = os.path.basename(paste) + '.png'
+                    l_screenshot_paste.append( (screenshot_full_path, screen_file_name) )
+                else:
+                    l_screenshot_paste.append(screenshot_full_path)
+        return l_screenshot_paste
+
+    def get_all_item_full_path(self, l_items, filename=False):
+        l_full_items = []
+        for item in l_items:
+            item = os.path.join(self.PASTES_FOLDER, item)
+            if filename:
+                file_name = os.path.basename(item) + '.gz'
+                l_full_items.append( (item, file_name) )
+            else:
+                l_full_items.append(item)
+        return l_full_items
+
     def get_crawled_pastes_by_date(self, date):
 
         pastes_path = os.path.join(self.paste_crawled_directory, date[0:4], date[4:6], date[6:8])
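get_item_screenshot() above expands the flat hash stored in redis into the nested directory layout used on disk: the first twelve characters are split into six two-character path segments, a common way to keep any single screenshot directory from accumulating an oversized number of entries. The transformation in isolation (the hash value is made up):

    import os

    def expand_screenshot_path(name):
        # '54daf45d61ac...' -> '54/da/f4/5d/61/ac/<rest>'
        return os.path.join(name[0:2], name[2:4], name[4:6], name[6:8],
                            name[8:10], name[10:12], name[12:])

    # hypothetical sha256-like identifier
    print(expand_screenshot_path('54daf45d61ac8b4e582f9d8cf07e80c5ba0a3723'))
    # -> 54/da/f4/5d/61/ac/8b4e582f9d8cf07e80c5ba0a3723

With filename=True, get_all_domain_screenshot() and get_all_item_full_path() return (full_path, archive_name) tuples, which is the shape the zip-writing code in the next hunk consumes.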
@@ -258,6 +318,63 @@
         l_crawled_pastes = []
         return l_crawled_pastes
 
+    def get_all_har(self, l_pastes, filename=False):
+        all_har = []
+        for item in l_pastes:
+            if filename:
+                all_har.append( (self.get_item_har(item), os.path.basename(item) + '.json') )
+            else:
+                all_har.append(self.get_item_har(item))
+        return all_har
+
+
+    def get_item_har(self, item_path):
+        item_path = item_path.replace('{}/'.format(self.paste_crawled_directory_name), '', 1)
+        har_path = os.path.join(self.screenshot_directory, item_path) + '.json'
+        return har_path
+
+    def create_domain_basic_archive(self, l_pastes):
+        all_har = self.get_all_har(l_pastes, filename=True)
+        all_screenshot = self.get_all_domain_screenshot(l_pastes, filename=True)
+        all_items = self.get_all_item_full_path(l_pastes, filename=True)
+
+        # try:
+
+        # zip buffer
+        zip_buffer = BytesIO()
+
+        with zipfile.ZipFile(zip_buffer, "a") as zf:
+
+            #print(all_har)
+            self.write_in_zip_buffer(zf, all_har)
+            self.write_in_zip_buffer(zf, all_screenshot)
+            self.write_in_zip_buffer(zf, all_items)
+
+            # write map url
+            map_file_content = self.get_metadata_file(l_pastes).encode()
+            zf.writestr( '_URL_MAP_', BytesIO(map_file_content).getvalue())
+
+        zip_buffer.seek(0)
+        return zip_buffer
+
+        # except Exception as e:
+        #     print(e)
+        #     return 'Server Error'
+
+    def write_in_zip_buffer(self, zf, list_file):
+        for file_path, file_name in list_file:
+            with open(file_path, "rb") as f:
+                har_content = f.read()
+                zf.writestr( file_name, BytesIO(har_content).getvalue())
+
+    def get_metadata_file(self, list_items):
+        file_content = ''
+        dict_url = self.get_all_links(list_items)
+        for key in dict_url:
+            file_content = '{}\n{} : {}'.format(file_content, os.path.basename(key), dict_url[key])
+        return file_content
+
+
     '''
     def get_last_crawled_pastes_fileSearch(self):
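create_domain_basic_archive() above assembles the HAR files, screenshots, and gzipped items of a domain into a ZIP built entirely in memory, plus a generated _URL_MAP_ member, so the web app can hand the buffer straight to the client without touching disk. The underlying BytesIO/zipfile pattern, reduced to a self-contained sketch (the function name and arguments are made up):

    import zipfile
    from io import BytesIO

    def build_archive(files, url_map):
        # files: iterable of (path_on_disk, name_in_zip); url_map: text index
        zip_buffer = BytesIO()
        with zipfile.ZipFile(zip_buffer, 'a') as zf:
            for file_path, file_name in files:
                with open(file_path, 'rb') as f:
                    zf.writestr(file_name, f.read())
            # members can also be created from bytes with no on-disk source
            zf.writestr('_URL_MAP_', url_map.encode())
        zip_buffer.seek(0)  # rewind so the caller reads from the start
        return zip_buffer

Note that ZipFile.writestr() accepts bytes directly, so the BytesIO(...).getvalue() round-trip in the patch is redundant but harmless.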
diff --git a/var/www/modules/dashboard/templates/index.html b/var/www/modules/dashboard/templates/index.html
index 49f3e59b..812e1ea0 100644
--- a/var/www/modules/dashboard/templates/index.html
+++ b/var/www/modules/dashboard/templates/index.html
@@ -2,23 +2,40 @@
-
-
-
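Finally, a usage note for the crawler-history helpers added in HiddenServices.py above. The status logic below mirrors extract_epoch_from_history(): a zset member equal to its own score records a failed crawl, while a non-numeric member (apparently the root item of a successful crawl) raises ValueError on int() and is treated as up. The member shapes and values are fabricated for illustration:

    import time

    # (member, score) pairs, as returned by
    # zrange('crawler_history_...', 0, -1, withscores=True)
    history = [
        ('1550000000', 1550000000.0),                        # member == score: domain was down
        ('crawled/2019/02/15/example.onion', 1550100000.0),  # item path: domain was up
    ]

    for res, epoch_val in history:
        epoch_val = int(epoch_val)
        try:
            # a numeric member matching its score marks a failed crawl
            status = int(res) != epoch_val
        except ValueError:
            # non-numeric member: the root item of a successful crawl
            status = True
        print(epoch_val, time.strftime('%Y/%m/%d - %H:%M.%S', time.gmtime(epoch_val)), status)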