Mirror of https://github.com/ail-project/ail-framework.git
Synced 2024-11-10 00:28:22 +00:00

Commit d295c084c6 (parent b5d285b5b4)
chg: [decoded migration] migrate Decoded Objects

20 changed files with 1210 additions and 727 deletions
@@ -612,8 +612,10 @@ def domain_migration():
 def get_estimated_type(decoded_id):
     return r_serv_metadata.hget(f'metadata_hash:{decoded_id}', 'estimated_type')
 
-def get_decoded_items_list_by_decoder(decoder_type, decoded_id): ###################
-    #return r_serv_metadata.zrange('nb_seen_hash:{}'.format(sha1_string), 0, -1)
+def get_hash_size(decoded_id):
+    return r_serv_metadata.hget(f'metadata_hash:{decoded_id}', 'size')
+
+def get_decoded_items_list_by_decoder(decoder_type, decoded_id):
     return r_serv_metadata.zrange(f'{decoder_type}_hash:{decoded_id}', 0, -1)
 
 def get_decodeds_tags(decoded_id):
@@ -621,10 +623,10 @@ def get_decodeds_tags(decoded_id):
 
 def decodeds_migration():
     print('Decoded MIGRATION...')
-    decoder_names = ['base64', 'binary', 'hexadecimal']
+    algo_names = ['base64', 'binary', 'hexadecimal']
 
     Decodeds._delete_old_json_descriptor()
-    for decoded_id in Decodeds.get_all_decodeds():
+    for decoded_id in Decodeds.get_all_decodeds_files():
         mimetype = get_estimated_type(decoded_id)
         # ignore invalid object
         if mimetype is None:
@@ -633,19 +635,23 @@ def decodeds_migration():
        print(decoded_id)
 
        decoded = Decodeds.Decoded(decoded_id)
-       filepath = decoded.get_filepath(mimetype=mimetype)
-       decoded._save_meta(filepath, mimetype)
+       decoded._add_create()
+       decoded.set_mimetype(mimetype)
 
+       size = get_hash_size(decoded_id)
+       if not size:
+           filepath = decoded.get_filepath(mimetype=mimetype)
+           size = os.path.getsize(filepath)
+       decoded.set_size(size)
+
        for tag in get_decodeds_tags(decoded_id):
            decoded.add_tag(tag)
 
-       for decoder_type in decoder_names:
-           for item_id in get_decoded_items_list_by_decoder(decoder_type, decoded_id):
-               print(item_id, decoder_type)
+       for algo in algo_names:
+           for item_id in get_decoded_items_list_by_decoder(algo, decoded_id):
+               print(item_id, algo)
                date = get_item_date(item_id)
-               #for decoder_type in :
-
-               decoded.add(decoder_type, date, item_id, mimetype)
+               decoded.add(algo, date, item_id, mimetype=mimetype)
 
###############################
# #
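For orientation, a hedged sketch of what the migration above amounts to for a single object, assuming the legacy ARDB keys read by the helpers in this script (`metadata_hash:{sha1}`, `{algo}_hash:{sha1}`); the sha1 is invented:

    # Sketch of one object's migration, following the diff above.
    decoded_id = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'  # hypothetical sha1
    mimetype = get_estimated_type(decoded_id)     # legacy ARDB read
    if mimetype:
        decoded = Decodeds.Decoded(decoded_id)
        decoded._add_create()                     # register in decoded:all
        decoded.set_mimetype(mimetype)            # meta:decoded:{sha1} field 'mime'
        # prefer the stored size, fall back to the file on disk
        size = get_hash_size(decoded_id) or os.path.getsize(decoded.get_filepath(mimetype=mimetype))
        decoded.set_size(size)
        for algo in ['base64', 'binary', 'hexadecimal']:
            for item_id in get_decoded_items_list_by_decoder(algo, decoded_id):
                decoded.add(algo, get_item_date(item_id), item_id, mimetype=mimetype)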
@@ -868,7 +874,7 @@ if __name__ == '__main__':
     # items_migration()
     #crawler_migration()
     # domain_migration() # TO TEST ###########################
-    #decodeds_migration()
+    decodeds_migration()
     # screenshots_migration()
     # subtypes_obj_migration()
     # ail_2_ail_migration()
@@ -876,7 +882,7 @@ if __name__ == '__main__':
     # investigations_migration()
     # statistics_migration()
 
-    cves_migration()
+    # cves_migration()
 
     # custom tags
     # crawler queues + auto_crawlers
@@ -96,7 +96,10 @@ def is_obj_correlated(obj_type, subtype, obj_id, obj2_type, subtype2, obj2_id):
         subtype = ''
     if subtype2 is None:
         subtype2 = ''
-    return r_metadata.sismember(f'correlation:obj:{obj_type}:{subtype}:{obj2_type}:{obj_id}', f'{subtype2}:{obj2_id}')
+    try:
+        return r_metadata.sismember(f'correlation:obj:{obj_type}:{subtype}:{obj2_type}:{obj_id}', f'{subtype2}:{obj2_id}')
+    except:
+        return False
 
 def add_obj_correlation(obj1_type, subtype1, obj1_id, obj2_type, subtype2, obj2_id):
     if subtype1 is None:
@@ -27,7 +27,7 @@ config_loader = None
 # DOMAIN -> subtype = domain type
 
 # TAG -> type = "TAG"
-# TAG -> subtype = "OBJ:"tag
+# TAG -> subtype = f"OBJ:{tag}"
 
 def load_obj_date_first_last():
     # LOAD FIRST DATE
@@ -84,7 +84,7 @@ class Cve(AbstractDaterangeObject):
             # 'Modified'
             return json_response
         else:
-            return {'error': 'cve search error'} # TODO
+            return {'error': f'{response.status_code}'}
 
 # TODO ADD SEARCH FUNCTION
 
@@ -122,5 +122,14 @@ def api_get_cves_meta_by_daterange(date_from, date_to):
     date = Date.sanitise_date_range(date_from, date_to)
     return get_cves_meta(get_cves_by_daterange(date['date_from'], date['date_to']), options=['sparkline'])
 
+def get_cve_graphline(cve_id):
+    cve = Cve(cve_id)
+    graphline = []
+    if cve.exists():
+        nb_day = 30
+        for date in Date.get_previous_date_list(nb_day):
+            graphline.append({'date': f'{date[0:4]}-{date[4:6]}-{date[6:8]}', 'value': cve.get_nb_seen_by_date(date)})
+    return graphline
+
 
 # if __name__ == '__main__':
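The new `get_cve_graphline` walks the last 30 days and emits one point per day; the values below are invented, but the shape is what the new `/objects/cve/graphline/json` route (added later in this commit) serializes:

    # Illustrative payload (invented values):
    [{'date': '2022-06-01', 'value': 0},
     {'date': '2022-06-02', 'value': 3},
     # ... one entry per day, 30 entries total
     {'date': '2022-06-30', 'value': 1}]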
@@ -8,12 +8,14 @@ import zipfile
 from flask import url_for
 from io import BytesIO
+from pymisp import MISPObject
 
 sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
 from lib.ConfigLoader import ConfigLoader
-from lib.objects.abstract_object import AbstractObject
-from lib.item_basic import is_crawled, get_item_domain
+from lib.objects.abstract_daterange_object import AbstractDaterangeObject
 
 from packages import Date
 
 sys.path.append('../../configs/keys')
@@ -22,17 +24,17 @@ try:
     if vt_key != '':
         VT_TOKEN = vt_key
         VT_ENABLED = True
-        #print('VT submission is enabled')
+        # print('VT submission is enabled')
     else:
         VT_ENABLED = False
-        #print('VT submission is disabled')
+        # print('VT submission is disabled')
 except:
     VT_TOKEN = None
     VT_ENABLED = False
-    #print('VT submission is disabled')
+    # print('VT submission is disabled')
 
 config_loader = ConfigLoader()
-r_metadata = config_loader.get_db_conn("Kvrocks_Objects")
+r_objects = config_loader.get_db_conn("Kvrocks_Objects")
 
 r_metadata = config_loader.get_redis_conn("ARDB_Metadata")
 HASH_DIR = config_loader.get_config_str('Directories', 'hash')
@@ -46,7 +48,7 @@ config_loader = None
 
 # # TODO: COMPLETE CLASS
 
-class Decoded(AbstractObject):
+class Decoded(AbstractDaterangeObject):
     """
     AIL Decoded Object. (strings)
     """
@@ -54,9 +56,6 @@ class Decoded(AbstractObject):
     def __init__(self, id):
         super(Decoded, self).__init__('decoded', id)
 
-    def exists(self):
-        return r_metadata.exists(f'metadata_hash:{self.id}')
-
     # def get_ail_2_ail_payload(self):
     #     payload = {'raw': self.get_gzip_content(b64=True),
     #                 'compress': 'gzip'}
@@ -74,9 +73,10 @@ class Decoded(AbstractObject):
         url = f'{baseurl}/correlation/show?type={self.type}&id={self.id}'
         return url
 
-    def get_svg_icon(self):
-        file_type = self.get_estimated_type()
-        file_type = file_type.split('/')[0]
+    def get_svg_icon(self, mimetype=None):
+        if not mimetype:
+            mimetype = self.get_mimetype()
+        file_type = mimetype.split('/')[0]
         if file_type == 'application':
             icon = '\uf15b'
         elif file_type == 'audio':
@@ -87,19 +87,28 @@ class Decoded(AbstractObject):
             icon = '\uf15c'
         else:
             icon = '\uf249'
-        return {'style': 'fas', 'icon': icon, 'color': '#88CCEE', 'radius':5}
+        return {'style': 'fas', 'icon': icon, 'color': '#88CCEE', 'radius': 5}
 
     '''
     Return the estimated type of a given decoded item.
 
    :param sha1_string: sha1_string
     '''
-    def get_estimated_type(self):
-        return r_metadata.hget(f'metadata_hash:{self.id}', 'estimated_type')
+    def get_mimetype(self):
+        return r_objects.hget(f'meta:{self.type}:{self.id}', 'mime')
+
+    def set_mimetype(self, mimetype):
+        return r_objects.hset(f'meta:{self.type}:{self.id}', 'mime', mimetype)
+
+    def get_size(self):
+        return r_objects.hget(f'meta:{self.type}:{self.id}', 'size')
+
+    def set_size(self, size):
+        return r_objects.hset(f'meta:{self.type}:{self.id}', 'size', int(size))
 
     def get_rel_path(self, mimetype=None):
         if not mimetype:
-            mimetype = self.get_estimated_type()
+            mimetype = self.get_mimetype()
         return os.path.join(HASH_DIR, mimetype, self.id[0:2], self.id)
 
     def get_filepath(self, mimetype=None):
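The getters and setters above pin down the storage move: fields that lived in the legacy ARDB hash `metadata_hash:{sha1}` now live in the Kvrocks hash `meta:decoded:{sha1}`, with `estimated_type` renamed to `mime`. A sketch of the mapping (field names taken from this diff; the sha1 is a truncated, hypothetical example):

    # Legacy ARDB                            ->  New Kvrocks
    # metadata_hash:{sha1}  'estimated_type' ->  meta:decoded:{sha1}  'mime'
    # metadata_hash:{sha1}  'size'           ->  meta:decoded:{sha1}  'size'
    # metadata_hash:{sha1}  'vt_link'        ->  meta:decoded:{sha1}  'vt_link'
    # metadata_hash:{sha1}  'vt_report'      ->  meta:decoded:{sha1}  'vt_report'
    decoded = Decoded('a94a8fe5...')
    decoded.set_mimetype('application/zip')
    decoded.get_mimetype()  # -> 'application/zip'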
@@ -112,25 +121,24 @@ class Decoded(AbstractObject):
         return file_content
 
     def get_zip_content(self):
-        mimetype = self.get_estimated_type()
+        # mimetype = self.get_estimated_type()
         zip_content = BytesIO()
         with zipfile.ZipFile(zip_content, "w") as zf:
             # TODO: Fix password
-            #zf.setpassword(b"infected")
-            zf.writestr( self.id, self.get_content().getvalue())
+            # zf.setpassword(b"infected")
+            zf.writestr(self.id, self.get_content().getvalue())
         zip_content.seek(0)
         return zip_content
 
 
     def get_misp_object(self):
         obj_attrs = []
         obj = MISPObject('file')
         obj.first_seen = self.get_first_seen()
         obj.last_seen = self.get_last_seen()
 
-        obj_attrs.append( obj.add_attribute('sha1', value=self.id) )
-        obj_attrs.append( obj.add_attribute('mimetype', value=self.get_estimated_type()) )
-        obj_attrs.append( obj.add_attribute('malware-sample', value=self.id, data=self.get_content()) )
+        obj_attrs.append( obj.add_attribute('sha1', value=self.id))
+        obj_attrs.append( obj.add_attribute('mimetype', value=self.get_mimetype()))
+        obj_attrs.append( obj.add_attribute('malware-sample', value=self.id, data=self.get_content()))
         for obj_attr in obj_attrs:
             for tag in self.get_tags():
                 obj_attr.add_tag(tag)
@@ -139,176 +147,84 @@ class Decoded(AbstractObject):
     ############################################################################
     ############################################################################
     ############################################################################
 
     def get_decoders(self):
         return ['base64', 'binary', 'hexadecimal']
 
-    def get_first_seen(self):
-        res = r_metadata.hget(f'metadata_hash:{self.id}', 'first_seen')
-        if res:
-            return int(res)
-        else:
-            return 99999999
-
-    def get_last_seen(self):
-        res = r_metadata.hget(f'metadata_hash:{self.id}', 'last_seen')
-        if res:
-            return int(res)
-        else:
-            return 0
-
-    def set_first_seen(self, date):
-        r_metadata.hset(f'metadata_hash:{self.id}', 'first_seen', date)
-
-    def set_last_seen(self, date):
-        r_metadata.hset(f'metadata_hash:{self.id}', 'last_seen', date)
-
-    def update_daterange(self, date):
-        first_seen = self.get_first_seen()
-        last_seen = self.get_last_seen()
-        if date < first_seen:
-            self.set_first_seen(date)
-        if date > last_seen:
-            self.set_last_seen(date)
-
     def get_meta(self, options=set()):
-        meta = {'id': self.id,
-                'subtype': self.subtype,
-                'tags': self.get_tags()}
+        meta = self._get_meta(options=options)
+        meta['id'] = self.id
+        if 'mimetype' in options:
+            meta['mimetype'] = self.get_mimetype()
+        if 'icon' in options:
+            if 'mimetype' in meta:
+                mimetype = meta['mimetype']
+            else:
+                mimetype = None
+            meta['icon'] = self.get_svg_icon(mimetype=mimetype)
+        if 'size' in options:
+            meta['size'] = self.get_size()
+        if 'tags' in options:
+            meta['tags'] = self.get_tags()
+        if 'vt' in options:
+            meta['vt'] = self.get_meta_vt()
         return meta
 
     def get_meta_vt(self):
-        meta = {}
-        meta['link'] = r_metadata.hget(f'metadata_hash:{self.id}', 'vt_link')
-        meta['report'] = r_metadata.hget(f'metadata_hash:{self.id}', 'vt_report')
-        return meta
+        link = r_objects.hget(f'meta:{self.type}:{self.id}', 'vt_link')
+        report = r_objects.hget(f'meta:{self.type}:{self.id}', 'vt_report')
+        if link or report:
+            return {'link': link, 'report': report}
+        else:
+            return {}
 
     def guess_mimetype(self, bytes_content):
         return magic.from_buffer(bytes_content, mime=True)
 
-    def _save_meta(self, filepath, mimetype):
-        # create hash metadata
-        r_metadata.hset(f'metadata_hash:{self.id}', 'size', os.path.getsize(filepath))
-        r_metadata.hset(f'metadata_hash:{self.id}', 'estimated_type', mimetype)
-        r_metadata.sadd('hash_all_type', mimetype) #################################################### rename ????
+    # avoid counting the same hash multiple time on the same item
+    # except if different encoding
+    def is_seen_this_day(self, date):
+        return bool(self.get_nb_seen_by_date(Date.get_today_date_str()))
 
-    def save_file(self, content, mimetype): #####################################################
+    def save_file(self, b_content, mimetype):  # TODO TEST ME
         filepath = self.get_filepath(mimetype=mimetype)
         if os.path.isfile(filepath):
-            #print('File already exist')
+            # print('File already exist')
             return False
         # create dir
         dirname = os.path.dirname(filepath)
         if not os.path.exists(dirname):
             os.makedirs(dirname)
         with open(filepath, 'wb') as f:
-            f.write(file_content)
+            f.write(b_content)
 
-        # create hash metadata
-        self._save_meta(filepath, mimetype)
+        # create meta
+        self._add_create()
+        self.set_mimetype(mimetype)
+        self.set_size(os.path.getsize(filepath))
+
         return True
 
-    # avoid counting the same hash multiple time on the same item
-    # except if defferent encoding
-
-    def is_seen_this_day(self, date):
-        for decoder in get_decoders_names():
-            if r_metadata.zscore(f'{decoder}_date:{date}', self.id):
-                return True
-        return False
-
-    def add(self, decoder_name, date, obj_id, mimetype):
-        if not self.is_seen_this_day(date):
-            # mimetype
-            r_metadata.zincrby(f'decoded:mimetype:{date}', 1, mimetype)
-            r_metadata.sadd(f'decoded:mimetypes', mimetype)
-
-        # filter hash encoded in the same object
-        if not self.is_correlated('item', None, obj_id):
-            r_metadata.hincrby(f'metadata_hash:{self.id}', f'{decoder_name}_decoder', 1)
-            r_metadata.zincrby(f'{decoder_name}_type:{mimetype}', 1, date)
-            r_metadata.incrby(f'{decoder_name}_decoded:{date}', 1)
-            r_metadata.zincrby(f'{decoder_name}_date:{date}', 1, self.id)
-
-        self.update_daterange(date)
-
-        # First Hash for this decoder this day
-
-        # Correlations
-        self.add_correlation('item', '', obj_id)
-        # domain
-        if is_crawled(obj_id):
-            domain = get_item_domain(obj_id)
-            self.add_correlation('domain', '', domain)
-
-        # Filter duplicates ######################################################################
-        # Filter on item + hash for this day
-        # filter Obj Duplicate
-        # first time we see this day
-        # iterate on all decoder
-        ######################################################################
-
-        # first time we see this hash today
-        # mimetype # # # # # # # #
-        r_metadata.zincrby(f'decoded:mimetype:{date}', 1, mimetype)
-        # create hash metadata
-        r_metadata.sadd(f'decoded:mimetypes', mimetype)
-
-    # # TODO: DUPLICATES + check fields
-    def add(self, decoder_name, date, obj_id, mimetype):
-        self.update_daterange(date)
-
-        r_metadata.incrby(f'{decoder_type}_decoded:{date}', 1)
-        r_metadata.zincrby(f'{decoder_type}_date:{date}', 1, self.id)
-
-        r_metadata.hincrby(f'metadata_hash:{self.id}', f'{decoder_type}_decoder', 1)
-        r_metadata.zincrby(f'{decoder_type}_type:{mimetype}', 1, date) # # TODO: # DUP1
-
-        ################################################################ # TODO: REMOVE ?????????????????????????????????
-        r_metadata.zincrby(f'{decoder_type}_hash:{self.id}', 1, obj_id) # number of b64 on this item
-
-        # first time we see this hash encoding on this item
-        if not r_metadata.zscore(f'{decoder_type}_hash:{self.id}', obj_id):
-            # create hash metadata
-            r_metadata.sadd(f'hash_{decoder_type}_all_type', mimetype)
-
-        # first time we see this hash encoding today
-        if not r_metadata.zscore(f'{decoder_type}_date:{date}', self.id):
-            r_metadata.zincrby(f'{decoder_type}_type:{mimetype}', 1, date) # # TODO: # DUP1
-
-        # Correlations
-        self.add_correlation('item', '', obj_id)
-        # domain
-        if is_crawled(obj_id):
-            domain = get_item_domain(obj_id)
-            self.add_correlation('domain', '', domain)
-
-        # NB of MIMETYPE / DAY -> ALL HASH OR UNIQ HASH ??????
+    def add(self, algo_name, date, obj_id, mimetype=None):
+        self._add(date, obj_id)
+        if not mimetype:
+            mimetype = self.get_mimetype()
+
+        is_new_decoded = r_objects.sadd(f'decoded:algo:{algo_name}:{date}', self.id)  # filter by algo + sparkline
+        # uniq decoded in this algo today
+        if int(is_new_decoded):
+            r_objects.zincrby(f'decoded:algos:{date}', 1, algo_name)  # pie chart
+
+        # mimetype -> decodeds
+        is_new_decoded = r_objects.sadd(f'decoded:mime:{mimetype}:{date}', self.id)  # filter by mimetype
+        # uniq decoded in this mimetype today
+        if int(is_new_decoded):
+            r_objects.zincrby(f'decoded:mime:{date}', 1, mimetype)  # TDO ADD OPTION TO CALC IF NOT EXISTS
+            r_objects.sadd('decoded:mimetypes', mimetype)
+
+        # algo + mimetype -> Decodeds
+        #   -> sinter with r_objects.sunion(f'decoded:algo:{algo_name}:{date}')
 
     # # TODO: ADD items
     def create(self, content, date, mimetype=None):
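The rewritten `add()` replaces the per-decoder ARDB counters with per-day sets and zsets; these are what the dashboard charts further down consume. A sketch of the keys touched for a decoded seen on 20220624 via base64 with mimetype text/plain (key names taken from the code above):

    # Per-day filter sets (SADD returns 1 only for new members):
    #   decoded:algo:base64:20220624       <- sha1
    #   decoded:mime:text/plain:20220624   <- sha1
    # Per-day chart zsets (incremented only on the first sighting that day):
    #   decoded:algos:20220624             ZINCRBY 1 'base64'      # algo pie chart
    #   decoded:mime:20220624              ZINCRBY 1 'text/plain'  # mimetype ranking
    # Global mimetype registry:
    #   decoded:mimetypes                  SADD 'text/plain'
    # Combined algo+mimetype queries then become set intersections:
    r_objects.sinter('decoded:algo:base64:20220624', 'decoded:mime:text/plain:20220624')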
@@ -324,17 +240,14 @@ class Decoded(AbstractObject):
     #######################################################################################
     #######################################################################################
 
-    #######################################################################################
-    #######################################################################################
-
     def is_vt_enabled(self):
         return VT_ENABLED
 
     def set_vt_report(self, report):
-        r_metadata.hset(f'metadata_hash:{self.id}', 'vt_report', report)
+        r_objects.hset(f'meta:{self.type}:{self.id}', 'vt_report', report)
 
     def set_meta_vt(self, link, report):
-        r_metadata.hset(f'metadata_hash:{self.id}', 'vt_link', link)
+        r_objects.hset(f'meta:{self.type}:{self.id}', 'vt_link', link)
         self.set_vt_report(report)
 
     def refresh_vt_report(self):
@@ -371,17 +284,68 @@ class Decoded(AbstractObject):
         link = json_response['permalink'].split('analysis')[0] + 'analysis/'
         self.set_meta_vt(link, 'Please Refresh')
 
 ############################################################################
-############################################################################
 
-def get_decoders_names():
+def is_vt_enabled():
+    return VT_ENABLED
+
+def get_all_decodeds():
+    return r_objects.smembers(f'decoded:all')
+
+def get_algos():
     return ['base64', 'binary', 'hexadecimal']
 
 def get_all_mimetypes():
-    return r_metadata.smembers(f'decoded:mimetypes')
+    return r_objects.smembers('decoded:mimetypes')
+
+def get_nb_decodeds_by_date(date):
+    return r_objects.zcard(f'decoded:date:{date}')
+
+def get_decodeds_by_date(date):
+    return r_objects.zrange(f'decoded:date:{date}', 0, -1)
+
+def get_algo_decodeds_by_date(date, algo):
+    return r_objects.smembers(f'decoded:algo:{algo}:{date}')
+
+def get_mimetype_decodeds_by_date(date, mimetype):
+    return r_objects.smembers(f'decoded:mime:{mimetype}:{date}')
+
+def get_algo_mimetype_decodeds_by_date(date, algo, mimetype):
+    return r_objects.sinter(f'decoded:algo:{algo}:{date}', f'decoded:mime:{mimetype}:{date}')
+
+def get_decodeds_by_daterange(date_from, date_to, algo=None, mimetype=None):
+    decodeds = set()
+    if not algo and not mimetype:
+        for date in Date.substract_date(date_from, date_to):
+            decodeds = decodeds | set(get_decodeds_by_date(date))
+    elif algo and not mimetype:
+        for date in Date.substract_date(date_from, date_to):
+            decodeds = decodeds | get_algo_decodeds_by_date(date, algo)
+    elif mimetype and not algo:
+        for date in Date.substract_date(date_from, date_to):
+            decodeds = decodeds | get_mimetype_decodeds_by_date(date, mimetype)
+    elif algo and mimetype:
+        for date in Date.substract_date(date_from, date_to):
+            decodeds = decodeds | get_algo_mimetype_decodeds_by_date(date, algo, mimetype)
+    return decodeds
+
+def sanitise_algo(algo):
+    if algo in get_algos():
+        return algo
+    else:
+        return None
+
+def sanitise_mimetype(mimetype):
+    if mimetype:
+        if r_objects.sismember('decoded:mimetypes', mimetype):
+            return mimetype
+    else:
+        return None
+
+############################################################################
 
 def sanityze_decoder_names(decoder_name):
-    if decoder_name not in Decodeds.get_decoders_names():
+    if decoder_name not in Decodeds.get_algos():
         return None
     else:
         return decoder_name
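A usage sketch for the new module-level getters (dates are YYYYMMDD strings, as elsewhere in the codebase; the filter values are invented):

    ids = get_decodeds_by_daterange('20220601', '20220630',
                                    algo='base64', mimetype='application/pdf')
    for decoded_id in ids:
        decoded = Decoded(decoded_id)
        print(decoded_id, decoded.get_mimetype(), decoded.get_size())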
@@ -389,11 +353,12 @@ def sanityze_decoder_names(decoder_name):
 def sanityze_mimetype(mimetype):
     if mimetype == 'All types':
         return None
-    elif not r_metadata.sismember('hash_all_type', mimetype):
+    elif not r_objects.sismember(f'decoded:mimetypes', mimetype):
         return None
     else:
         return mimetype
 
+# TODO
 def pie_chart_mimetype_json(date_from, date_to, mimetype, decoder_name):
     if mimetype:
         all_mimetypes = [mimetype]
@@ -404,33 +369,121 @@ def pie_chart_mimetype_json(date_from, date_to, mimetype, decoder_name):
     for mimet in all_mimetypes:
         pass
 
+# TODO
 def pie_chart_decoder_json(date_from, date_to, mimetype):
-    all_decoder = get_decoders_names()
+    all_algos = get_algos()
     date_range = Date.substract_date(date_from, date_to)
     if not date_range:
         date_range.append(Date.get_today_date_str())
-    nb_decoder = {}
+    nb_algos = {}
     for date in date_range:
-        for decoder_name in all_decoder:
-            if not mimetype:
-                nb = r_metadata.get(f'{decoder_name}_decoded:{date}')
-                if nb is None:
-                    nb = 0
-                else:
-                    nb = int(nb)
-            else:
-                nb = r_metadata.zscore(f'{decoder_name}_type:{mimetype}', date)
-            nb_decoder[decoder_name] = nb_decoder.get(decoder_name, 0) + nb
+        for algo_name in all_algos:
+            # if not mimetype:
+            nb = r_objects.zscore(f'decoded:algos:{date}', algo_name)
+            # TODO mimetype necoding per day
+            # else:
+            #     nb = r_metadata.zscore(f'{algo_name}_type:{mimetype}', date)
+            if nb is None:
+                nb = 0
+            else:
+                nb = int(nb)
+            nb_algos[algo_name] = nb_algos.get(algo_name, 0) + nb
     pie_chart = []
-    for decoder_name in all_decoder:
-        pie_chart.append({'name': decoder_name, 'value': nb_decoder[decoder_name]})
+    for algo_name in all_algos:
+        pie_chart.append({'name': algo_name, 'value': nb_algos[algo_name]})
     return pie_chart
 
+# TODO FILTER BY ALGO
+def pie_chart_mimetype_json(date_from, date_to, algo):
+    date_range = Date.substract_date(date_from, date_to)
+    if not date_range:
+        date_range.append(Date.get_today_date_str())
+    mimetypes = {}
+    if len(date_range) == 1:
+        mimes = r_objects.zrange(f'decoded:mime:{date_range[0]}', 0, -1, withscores=True)
+        for t_mime in mimes:
+            mime, nb = t_mime
+            mimetypes[mime] = int(nb)
+    else:
+        mimetypes = {}
+        for date in date_range:
+            mimes = r_objects.zrange(f'decoded:mime:{date}', 0, -1, withscores=True)
+            for t_mime in mimes:
+                mime, nb = t_mime
+                mimetypes[mime] = mimetypes.get(mime, 0) + int(nb)
+    top5_mimes = sorted(mimetypes, key=mimetypes.get, reverse=True)[:5]
+    pie_chart = []
+    for mime in top5_mimes:
+        pie_chart.append({'name': mime, 'value': mimetypes[mime]})
+    return pie_chart
+
+def barchart_range_json(date_from, date_to, mimetype=None):
+    date_range = Date.substract_date(date_from, date_to)
+    if not date_range:
+        date_range.append(Date.get_today_date_str())
+    barchart = []
+    if mimetype:
+        for date in date_range:
+            range_day = {'date': f'{date[0:4]}-{date[4:6]}-{date[6:8]}'}
+            nb_day = r_objects.scard(f'decoded:mime:{mimetype}:{date}')
+            range_day[mimetype] = nb_day
+            barchart.append(range_day)
+    else:
+        # algo by mimetype, date = mimetype
+        if len(date_range) == 1:
+            mimes = r_objects.zrange(f'decoded:mime:{date_range[0]}', 0, -1, withscores=True)
+            # TODO
+            # UNION
+            #   f'decoded:algo:{algo_name}:{date}'
+            #   f'decoded:mime:{mimetype}:{date}'
+            for t_mime in mimes:
+                mime, nb = t_mime
+                range_day = {'date': mime, 'mimetype': nb}
+                barchart.append(range_day)
+        # mimetypes by date
+        else:
+            mimetypes = set()
+            for date in date_range:
+                range_day = {'date': f'{date[0:4]}-{date[4:6]}-{date[6:8]}'}
+                mimes = r_objects.zrange(f'decoded:mime:{date}', 0, -1, withscores=True)
+                for t_mime in mimes:
+                    mime, nb = t_mime
+                    mimetypes.add(mime)
+                    range_day[mime] = int(nb)
+                barchart.append(range_day)
+            if not mimetypes:
+                mimetypes.add('No Data')
+            for row in barchart:
+                for mime in mimetypes:
+                    if mime not in row:
+                        row[mime] = 0
+    return barchart
+
+def graphline_json(decoded_id):
+    decoded = Decoded(decoded_id)
+    graphline = []
+    if decoded.exists():
+        nb_day = 30
+        for date in Date.get_previous_date_list(nb_day):
+            graphline.append({'date': f'{date[0:4]}-{date[4:6]}-{date[6:8]}', 'value': decoded.get_nb_seen_by_date(date)})
+    return graphline
+
 def api_pie_chart_decoder_json(date_from, date_to, mimetype):
     mimetype = sanityze_mimetype(mimetype)
     date = Date.sanitise_date_range(date_from, date_to)
     return pie_chart_decoder_json(date['date_from'], date['date_to'], mimetype)
 
+def api_pie_chart_mimetype_json(date_from, date_to, algo):
+    algo = sanitise_algo(algo)
+    date = Date.sanitise_date_range(date_from, date_to)
+    return pie_chart_mimetype_json(date['date_from'], date['date_to'], algo)
+
+def api_barchart_range_json(date_from, date_to, mimetype):
+    date = Date.sanitise_date_range(date_from, date_to)
+    if mimetype:
+        mimetype = sanityze_mimetype(mimetype)
+    return barchart_range_json(date['date_from'], date['date_to'], mimetype=mimetype)
+
 def _delete_old_json_descriptor():
     decodeds = []
     hash_dir = os.path.join(os.environ['AIL_HOME'], HASH_DIR)
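Shape of the chart payloads built above, with invented values: `pie_chart_decoder_json` yields one entry per algo, and the multi-day branch of `barchart_range_json` yields one row per day with a column per mimetype, zero-filled so every row has the same keys:

    pie_chart = [{'name': 'base64', 'value': 42},
                 {'name': 'binary', 'value': 7},
                 {'name': 'hexadecimal', 'value': 0}]

    barchart = [{'date': '2022-06-24', 'application/zip': 3, 'text/plain': 11},
                {'date': '2022-06-25', 'application/zip': 0, 'text/plain': 5}]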
@@ -441,15 +494,17 @@ def _delete_old_json_descriptor():
             os.remove(decoded_path)
     return decodeds
 
-def get_all_decodeds():
+# TODO
+def get_all_decodeds_files():
     decodeds = []
     hash_dir = os.path.join(os.environ['AIL_HOME'], HASH_DIR)
     if not hash_dir.endswith("/"):
         hash_dir = f"{hash_dir}/"
     for root, dirs, files in os.walk(hash_dir):
         for file in files:
-            decoded_path = f'{root}{file}'
+            # decoded_path = f'{root}{file}'
             decodeds.append(file)
     return decodeds
 
-#if __name__ == '__main__':
+# if __name__ == '__main__':
@@ -65,7 +65,7 @@ class AbstractDaterangeObject(AbstractObject, ABC):
         return last_seen
 
     def get_nb_seen(self):
-        return r_object.hget(f'meta:{self.type}:{self.id}', 'nb')
+        return self.get_nb_correlation('item')
 
     def get_nb_seen_by_date(self, date):
         nb = r_object.zscore(f'{self.type}:date:{date}', self.id)
@@ -108,20 +108,23 @@ class AbstractDaterangeObject(AbstractObject, ABC):
             sparkline.append(self.get_nb_seen_by_date(date))
         return sparkline
 
+    def _add_create(self):
+        r_object.sadd(f'{self.type}:all', self.id)
+
+    # TODO don't increase nb if same hash in item with different encoding
+    # if hash already in item
     def _add(self, date, item_id):
         if not self.exists():
+            self._add_create(date)
             self.set_first_seen(date)
             self.set_last_seen(date)
-            r_object.sadd(f'{self.type}:all', self.id)
         else:
             self.update_daterange(date)
         update_obj_date(date, self.type)
 
         # NB Object seen by day
-        print(f'{self.type}:date:{date}', 1, self.id)
-        r_object.zincrby(f'{self.type}:date:{date}', 1, self.id)
-        # NB Object seen
-        r_object.hincrby(f'meta:{self.type}:{self.id}', 'nb', 1)
+        if not self.is_correlated('item', '', item_id):  # if decoded not already in object
+            r_object.zincrby(f'{self.type}:date:{date}', 1, self.id)
 
         # Correlations
         self.add_correlation('item', '', item_id)
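Two behavior changes in this hunk are easy to miss: `get_nb_seen()` is now derived from the number of item correlations rather than a stored `nb` counter, and the per-day zset is only incremented when the object is not yet correlated with the item. In effect, re-processing the same item is idempotent for the daily counts (sketch, with a hypothetical item id):

    # The second call finds the existing item correlation and skips the
    # ZINCRBY on decoded:date:{date}.
    decoded._add('20220624', 'submitted/2022/06/24/foo.gz')  # counted
    decoded._add('20220624', 'submitted/2022/06/24/foo.gz')  # not re-counted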
@@ -19,7 +19,7 @@ sys.path.append(os.environ['AIL_BIN'])
 ##################################
 from lib import Tag
 from lib import Duplicate
-from lib.correlations_engine import get_nb_correlations, get_correlations, add_obj_correlation, delete_obj_correlation, exists_obj_correlation, is_obj_correlated
+from lib.correlations_engine import get_nb_correlations, get_correlations, add_obj_correlation, delete_obj_correlation, exists_obj_correlation, is_obj_correlated, get_nb_correlation_by_correl_type
 from lib.Investigations import is_object_investigated, get_obj_investigations, delete_obj_investigations
 from lib.Tracker import is_obj_tracked, get_obj_all_trackers, delete_obj_trackers
@@ -219,6 +219,9 @@ class AbstractObject(ABC):
         """
         return get_correlations(self.type, self.subtype, self.id)
 
+    def get_nb_correlation(self, correl_type):
+        return get_nb_correlation_by_correl_type(self.type, self.get_subtype(r_str=True), self.id, correl_type)
+
     def get_nb_correlations(self, filter_types=[]):
         return get_nb_correlations(self.type, self.subtype, self.id, filter_types=filter_types)
@@ -97,6 +97,21 @@ def get_object_svg(obj_type, subtype, id):
     obj = get_object(obj_type, subtype, id)
     return obj.get_svg_icon()
 
+## TAGS ##
+def get_obj_tags(obj_type, subtype, id):
+    obj = get_object(obj_type, subtype, id)
+    return obj.get_tags()
+
+def add_obj_tag(obj_type, subtype, id, tag):
+    obj = get_object(obj_type, subtype, id)
+    obj.add_tag(tag)
+
+def add_obj_tags(obj_type, subtype, id, tags):
+    obj = get_object(obj_type, subtype, id)
+    for tag in tags:
+        obj.add_tag(tag)
+# -TAGS- #
+
 def get_object_meta(obj_type, subtype, id, options=[], flask_context=False):
     obj = get_object(obj_type, subtype, id)
     meta = obj.get_meta(options=options)
@@ -117,6 +132,7 @@ def get_object_card_meta(obj_type, subtype, id, related_btc=False):
     meta['icon'] = obj.get_svg_icon()
     if subtype or obj_type == 'cve':
         meta['sparkline'] = obj.get_sparkline()
+        meta['cve_search'] = obj.get_cve_search()
     if subtype == 'bitcoin' and related_btc:
         meta["related_btc"] = btc_ail.get_bitcoin_info(obj.id)
     if obj.get_type() == 'decoded':
@@ -9,15 +9,10 @@
 ##################################
 # Import External packages
 ##################################
-import time
 import os
 import base64
 from hashlib import sha1
-import magic
-import json
-import datetime
 import re
-import signal
 import sys
 
 sys.path.append(os.environ['AIL_BIN'])
@@ -27,49 +22,27 @@ sys.path.append(os.environ['AIL_BIN'])
 from modules.abstract_module import AbstractModule
 from lib.ConfigLoader import ConfigLoader
 from lib.objects.Items import Item
-from lib.objects import Decodeds
+from lib.objects.Decodeds import Decoded
 
 config_loader = ConfigLoader()
-serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
 hex_max_execution_time = config_loader.get_config_int("Hex", "max_execution_time")
 binary_max_execution_time = config_loader.get_config_int("Binary", "max_execution_time")
 base64_max_execution_time = config_loader.get_config_int("Base64", "max_execution_time")
 config_loader = None
 
-#####################################################
-#####################################################
-
-# # TODO: use regex_helper
-class TimeoutException(Exception):
-    pass
-
-def timeout_handler(signum, frame):
-    raise TimeoutException
-
-# # TODO: # FIXME: Remove signal -> replace with regex_helper
-signal.signal(signal.SIGALRM, timeout_handler)
-
-#####################################################
-####################################################
-
 class Decoder(AbstractModule):
     """
     Decoder module for AIL framework
     """
 
-    # TODO to lambda expr
     def hex_decoder(self, hexStr):
-        #hexStr = ''.join( hex_string.split(" ") )
+        # hexStr = ''.join( hex_string.split(" ") )
         return bytes(bytearray([int(hexStr[i:i+2], 16) for i in range(0, len(hexStr), 2)]))
 
-    # TODO to lambda expr
    def binary_decoder(self, binary_string):
         return bytes(bytearray([int(binary_string[i:i+8], 2) for i in range(0, len(binary_string), 8)]))
 
-    # TODO to lambda expr
     def base64_decoder(self, base64_string):
         return base64.b64decode(base64_string)
 
@@ -86,18 +59,20 @@ class Decoder(AbstractModule):
         cmp_regex_base64 = re.compile(regex_base64)
 
         # map decoder function
-        self.decoder_function = {'binary': self.binary_decoder, 'hexadecimal': self.hex_decoder, 'base64': self.base64_decoder}
+        self.decoder_function = {'binary': self.binary_decoder,
+                                 'hexadecimal': self.hex_decoder,
+                                 'base64': self.base64_decoder}
 
         # list all decoder with regex,
-        decoder_binary = {'name': 'binary', 'regex': cmp_regex_binary, 'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time}
-        decoder_hexadecimal = {'name': 'hexadecimal', 'regex': cmp_regex_hex, 'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time}
-        decoder_base64 = {'name': 'base64', 'regex': cmp_regex_base64, 'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time}
+        decoder_binary = {'name': 'binary', 'regex': cmp_regex_binary,
+                          'encoded_min_size': 300, 'max_execution_time': binary_max_execution_time}
+        decoder_hexadecimal = {'name': 'hexadecimal', 'regex': cmp_regex_hex,
+                               'encoded_min_size': 300, 'max_execution_time': hex_max_execution_time}
+        decoder_base64 = {'name': 'base64', 'regex': cmp_regex_base64,
+                          'encoded_min_size': 40, 'max_execution_time': base64_max_execution_time}
 
         self.decoder_order = [decoder_base64, decoder_binary, decoder_hexadecimal, decoder_base64]
 
-        for decoder in self.decoder_order:
-            serv_metadata.sadd('all_decoder', decoder['name'])
-
         # Waiting time in seconds between to message processed
         self.pending_seconds = 1
@@ -110,63 +85,47 @@ class Decoder(AbstractModule):
         content = item.get_content()
         date = item.get_date()
 
-        for decoder in self.decoder_order: # add threshold and size limit
-            # max execution time on regex
-            signal.alarm(decoder['max_execution_time'])
-
-            try:
-                encoded_list = decoder['regex'].findall(content)
-            except TimeoutException:
-                encoded_list = []
-                self.process.incr_module_timeout_statistic() # add encoder type
-                self.redis_logger.debug(f"{item.id} processing timeout")
-                continue
-            else:
-                signal.alarm(0)
-
-            if len(encoded_list) > 0:
-                content = self.decode_string(content, item.id, date, encoded_list, decoder['name'], decoder['encoded_min_size'])
-
-    def decode_string(self, content, item_id, date, encoded_list, decoder_name, encoded_min_size):
-        find = False
-        for encoded in encoded_list:
-            if len(encoded) >= encoded_min_size:
-                decoded_file = self.decoder_function[decoder_name](encoded)
-                find = True
-
-                sha1_string = sha1(decoded_file).hexdigest()
-                decoded = Decoded(sha1_string)
-
-                mimetype = decoded.guess_mimetype(decoded_file)
-                if not mimetype:
-                    print(item_id)
-                    print(sha1_string)
-                    raise Exception(f'Invalid mimetype: {sha1_string} {item_id}')
-
-                decoded.create(content, date)
-                decoded.add(decoder_name, date, item_id, mimetype)
-
-                save_item_relationship(sha1_string, item_id) ################################
-
-                # remove encoded from item content
-                content = content.replace(encoded, '', 1)
-
-                self.redis_logger.debug(f'{item_id} : {decoder_name} - {mimetype}')
-                print(f'{item_id} : {decoder_name} - {mimetype}')
-        if find:
-            self.redis_logger.info(f'{decoder_name} decoded')
-            print(f'{decoder_name} decoded')
-
-            # Send to Tags
-            msg = f'infoleak:automatic-detection="{decoder_name}";{item_id}'
-            self.send_message_to_queue(msg, 'Tags')
-
-        # perf: remove encoded from item content
-        return content
+        for decoder in self.decoder_order:
+            find = False
+            dname = decoder['name']
+
+            encodeds = self.regex_findall(decoder['regex'], item.id, content)
+            # PERF remove encoded from item content
+            for encoded in encodeds:
+                content = content.replace(encoded, '', 1)
+            encodeds = set(encodeds)
+
+            for encoded in encodeds:
+                find = False
+                if len(encoded) >= decoder['encoded_min_size']:
+                    decoded_file = self.decoder_function[dname](encoded)
+
+                    sha1_string = sha1(decoded_file).hexdigest()
+                    decoded = Decoded(sha1_string)
+
+                    if not decoded.exists():
+                        mimetype = decoded.guess_mimetype(decoded_file)
+                        if not mimetype:
+                            print(sha1_string, item.id)
+                            raise Exception(f'Invalid mimetype: {decoded.id} {item.id}')
+                        decoded.save_file(decoded_file, mimetype)
+                    else:
+                        mimetype = decoded.get_mimetype()
+                    decoded.add(dname, date, item.id, mimetype=mimetype)
+
+                    # DEBUG
+                    self.redis_logger.debug(f'{item.id} : {dname} - {decoded.id} - {mimetype}')
+                    print(f'{item.id} : {dname} - {decoded.id} - {mimetype}')
+
+            if find:
+                self.redis_logger.info(f'{item.id} - {dname}')
+                print(f'{item.id} - {dname}')
+
+                # Send to Tags
+                msg = f'infoleak:automatic-detection="{dname}";{item.id}'
+                self.send_message_to_queue(msg, 'Tags')
 
 if __name__ == '__main__':
 
-    # # TODO: TEST ME
     module = Decoder()
     module.run()
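The module-level `signal.alarm` timeout is gone; the timeout now lives inside `regex_helper`, behind the `regex_findall` wrapper that this same commit extends with an `r_set` flag. Under those assumptions, the extraction step reduces to roughly:

    # Sketch of the new extraction flow inside Decoder.compute():
    encodeds = self.regex_findall(decoder['regex'], item.id, content)  # timeout enforced by regex_helper
    for encoded in encodeds:
        content = content.replace(encoded, '', 1)  # PERF: shrink the haystack for the next decoder
    encodeds = set(encodeds)                       # decode each distinct string once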
@@ -87,7 +87,7 @@ class AbstractModule(ABC):
     def regex_finditer(self, regex, obj_id, content):
         return regex_helper.regex_finditer(self.r_cache_key, regex, obj_id, content, max_time=self.max_execution_time)
 
-    def regex_findall(self, regex, id, content):
+    def regex_findall(self, regex, id, content, r_set=False):
         """
         regex findall helper (force timeout)
         :param regex: compiled regex
@@ -96,7 +96,7 @@ class AbstractModule(ABC):
 
         ex: send_to_queue(item_id, 'Global')
         """
-        return regex_helper.regex_findall(self.module_name, self.r_cache_key, regex, id, content, max_time=self.max_execution_time)
+        return regex_helper.regex_findall(self.module_name, self.r_cache_key, regex, id, content, max_time=self.max_execution_time, r_set=r_set)
 
     def run(self):
         """
@@ -183,3 +183,42 @@ def sanitise_date_range(date_from, date_to, separator='', date_type='str'):
         date_from = date_to
         date_to = res
     return {"date_from": date_from, "date_to": date_to}
 
+def sanitise_daterange(date_from, date_to, separator='', date_type='str'):
+    '''
+    Check/Return a correct date_form and date_to
+    '''
+    if not date_from and date_to:
+        date_from = date_to
+    elif not date_to and date_from:
+        date_to = date_from
+    elif not date_to and not date_from:
+        date = datetime.date.today().strftime("%Y%m%d")
+        return date, date
+
+    if date_type == 'str':
+        # remove separators
+        if len(date_from) == 10:
+            date_from = date_from[0:4] + date_from[5:7] + date_from[8:10]
+        if len(date_to) == 10:
+            date_to = date_to[0:4] + date_to[5:7] + date_to[8:10]
+
+        if not validate_str_date(date_from, separator=separator):
+            date_from = datetime.date.today().strftime("%Y%m%d")
+        if not validate_str_date(date_to, separator=separator):
+            date_to = datetime.date.today().strftime("%Y%m%d")
+    else:  # datetime
+        if isinstance(date_from, datetime.datetime):
+            date_from = date_from.strftime("%Y%m%d")
+        else:
+            date_from = datetime.date.today().strftime("%Y%m%d")
+        if isinstance(date_to, datetime.datetime):
+            date_to = date_to.strftime("%Y%m%d")
+        else:
+            date_to = datetime.date.today().strftime("%Y%m%d")
+
+    if int(date_from) > int(date_to):
+        res = date_from
+        date_from = date_to
+        date_to = res
+    return date_from, date_to
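Usage sketch for the new `sanitise_daterange`; unlike the existing `sanitise_date_range` it returns a tuple rather than a dict:

    date_from, date_to = sanitise_daterange('2022-06-01', '20220630')
    # -> ('20220601', '20220630'): separators stripped, invalid or missing
    #    dates replaced by today, and the pair swapped if out of order.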
@@ -83,5 +83,15 @@ def objects_cve_search():
     else:
         return redirect(cve.get_link(flask_context=True))
 
+@objects_cve.route("/objects/cve/graphline/json", methods=['GET'])
+@login_required
+@login_read_only
+def objects_cve_graphline_json():
+    cve_id = request.args.get('id')
+    cve = Cves.Cve(cve_id)
+    if not cve.exists():
+        abort(404)
+    return jsonify(Cves.get_cve_graphline(cve_id))
+
 # ============= ROUTES ==============
@@ -20,10 +20,12 @@ sys.path.append(os.environ['AIL_BIN'])
 # Import Project packages
 ##################################
 from lib.objects import Decodeds
+from packages import Date
 
 # ============ BLUEPRINT ============
-objects_decoded = Blueprint('objects_decoded', __name__, template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates/objects/decoded'))
+objects_decoded = Blueprint('objects_decoded', __name__,
+                            template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates/objects/decoded'))
 
 
 # ============ VARIABLES ============
@@ -33,9 +35,60 @@ objects_decoded = Blueprint('objects_decoded', __name__, template_folder=os.path
 # ============= ROUTES ==============
 
-# # TODO: # FIXME: CHECK IF OBJ EXIST
+@objects_decoded.route("/object/decodeds", methods=['GET', 'POST'])
+@login_required
+@login_read_only
+def decodeds_dashboard():
+    if request.method == 'POST':
+        date_from = request.form.get('date_from')
+        date_to = request.form.get('date_to')
+        mimetype = request.form.get('mimetype')
+        algo = request.form.get('algo')
+        show_decoded = request.form.get('show_decoded')
+        return redirect(
+            url_for('objects_decoded.decodeds_dashboard', date_from=date_from, date_to=date_to, mimetype=mimetype,
+                    algo=algo, show=show_decoded))
+    else:
+        date_from = request.args.get('date_from')
+        date_to = request.args.get('date_to')
+        mimetype = request.args.get('mimetype')
+        algo = request.args.get('algo')
+        show_decoded = request.args.get('show')
+        if show_decoded:
+            show_decoded = True
 
-@objects_decoded.route("/object/decoded/download") #completely shows the paste in a new tab
+        if mimetype == 'All types':
+            mimetype = None
+        if algo == 'All encoding':
+            algo = None
+
+        algo = Decodeds.sanitise_algo(algo)
+        mimetype = Decodeds.sanitise_mimetype(mimetype)
+        date_from, date_to = Date.sanitise_daterange(date_from, date_to)
+
+        metas = []
+        if show_decoded:
+            decodeds = Decodeds.get_decodeds_by_daterange(date_from, date_to, algo=algo, mimetype=mimetype)
+            metas = []
+            for decoded_id in decodeds:
+                decoded = Decodeds.Decoded(decoded_id)
+                metas.append(decoded.get_meta(options={'sparkline', 'mimetype', 'icon', 'size', 'vt'}))
+
+        # TODO GET PIE CHARTS
+
+        return render_template("decoded/decodeds_dashboard.html", metas=metas, vt_enabled=Decodeds.is_vt_enabled(),
+                               date_from=date_from, date_to=date_to, algo=algo, mimetype=mimetype,
+                               algos=Decodeds.get_algos(), show_decoded=show_decoded,
+                               mimetypes=Decodeds.get_all_mimetypes())
+
+
+@objects_decoded.route("/object/decodeds/search", methods=['POST'])
+@login_required
+@login_read_only
+def decodeds_search():
+    decoded_id = request.form.get('object_id')
+    print(decoded_id)
+    return redirect(url_for('correlation.show_correlation', type='decoded', id=decoded_id))
+
+
+@objects_decoded.route("/object/decoded/download")
 @login_required
 @login_read_only
 def decoded_download():
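The POST branch above only normalises the form into query parameters and redirects, so the dashboard is fully addressable by URL. A sketch of an equivalent GET, with parameter names taken from the route and a placeholder host:

# Sketch: the GET side of decodeds_dashboard accepts the same parameters
# the POST branch redirects with.
import requests

params = {
    'date_from': '2022-01-01',      # separators are stripped by sanitise_daterange
    'date_to': '2022-01-31',
    'algo': 'base64',               # one of Decodeds.get_algos(), or omit
    'mimetype': 'application/zip',  # or omit for all types
    'show': 'True',                 # any non-empty value lists the decoded files
}
resp = requests.get('https://127.0.0.1:7000/object/decodeds',  # assumed host
                    params=params, verify=False)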
@@ -51,7 +104,8 @@ def decoded_download():
     else:
         abort(404)
 
-@objects_decoded.route("/object/decoded/send_to_vt") #completely shows the paste in a new tab
+
+@objects_decoded.route("/object/decoded/send_to_vt")
 @login_required
 @login_read_only
 def send_to_vt():
@@ -66,7 +120,8 @@ def send_to_vt():
     else:
         abort(404)
 
-@objects_decoded.route("/object/decoded/refresh_vt_report") #completely shows the paste in a new tab
+
+@objects_decoded.route("/object/decoded/refresh_vt_report")
 @login_required
 @login_read_only
 def refresh_vt_report():
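Both VT routes keep their behaviour; only the stale "#completely shows the paste in a new tab" comments are dropped. The dashboard template added below calls them via AJAX with an `id` query parameter, which a script could mimic like this (host and response key assumed):

# Sketch mirroring the template's sendFileToVT()/updateVTReport() calls.
import requests

base = 'https://127.0.0.1:7000'  # assumed host
decoded_id = 40 * '0'            # placeholder SHA1 of a decoded file

requests.get(f'{base}/object/decoded/send_to_vt',
             params={'id': decoded_id}, verify=False)
report = requests.get(f'{base}/object/decoded/refresh_vt_report',
                      params={'id': decoded_id}, verify=False).json()
print(report['report'])  # key assumed from the template's data['report']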
@@ -81,18 +136,42 @@ def refresh_vt_report():
     else:
         abort(404)
 
-@objects_decoded.route("/object/decoded/decoder_pie_chart_json", methods=['GET'])
+
+# TODO
+@objects_decoded.route("/object/decoded/algo_pie_chart/json", methods=['GET'])
 @login_required
 @login_read_only
 def decoder_pie_chart_json():
     date_from = request.args.get('date_from')
     date_to = request.args.get('date_to')
-    mimetype = request.args.get('type')
+    mimetype = request.args.get('mimetype')
     return jsonify(Decodeds.api_pie_chart_decoder_json(date_from, date_to, mimetype))
+
+
+# TODO
+@objects_decoded.route("/object/decoded/mimetype_pie_chart/json", methods=['GET'])
+@login_required
+@login_read_only
+def mimetype_pie_chart_json():
+    date_from = request.args.get('date_from')
+    date_to = request.args.get('date_to')
+    algo = request.args.get('algo')
+    return jsonify(Decodeds.api_pie_chart_mimetype_json(date_from, date_to, algo))
+
+
+@objects_decoded.route("/object/decoded/barchart/json", methods=['GET'])
+@login_required
+@login_read_only
+def barchart_json():
+    date_from = request.args.get('date_from')
+    date_to = request.args.get('date_to')
+    mimetype = request.args.get('mimetype')
+    return jsonify(Decodeds.api_barchart_range_json(date_from, date_to, mimetype))
+
+
+@objects_decoded.route("/object/decoded/graphline/json", methods=['GET'])
+@login_required
+@login_read_only
-#####################################################3
+def graphline_json():
+    decoded_id = request.args.get('id')
+    decoded = Decodeds.Decoded(decoded_id)
+    if not decoded:
+        abort(404)
+    return jsonify(Decodeds.graphline_json(decoded_id))
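The four JSON routes above feed the D3 code in the new dashboard template; reconstructing from how that template reads the responses, the payloads presumably look like the sketches below (all values illustrative):

# Assumed payload shapes, inferred from the template's D3 readers.
algo_pie = [                 # draw_pie_chart(): d.data.name / d.data.value
    {'name': 'base64', 'value': 42},
    {'name': 'hexadecimal', 'value': 7},
]
barchart_range = [           # barchart_type_stack(): 'date' plus one key per series
    {'date': '2022-01-01', 'base64': 12, 'binary': 1, 'hexadecimal': 0},
    {'date': '2022-01-02', 'base64': 3, 'binary': 0, 'hexadecimal': 2},
]
graphline = [                # create_line_chart(): one point per day
    {'date': '2022-01-01', 'value': 3},
    {'date': '2022-01-02', 'value': 0},
]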
@@ -59,46 +59,6 @@ def substract_date(date_from, date_to):
         l_date.append( date.strftime('%Y%m%d') )
     return l_date
 
-def list_sparkline_values(date_range_sparkline, hash):
-    sparklines_value = []
-    for date_day in date_range_sparkline:
-        nb_seen_this_day = r_serv_metadata.zscore('hash_date:'+date_day, hash)
-        if nb_seen_this_day is None:
-            nb_seen_this_day = 0
-        sparklines_value.append(int(nb_seen_this_day))
-    return sparklines_value
-
-def get_file_icon(estimated_type):
-    file_type = estimated_type.split('/')[0]
-    # set file icon
-    if file_type == 'application':
-        file_icon = 'fa-file '
-    elif file_type == 'audio':
-        file_icon = 'fa-file-audio '
-    elif file_type == 'image':
-        file_icon = 'fa-file-image'
-    elif file_type == 'text':
-        file_icon = 'fa-file-alt'
-    else:
-        file_icon = 'fa-sticky-note'
-
-    return file_icon
-
-def get_file_icon_text(estimated_type):
-    file_type = estimated_type.split('/')[0]
-    # set file icon
-    if file_type == 'application':
-        file_icon_text = '\uf15b'
-    elif file_type == 'audio':
-        file_icon_text = '\uf1c7'
-    elif file_type == 'image':
-        file_icon_text = '\uf1c5'
-    elif file_type == 'text':
-        file_icon_text = '\uf15c'
-    else:
-        file_icon_text = '\uf249'
-
-    return file_icon_text
-
 def get_icon(correlation_type, type_id):
     icon_text = 'fas fa-sticky-note'
@@ -182,16 +142,6 @@ def list_sparkline_type_id_values(date_range_sparkline, correlation_type, type_i
         sparklines_value.append(int(nb_seen_this_day))
     return sparklines_value
 
-def get_all_keys_id_from_item(correlation_type, item_path):
-    all_keys_id_dump = set()
-    if item_path is not None:
-        for type_id in get_all_types_id(correlation_type):
-            res = r_serv_metadata.smembers('item_{}_{}:{}'.format(correlation_type, type_id, item_path))
-            for key_id in res:
-                all_keys_id_dump.add( (key_id, type_id) )
-    return all_keys_id_dump
-
-
 def get_correlation_type_search_endpoint(correlation_type):
     if correlation_type == 'pgpdump':
         endpoint = 'hashDecoded.all_pgpdump_search'
@@ -379,345 +329,7 @@ def correlation_type_range_type_json(correlation_type, date_from, date_to):
     return jsonify(range_type)
 
 # ============= ROUTES ==============
-@hashDecoded.route("/hashDecoded/all_hash_search", methods=['POST'])
-@login_required
-@login_read_only
-def all_hash_search():
-    date_from = request.form.get('date_from')
-    date_to = request.form.get('date_to')
-    type = request.form.get('type')
-    encoding = request.form.get('encoding')
-    show_decoded_files = request.form.get('show_decoded_files')
-    return redirect(url_for('hashDecoded.hashDecoded_page', date_from=date_from, date_to=date_to, type=type, encoding=encoding, show_decoded_files=show_decoded_files))
-
-
-@hashDecoded.route("/hashDecoded/", methods=['GET'])
-@login_required
-@login_read_only
-def hashDecoded_page():
-    date_from = request.args.get('date_from')
-    date_to = request.args.get('date_to')
-    type = request.args.get('type')
-    encoding = request.args.get('encoding')
-    show_decoded_files = request.args.get('show_decoded_files')
-
-    if type == 'All types':
-        type = None
-
-    if encoding == 'All encoding':
-        encoding = None
-
-    #date_from = '20180628' or date_from = '2018-06-28'
-    #date_to = '20180628' or date_to = '2018-06-28'
-
-    # verify file type input
-    if type is not None:
-        #retrieve + char
-        type = type.replace(' ', '+')
-        if type not in r_serv_metadata.smembers('hash_all_type'):
-            type = None
-
-    all_encoding = r_serv_metadata.smembers('all_decoder')
-    # verify encoding input
-    if encoding is not None:
-        if encoding not in all_encoding:
-            encoding = None
-
-    date_range = []
-    if date_from is not None and date_to is not None:
-        #change format
-        try:
-            if len(date_from) != 8:
-                date_from = date_from[0:4] + date_from[5:7] + date_from[8:10]
-                date_to = date_to[0:4] + date_to[5:7] + date_to[8:10]
-            date_range = substract_date(date_from, date_to)
-        except:
-            pass
-
-    if not date_range:
-        date_range.append(datetime.date.today().strftime("%Y%m%d"))
-        date_from = date_range[0][0:4] + '-' + date_range[0][4:6] + '-' + date_range[0][6:8]
-        date_to = date_from
-
-    else:
-        date_from = date_from[0:4] + '-' + date_from[4:6] + '-' + date_from[6:8]
-        date_to = date_to[0:4] + '-' + date_to[4:6] + '-' + date_to[6:8]
-
-    # display day type bar chart
-    if len(date_range) == 1 and type is None:
-        daily_type_chart = True
-        daily_date = date_range[0]
-    else:
-        daily_type_chart = False
-        daily_date = None
-
-    l_64 = set()
-    if show_decoded_files:
-        show_decoded_files = True
-        for date in date_range:
-            if encoding is None:
-                l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1)
-            else:
-                l_hash = r_serv_metadata.zrange(encoding+'_date:' +date, 0, -1)
-            if l_hash:
-                for hash in l_hash:
-                    l_64.add(hash)
-
-    num_day_sparkline = 6
-    date_range_sparkline = get_date_range(num_day_sparkline)
-
-    b64_metadata = []
-    l_64 = list(l_64)
-    for hash in l_64:
-        # select requested base 64 type
-        estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type')
-        if type is not None:
-            if estimated_type is not None:
-                if estimated_type != type:
-                    continue
-
-        first_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'first_seen')
-        last_seen = r_serv_metadata.hget('metadata_hash:'+hash, 'last_seen')
-        nb_seen_in_paste = r_serv_metadata.hget('metadata_hash:'+hash, 'nb_seen_in_all_pastes')
-        size = r_serv_metadata.hget('metadata_hash:'+hash, 'size')
-
-        if hash is not None and first_seen is not None and \
-                                last_seen is not None and \
-                                nb_seen_in_paste is not None and \
-                                size is not None:
-
-            file_icon = get_file_icon(estimated_type)
-
-            if r_serv_metadata.hexists('metadata_hash:'+hash, 'vt_link'):
-                b64_vt = True
-                b64_vt_link = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_link')
-                b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report')
-            else:
-                b64_vt = False
-                b64_vt_link = ''
-                b64_vt_report = r_serv_metadata.hget('metadata_hash:'+hash, 'vt_report')
-                # hash never refreshed
-                if b64_vt_report is None:
-                    b64_vt_report = ''
-
-            sparklines_value = list_sparkline_values(date_range_sparkline, hash)
-
-            b64_metadata.append( (file_icon, estimated_type, hash, nb_seen_in_paste, size, first_seen, last_seen, b64_vt, b64_vt_link, b64_vt_report, sparklines_value) )
-
-    l_type = sorted(r_serv_metadata.smembers('hash_all_type'))
-
-    return render_template("hashDecoded.html", l_64=b64_metadata, vt_enabled=vt_enabled, l_type=l_type, type=type, daily_type_chart=daily_type_chart, daily_date=daily_date,
-                            encoding=encoding, all_encoding=all_encoding, date_from=date_from, date_to=date_to, show_decoded_files=show_decoded_files)
-
-
-@hashDecoded.route('/hashDecoded/hash_by_type_json')
-@login_required
-@login_read_only
-def hash_by_type_json():
-    type = request.args.get('type')
-
-    #retrieve + char
-    type = type.replace(' ', '+')
-
-    num_day_type = 30
-    date_range = get_date_range(num_day_type)
-
-    #verify input
-    if type in r_serv_metadata.smembers('hash_all_type'):
-        type_value = []
-        all_decoder = r_serv_metadata.smembers('all_decoder')
-
-        range_decoder = []
-        for date in date_range:
-            day_decoder = {}
-            day_decoder['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8]
-            for decoder in all_decoder:
-                num_day_decoder = r_serv_metadata.zscore(decoder+'_type:'+type, date)
-                if num_day_decoder is None:
-                    num_day_decoder = 0
-                day_decoder[decoder]= num_day_decoder
-            range_decoder.append(day_decoder)
-
-        return jsonify(range_decoder)
-    else:
-        return jsonify()
-
-
-@hashDecoded.route('/hashDecoded/top5_type_json')
-@login_required
-@login_read_only
-def top5_type_json():
-    date_from = request.args.get('date_from')
-    date_to = request.args.get('date_to')
-
-    typ = request.args.get('type')
-    decoder = request.args.get('encoding')
-
-    if decoder == 'All encoding' or decoder is None:
-        all_decoder = r_serv_metadata.smembers('all_decoder')
-    else:
-        if not r_serv_metadata.sismember('all_decoder', decoder):
-            return jsonify({'Error': 'This decoder do not exist'})
-        else:
-            all_decoder = [decoder]
-
-    if typ == 'All types' or typ is None or typ=='None':
-        all_type = r_serv_metadata.smembers('hash_all_type')
-    else:
-        typ = typ.replace(' ', '+')
-        if not r_serv_metadata.sismember('hash_all_type', typ):
-            return jsonify({'Error': 'This type do not exist'})
-        else:
-            all_type = [typ]
-
-    date_range = []
-    if date_from is not None and date_to is not None:
-        #change format
-        try:
-            if len(date_from) != 8:
-                date_from = date_from[0:4] + date_from[5:7] + date_from[8:10]
-                date_to = date_to[0:4] + date_to[5:7] + date_to[8:10]
-            date_range = substract_date(date_from, date_to)
-        except:
-            pass
-
-    if not date_range:
-        date_range.append(datetime.date.today().strftime("%Y%m%d"))
-
-    # TODO replace with ZUNIONSTORE
-    nb_types_decoded = {}
-    for date in date_range:
-        for typ in all_type:
-            for decoder in all_decoder:
-                nb_decoded = r_serv_metadata.zscore(f'{decoder}_type:{typ}', date) # daily_type key:date mimetype 3
-                if nb_decoded is not None:
-                    if typ in nb_types_decoded:
-                        nb_types_decoded[typ] = nb_types_decoded[typ] + int(nb_decoded)
-                    else:
-                        nb_types_decoded[typ] = int(nb_decoded)
-
-    to_json = []
-    top5_types = sorted(nb_types_decoded, key=nb_types_decoded.get, reverse=True)[:5]
-    for typ in top5_types:
-        to_json.append({'name': typ, 'value': nb_types_decoded[typ]})
-    return jsonify(to_json)
-
-
-@hashDecoded.route('/hashDecoded/daily_type_json')
-@login_required
-@login_read_only
-def daily_type_json():
-    date = request.args.get('date')
-
-    daily_type = set()
-    l_b64 = r_serv_metadata.zrange('hash_date:' +date, 0, -1)
-    for hash in l_b64:
-        estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type')
-        if estimated_type is not None:
-            daily_type.add(estimated_type)
-
-    type_value = []
-    for day_type in daily_type:
-        num_day_type = r_serv_metadata.zscore('hash_type:'+day_type, date)
-        type_value.append({ 'date' : day_type, 'value' : int( num_day_type )})
-
-    return jsonify(type_value)
-
-
-@hashDecoded.route('/hashDecoded/range_type_json')
-@login_required
-@login_read_only
-def range_type_json():
-    date_from = request.args.get('date_from')
-    date_to = request.args.get('date_to')
-
-    date_range = []
-    if date_from is not None and date_to is not None:
-        #change format
-        if len(date_from) != 8:
-            date_from = date_from[0:4] + date_from[5:7] + date_from[8:10]
-            date_to = date_to[0:4] + date_to[5:7] + date_to[8:10]
-        date_range = substract_date(date_from, date_to)
-
-    if not date_range:
-        date_range.append(datetime.date.today().strftime("%Y%m%d"))
-
-    all_type = set()
-    for date in date_range:
-        l_hash = r_serv_metadata.zrange('hash_date:' +date, 0, -1)
-        if l_hash:
-            for hash in l_hash:
-                estimated_type = r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type')
-                if estimated_type:
-                    all_type.add(estimated_type)
-
-    range_type = []
-
-    list_decoder = r_serv_metadata.smembers('all_decoder')
-    for date in date_range:
-        if len(date_range) == 1:
-            if date==date_from and date==date_to:
-                for type in all_type:
-                    day_type = {}
-                    day_type['date']= type
-                    for decoder in list_decoder:
-                        num_day_decoder = r_serv_metadata.zscore(decoder+'_type:'+type, date)
-                        if num_day_decoder is None:
-                            num_day_decoder = 0
-                        day_type[decoder]= num_day_decoder
-                    range_type.append(day_type)
-            else:
-                range_type = ''
-        else:
-            day_type = {}
-            day_type['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8]
-            for type in all_type:
-                num_day_type = 0
-                for decoder in list_decoder:
-                    num_day_type_decoder = r_serv_metadata.zscore(decoder+'_type:'+type, date)
-                    if num_day_type_decoder is not None:
-                        num_day_type += num_day_type_decoder
-                day_type[type]= num_day_type
-            range_type.append(day_type)
-
-    return jsonify(range_type)
-
-
-@hashDecoded.route('/hashDecoded/hash_graph_line_json')
-@login_required
-@login_read_only
-def hash_graph_line_json():
-    hash = request.args.get('hash')
-    date_from = request.args.get('date_from')
-    date_to = request.args.get('date_to')
-
-    if date_from is None or date_to is None:
-        nb_days_seen_in_pastes = 30
-    else:
-        # # TODO: # FIXME:
-        nb_days_seen_in_pastes = 30
-
-    date_range_seen_in_pastes = get_date_range(nb_days_seen_in_pastes)
-
-    # verify input
-    if r_serv_metadata.hget('metadata_hash:'+hash, 'estimated_type') is not None:
-        json_seen_in_paste = []
-        for date in date_range_seen_in_pastes:
-            nb_seen_this_day = r_serv_metadata.zscore('hash_date:'+date, hash)
-            if nb_seen_this_day is None:
-                nb_seen_this_day = 0
-            date = date[0:4] + '-' + date[4:6] + '-' + date[6:8]
-            json_seen_in_paste.append({'date': date, 'value': int(nb_seen_this_day)})
-
-        return jsonify(json_seen_in_paste)
-    else:
-        return jsonify()
-
 ############################ PGPDump ############################
@@ -594,8 +594,7 @@ function barchart_type(url, id) {
           .attr("transform", "rotate(-20)" );
       {% else %}
           .attr("transform", "rotate(-70)" )
-          .attr("class", "bar")
-          .on("click", function (d) { window.location.href = "{{ url_for('hashDecoded.hashDecoded_page') }}"+'?date_from='+d+'&date_to='+d });
+          .attr("class", "bar");
       {% endif %}
 
   svg.append("g")
@@ -617,12 +616,6 @@ function barchart_type(url, id) {
       .attr("width", x.bandwidth())
       .attr("y", function(d) { return y(d.value); })
       .attr("height", function(d) { return height - y(d.value); })
-      {% if type %}
-      .on("click", function(d){ window.location.href = "{{ url_for('hashDecoded.hashDecoded_page') }}" +'?type={{type}}&date_from='+ d.date +'&date_to='+ d.date; });
-      {% endif %}
-      {% if daily_type_chart %}
-      .on("click", function(d){ window.location.href = "{{ url_for('hashDecoded.hashDecoded_page') }}" +'?type='+d.date+'&date_from={{ daily_date }}&date_to={{ daily_date }}'; });
-      {% endif %}
 
 
   data.forEach(function(d) {
@@ -1 +1 @@
-<li id='page-hashDecoded'><a href="{{ url_for('hashDecoded.hashDecoded_page') }}"><i class="fa fa-files-o"></i> hashesDecoded </a></li>
+<li id='page-hashDecoded'><a href="{{ url_for('objects_decoded.decodeds_dashboard') }}"><i class="fa fa-files-o"></i> hashesDecoded </a></li>
@@ -270,9 +270,9 @@ $(document).ready(function(){
     {% elif dict_object["object_type"] == "cryptocurrency" %}
       all_graph.line_chart = create_line_chart('graph_line', "{{ url_for('hashDecoded.cryptocurrency_graph_line_json') }}?type_id={{dict_object["metadata"]["type_id"]}}&key_id={{dict_object["correlation_id"]}}");
     {% elif dict_object["object_type"] == "decoded" %}
-      all_graph.line_chart = create_line_chart('graph_line', "{{ url_for('hashDecoded.hash_graph_line_json') }}?hash={{dict_object["correlation_id"]}}");
+      all_graph.line_chart = create_line_chart('graph_line', "{{ url_for('objects_decoded.graphline_json') }}?id={{dict_object["correlation_id"]}}");
     {% elif dict_object["object_type"] == "cve" %}
-      all_graph.line_chart = create_line_chart('graph_line', "{{ url_for('hashDecoded.hash_graph_line_json') }}?hash={{dict_object["correlation_id"]}}");
+      all_graph.line_chart = create_line_chart('graph_line', "{{ url_for('objects_cve.objects_cve_graphline_json') }}?id={{dict_object["correlation_id"]}}");
     {% endif %}
     all_graph.onResize();
 });
703 var/www/templates/decoded/decodeds_dashboard.html (Normal file — all 703 lines added)
@@ -0,0 +1,703 @@
<!DOCTYPE html>
<html>

<head>
  <title>Decoded - AIL</title>
  <link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">

  <!-- Core CSS -->
  <link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
  <link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
  <link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
  <link href="{{ url_for('static', filename='css/daterangepicker.min.css') }}" rel="stylesheet">

  <!-- JS -->
  <script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
  <script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
  <script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
  <script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
  <script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
  <script src="{{ url_for('static', filename='js/moment.min.js') }}"></script>
  <script src="{{ url_for('static', filename='js/jquery.daterangepicker.min.js') }}"></script>
  <script src="{{ url_for('static', filename='js/d3.min.js') }}"></script>
  <script src="{{ url_for('static', filename='js/d3/sparklines.js')}}"></script>

  <style>
    .input-group .form-control {
      position: unset;
    }
    .line {
      fill: none;
      stroke: #000;
      stroke-width: 2.0px;
    }
    .bar {
      fill: steelblue;
    }
    .bar:hover{
      fill: brown;
      cursor: pointer;
    }
    .bar_stack:hover{
      cursor: pointer;
    }
    .pie_path:hover{
      cursor: pointer;
    }
    .svgText {
      pointer-events: none;
    }
    div.tooltip {
      position: absolute;
      text-align: center;
      padding: 2px;
      font: 12px sans-serif;
      background: #ebf4fb;
      border: 2px solid #b7ddf2;
      border-radius: 8px;
      pointer-events: none;
      color: #000000;
    }
  </style>
</head>
<body>

  {% include 'nav_bar.html' %}

  <div class="container-fluid">
    <div class="row">

      {% include 'sidebars/sidebar_objects.html' %}

      <div class="col-12 col-lg-10" id="core_content">

        <div class="row">
          <div class="col-xl-10">
            <div class="mt-1" id="barchart_type">
            </div>
            <div class="card border-secondary my-2">
              <div class="card-body text-dark">
                <h5 class="card-title">Search Decoded by name:</h5>

                <form action="{{ url_for('objects_decoded.decodeds_search') }}" id="search_subtype_onj" method='post'>
                  <div class="input-group mb-1">
                    <input type="text" class="form-control col-8" name="object_id" value="" placeholder="Decoded ID" required>
                    <button class="btn btn-primary input-group-addon search-obj col-2"><i class="fas fa-search"></i></button>
                  </div>
                </form>
              </div>
            </div>
          </div>

          <div class="col-xl-2">

            <div class="card mb-3 mt-2" style="background-color:#d9edf7;">
              <div class="card-body text-center py-2">
                <h6 class="card-title" style="color:#286090;">Select a date range :</h6>
                <form action="{{ url_for('objects_decoded.decodeds_dashboard') }}" id="hash_selector_form" method='post'>
                  <div class="input-group" id="date-range-from">
                    <div class="input-group-prepend"><span class="input-group-text"><i class="far fa-calendar-alt" aria-hidden="true"></i></span></div>
                    <input class="form-control" id="date-range-from-input" placeholder="yyyy-mm-dd" value="{% if date_from %}{{ date_from[0:4] }}-{{ date_from[4:6] }}-{{ date_from[6:8] }}{% endif %}" name="date_from" autocomplete="off">
                  </div>
                  <div class="input-group" id="date-range-to">
                    <div class="input-group-prepend"><span class="input-group-text"><i class="far fa-calendar-alt" aria-hidden="true"></i></span></div>
                    <input class="form-control" id="date-range-to-input" placeholder="yyyy-mm-dd" value="{% if date_to %}{{ date_to[0:4] }}-{{ date_to[4:6] }}-{{ date_to[6:8] }}{% endif %}" name="date_to" autocomplete="off">
                  </div>
                  <div class="mt-1" style="font-size: 14px;color:#286090;">Encoding :</div>
                  <select class="custom-select" name="algo">
                    <option>All encoding</option>
                    {% for alg in algos %}
                      {% if alg|string() == algo|string() %}
                        <option selected>{{ alg }}</option>
                      {% else %}
                        <option>{{ alg }}</option>
                      {% endif %}
                    {% endfor %}
                  </select>
                  <div class="mt-1" style="font-size: 14px;color:#286090;">File Type :</div>
                  <select class="custom-select" name="mimetype">
                    <option>All types</option>
                    {% for typ in mimetypes %}
                      {% if mimetype|string() == typ|string() %}
                        <option selected>{{ typ }}</option>
                      {% else %}
                        <option>{{ typ }}</option>
                      {% endif %}
                    {% endfor %}
                  </select>
                  <div class="form-check my-1">
                    <input class="form-check-input" type="checkbox" id="checkbox-input-show" name="show_decoded" value="True" {% if show_decoded %}checked{% endif %}>
                    <label class="form-check-label" for="checkbox-input-show">
                      <span style="color:#286090; font-size: 14px;">
                        Show decoded files <i class="fas fa-file"></i>
                      </span>
                    </label>
                  </div>
                  <button class="btn btn-primary" style="text-align:center;">
                    <i class="fas fa-copy"></i> Search
                  </button>
                </form>
              </div>
            </div>

            <div id="pie_chart_encoded">
            </div>
            <div id="pie_chart_top5_types">
            </div>
          </div>
        </div>

        {% if metas|length != 0 %}
          {% if date_from|string == date_to|string %}
            <h3> {{ date_from }} Decoded files: </h3>
          {% else %}
            <h3> {{ date_from }} to {{ date_to }} Decoded files: </h3>
          {% endif %}
          <table id="tableb64" class="table table-striped table-bordered">
            <thead class="bg-dark text-white">
              <tr>
                <th>estimated type</th>
                <th>hash</th>
                <th>first seen</th>
                <th>last seen</th>
                <th>nb item</th>
                <th>size</th>
                <th>Virus Total</th>
                <th>Sparkline</th>
              </tr>
            </thead>
            <tbody style="font-size: 15px;">
              {% for meta in metas %}
                <tr>
                  <td>
                    <svg height="26" width="26">
                      <g class="nodes">
                        <circle cx="13" cy="13" r="13" fill="{{ meta['icon']['color'] }}"></circle>
                        <text x="13" y="13" text-anchor="middle" dominant-baseline="central" class="graph_node_icon {{ meta['icon']['style'] }}" font-size="16px">{{ meta['icon']['icon'] }}</text>
                      </g>
                    </svg> {{ meta['mimetype'] }}
                  </td>
                  <td><a target="_blank" href="{{ url_for('correlation.show_correlation') }}?type=decoded&id={{ meta['id'] }}">{{ meta['id'] }}</a></td>
                  <td>{{ meta['first_seen'] }}</td>
                  <td>{{ meta['last_seen'] }}</td>
                  <td>{{ meta['nb_seen'] }}</td>
                  <td>{{ meta['size'] }}</td>
                  <td>
                    {% if vt_enabled %}
                      {% if not meta['vt'] %}
                        <darkbutton_{{ meta['id'] }}>
                          <button id="submit_vt_{{ meta['id'] }}" class="btn btn-secondary" style="font-size: 14px;" onclick="sendFileToVT('{{ meta['id'] }}')">
                            <i class="fas fa-paper-plane"></i> Send this file to VT
                          </button>
                        </darkbutton_{{ meta['id'] }}>
                      {% else %}
                        <a class="btn btn-secondary" target="_blank" href="{{ meta['vt']['link'] }}" style="font-size: 14px;"><i class="fas fa-link"></i> VT Report</a>
                      {% endif %}
                      <button class="btn btn-outline-dark" onclick="updateVTReport('{{ meta['id'] }}')" style="font-size: 14px;">
                        <span id="report_vt_{{ meta['id'] }}"><i class="fas fa-sync-alt"></i> {{ meta['vt']['report'] }}</span>
                      </button>
                    {% else %}
                      Virus Total submission is disabled
                    {% endif %}
                  </td>
                  <td id="sparklines_{{ meta['id'] }}" style="text-align:center;"></td>
                </tr>
              {% endfor %}
            </tbody>
          </table>
        {% else %}
          {% if show_decoded %}
            {% if date_from|string == date_to|string %}
              <h3> {{ date_from }}, No Decoded</h3>
            {% else %}
              <h3> {{ date_from }} to {{ date_to }}, No Decodeds</h3>
            {% endif %}
          {% endif %}
        {% endif %}
      </div>

    </div>
  </div>

  <script>
    var chart = {};
    $(document).ready(function(){
      $("#page-Decoded").addClass("active");
      $("#nav_dashboard").addClass("active");

      $('#date-range-from').dateRangePicker({
        separator : ' to ',
        getValue: function()
        {
          if ($('#date-range-from-input').val() && $('#date-range-to').val() )
            return $('#date-range-from-input').val() + ' to ' + $('#date-range-to').val();
          else
            return '';
        },
        setValue: function(s,s1,s2)
        {
          $('#date-range-from-input').val(s1);
          $('#date-range-to-input').val(s2);
        }
      });
      $('#date-range-to').dateRangePicker({
        separator : ' to ',
        getValue: function()
        {
          if ($('#date-range-from-input').val() && $('#date-range-to').val() )
            return $('#date-range-from-input').val() + ' to ' + $('#date-range-to').val();
          else
            return '';
        },
        setValue: function(s,s1,s2)
        {
          $('#date-range-from-input').val(s1);
          $('#date-range-to-input').val(s2);
        }
      });
      {% if date_from %}
        $('#date-range-from').val("{{ date_from[0:4] }}-{{ date_from[4:6] }}-{{ date_from[6:8] }}");
      {% endif %}
      {% if date_to %}
        $('#date-range-to').val("{{ date_to[0:4] }}-{{ date_to[4:6] }}-{{ date_to[6:8] }}");
      {% endif %}

      $('#tableb64').DataTable({
        "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
        "iDisplayLength": 10,
        "order": [[ 3, "desc" ]]
      });

      {% if mimetype %}
        chart.stackBarChart = barchart_type_stack("{{ url_for('objects_decoded.barchart_json') }}?date_from={{date_from}}&date_to={{date_to}}&mimetype={{mimetype}}", 'id');
      {% elif date_from==date_to and date_from %}
        chart.stackBarChart = barchart_type_stack("{{ url_for('objects_decoded.barchart_json') }}?date_from={{date_from}}&date_to={{date_from}}", 'id');
      {% else %}
        chart.stackBarChart = barchart_type_stack("{{ url_for('objects_decoded.barchart_json') }}?date_from={{date_from}}&date_to={{date_to}}", 'id');
      {% endif %}

      draw_pie_chart("pie_chart_encoded", "{{ url_for('objects_decoded.decoder_pie_chart_json') }}?date_from={{date_from}}&date_to={{date_to}}&mimetype={{mimetype}}", "{{ url_for('objects_decoded.decodeds_dashboard') }}?date_from={{date_from}}&date_to={{date_to}}&mimetype={{type}}");
      draw_pie_chart("pie_chart_top5_types", "{{ url_for('objects_decoded.mimetype_pie_chart_json') }}?date_from={{date_from}}&date_to={{date_to}}&mimetype={{mimetype}}", "{{ url_for('objects_decoded.decodeds_dashboard') }}?date_from={{date_from}}&date_to={{date_to}}");

      chart.onResize();
      $(window).on("resize", function() {
        chart.onResize();
      });
    });

    function toggle_sidebar(){
      if($('#nav_menu').is(':visible')){
        $('#nav_menu').hide();
        $('#side_menu').removeClass('border-right')
        $('#side_menu').removeClass('col-lg-2')
        $('#core_content').removeClass('col-lg-10')
      }else{
        $('#nav_menu').show();
        $('#side_menu').addClass('border-right')
        $('#side_menu').addClass('col-lg-2')
        $('#core_content').addClass('col-lg-10')
      }
    }
  </script>

  <script>
    function updateVTReport(hash) {
      //updateReport
      $.getJSON("{{ url_for('objects_decoded.refresh_vt_report') }}?id="+hash,
        function(data) {
          content = '<i class="fas fa-sync-alt"></i> ' +data['report']
          $( "#report_vt_"+hash ).html(content);
        });
    }

    function sendFileToVT(hash) {
      //send file to vt
      $.getJSON("{{ url_for('objects_decoded.send_to_vt') }}?id="+hash,
        function(data) {
          var content = '<a id="submit_vt_'+hash+'" class="btn btn-primary" target="_blank" href="'+ data['link'] +'"><i class="fa fa-link"> '+ ' VT Report' +'</i></a>';
          $('#submit_vt_'+hash).remove();
          $('darkbutton_'+hash).append(content);
        });
    }
  </script>

  <script>
    {% for meta in metas %}
      sparkline("sparklines_{{ meta['id'] }}", {{ meta['sparkline'] }}, {});
    {% endfor %}
  </script>

  <script>
    var margin = {top: 20, right: 100, bottom: 55, left: 45},
        width = 1000 - margin.left - margin.right,
        height = 500 - margin.top - margin.bottom;
    var x = d3.scaleBand().rangeRound([0, width]).padding(0.1);

    var y = d3.scaleLinear().rangeRound([height, 0]);

    var xAxis = d3.axisBottom(x);

    var yAxis = d3.axisLeft(y);

    var color = d3.scaleOrdinal(d3.schemeSet3);

    var svg = d3.select("#barchart_type").append("svg")
      .attr("id", "thesvg")
      .attr("viewBox", "0 0 1000 500")
      .attr("width", width + margin.left + margin.right)
      .attr("height", height + margin.top + margin.bottom)
      .append("g")
      .attr("transform", "translate(" + margin.left + "," + margin.top + ")");

    function barchart_type_stack(url, id) {

      d3.json(url)
        .then(function(data){

        var labelVar = 'date'; //A
        var varNames = d3.keys(data[0])
            .filter(function (key) { return key !== labelVar;}); //B

        data.forEach(function (d) { //D
          var y0 = 0;
          d.mapping = varNames.map(function (name) {
            return {
              name: name,
              label: d[labelVar],
              y0: y0,
              y1: y0 += +d[name]
            };
          });
          d.total = d.mapping[d.mapping.length - 1].y1;
        });

        x.domain(data.map(function (d) { return (d.date); })); //E
        y.domain([0, d3.max(data, function (d) { return d.total; })]);

        svg.append("g")
          .attr("class", "x axis")
          .attr("transform", "translate(0," + height + ")")
          .call(xAxis)
          .selectAll("text")
          .attr("class", "bar")
          {% if date_from|string == date_to|string and mimetype is none %}
            .on("click", function (d) { window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}?date_from={{date_from}}&date_to={{date_to}}&mimetype="+d })
            .attr("transform", "rotate(-18)" )
          {% elif date_from|string == date_to|string and mimetype is not none %}
            .on("click", function (d) { window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}?date_from="+d+'&date_to='+d })
            .attr("transform", "rotate(-18)" )
          {% else %}
            .on("click", function (d) { window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}?date_from="+d+'&date_to='+d })
            .attr("transform", "rotate(-40)" )
          {% endif %}
          .style("text-anchor", "end");

        svg.append("g")
          .attr("class", "y axis")
          .call(yAxis)
          .append("text")
          .attr("transform", "rotate(-90)")
          .attr("y", 6)
          .attr("dy", ".71em")
          .style("text-anchor", "end");

        var selection = svg.selectAll(".series")
          .data(data)
          .enter().append("g")
          .attr("class", "series")
          .attr("transform", function (d) { return "translate(" + x((d.date)) + ",0)"; });

        selection.selectAll("rect")
          .data(function (d) { return d.mapping; })
          .enter().append("rect")
          .attr("class", "bar_stack")
          .attr("width", x.bandwidth())
          .attr("y", function (d) { return y(d.y1); })
          .attr("height", function (d) { return y(d.y0) - y(d.y1); })
          .style("fill", function (d) { return color(d.name); })
          .style("stroke", "grey")
          .on("mouseover", function (d) { showPopover.call(this, d); })
          .on("mouseout", function (d) { removePopovers(); })
          {% if date_from|string == date_to|string and mimetype is none %}
            .on("click", function(d){ window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}" +'?date_from={{date_from}}&date_to={{date_to}}&mimetype='+d.label+'&algo='+d.name; });
          {% elif date_from|string == date_to|string and mimetype is not none %}
            .on("click", function(d){ window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}" +'?mimetype={{mimetype}}&date_from='+d.label+'&date_to='+d.label+'&algo='+d.name; });
          {% else %}
            .on("click", function(d){ window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}" +'?mimetype='+ d.name +'&date_from='+d.label+'&date_to='+d.label; });
          {% endif %}

        data.forEach(function(d) {
          if(d.total != 0){
            svg.append("text")
              .attr("class", "bar")
              .attr("dy", "-.35em")
              .attr('x', x(d.date) + x.bandwidth()/2)
              .attr('y', y(d.total))
              {% if date_from|string == date_to|string and mimetype is none %}
                .on("click", function () {window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}"+'?date_from={{date_from}}&date_to={{date_to}}&mimetype='+d.date })
              {% elif date_from|string == date_to|string and mimetype is not none %}
                .on("click", function () {window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}?mimetype={{mimetype}}&date_from="+d.date+'&date_to='+d.date })
              {% else %}
                .on("click", function () {window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}"+'?date_from='+d.date+'&date_to='+d.date })
              {% endif %}
              .style("text-anchor", "middle")
              .text(d.total);
          }
        });

        drawLegend(varNames);
      });

    }

    function drawLegend (varNames) {
      var legend = svg.selectAll(".legend")
        .data(varNames.slice().reverse())
        .enter().append("g")
        .attr("class", "legend")
        .attr("transform", function (d, i) { return "translate(0," + i * 20 + ")"; });

      legend.append("rect")
        .attr("x", 943)
        .attr("width", 10)
        .attr("height", 10)
        .style("fill", color)
        .style("stroke", "grey");

      legend.append("text")
        .attr("class", "svgText")
        .attr("x", 941)
        .attr("y", 6)
        .attr("dy", ".35em")
        .style("text-anchor", "end")
        .text(function (d) { return d; });
    }

    function removePopovers () {
      $('.popover').each(function() {
        $(this).remove();
      });
    }

    function showPopover (d) {
      $(this).popover({
        title: "<b><span id='tooltip-id-name-bar'></span></b>",
        placement: 'top',
        container: 'body',
        trigger: 'manual',
        html : true,
        content: function() {
          return "<span id='tooltip-id-label'></span>" +
                 "<br/>num: <span id='tooltip-id-value-bar'></span>"; }
      });
      $(this).popover('show');
      $("#tooltip-id-name-bar").text(d.name);
      $("#tooltip-id-label").text(d.label);
      $("#tooltip-id-value-bar").text(d3.format(",")(d.value ? d.value: d.y1 - d.y0));
    }

    chart.onResize = function () {
      var aspect = 1000 / 500, chart = $("#thesvg");
      var targetWidth = chart.parent().width();
      chart.attr("width", targetWidth);
      chart.attr("height", targetWidth / aspect);
    }

    window.chart = chart;

  </script>

  <script>
    function draw_pie_chart(id, url_json, pie_on_click_url) {

      var width_pie = 200;
      var height_pie = 200;
      var padding_pie = 10;
      var opacity_pie = .8;

      var radius_pie = Math.min(width_pie - padding_pie, height_pie - padding_pie) / 2;
      //var color_pie = d3.scaleOrdinal(d3.schemeCategory10);
      var color_pie = d3.scaleOrdinal(d3.schemeSet3);

      var div_pie = d3.select("body").append("div")
        .attr("class", "tooltip")
        .style("opacity", 0);

      var svg_pie = d3.select("#"+id)
        .append('svg')
        .attr("width", '100%')
        .attr("height", '100%')
        .attr('viewBox','0 0 '+Math.min(width_pie,height_pie) +' '+Math.min(width_pie,height_pie) )
        .attr('preserveAspectRatio','xMinYMin')

      var g_pie = svg_pie.append('g')
        .attr('transform', 'translate(' + (width_pie/2) + ',' + (height_pie/2) + ')');

      var arc_pie = d3.arc()
        .innerRadius(0)
        .outerRadius(radius_pie);

      d3.json(url_json)
        .then(function(data){

        var pie_pie = d3.pie()
          .value(function(d) { return d.value; })
          .sort(null);

        var path_pie = g_pie.selectAll('path')
          .data(pie_pie(data))
          .enter()
          .append("g")
          .append('path')
          .attr('d', arc_pie)
          .attr('fill', (d,i) => color_pie(i))
          .attr('class', 'pie_path')
          .on("mouseover", mouseovered_pie)
          .on("mouseout", mouseouted_pie)
          .on("click", function (d) {window.location.href = pie_on_click_url+d.data.name })
          .style('opacity', opacity_pie)
          .style('stroke', 'white');
      });

      function mouseovered_pie(d) {
        //remove old content
        $("#tooltip-id-name").remove();
        $("#tooltip-id-value").remove();

        // tooltip
        var content;

        content = "<b><span id='tooltip-id-name'></span></b><br/>"+
                  "<br/>"+
                  "<i>Decoded</i>: <span id='tooltip-id-value'></span><br/>"

        div_pie.transition()
          .duration(200)
          .style("opacity", .9);
        div_pie.html(content)
          .style("left", (d3.event.pageX) + "px")
          .style("top", (d3.event.pageY - 28) + "px");

        $("#tooltip-id-name").text(d.data.name);
        $("#tooltip-id-value").text(d.data.value);
      }

      function mouseouted_pie() {
        div_pie.transition()
          .duration(500)
          .style("opacity", 0);
      }
    }

  </script>

  <script>
    function barchart_type(url, id) {

      var margin = {top: 20, right: 20, bottom: 70, left: 40};

      var width = 960 - margin.left - margin.right;
      var height = 500 - margin.top - margin.bottom;

      var x = d3.scaleBand().rangeRound([0, width]).padding(0.1);
      var y = d3.scaleLinear().rangeRound([height, 0]);

      var xAxis = d3.axisBottom(x)
        //.tickFormat(d3.time.format("%Y-%m"));

      var yAxis = d3.axisLeft(y)
        .ticks(10);

      /*var svg = d3.select(id).append("svg")
        .attr("width", width + margin.left + margin.right)
        .attr("height", height + margin.top + margin.bottom)
        .attr("id", "thesvg")
        .append("g")
        .attr("transform",
              "translate(" + margin.left + "," + margin.top + ")");*/

      d3.json(url)
        .then(function(data){

        data.forEach(function(d) {
          d.value = +d.value;
        });

        x.domain(data.map(function(d) { return d.date; }));
        y.domain([0, d3.max(data, function(d) { return d.value; })]);

        var label = svg.append("g")
          .attr("class", "x axis")
          .attr("transform", "translate(0," + height + ")")
          .call(xAxis)
          .selectAll("text")
          .style("text-anchor", "end")
          .attr("dx", "-.8em")
          .attr("dy", "-.55em")
          {% if date_from == date_to and date_from %}
            .attr("transform", "rotate(-20)" );
          {% else %}
            .attr("transform", "rotate(-70)" )
            .attr("class", "bar")
            .on("click", function (d) { window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}"+'?date_from='+d+'&date_to='+d });
          {% endif %}

        svg.append("g")
          .attr("class", "y axis")
          .call(yAxis)
          .append("text")
          .attr("transform", "rotate(-90)")
          .attr("y", 6)
          .attr("dy", ".71em")
          .style("text-anchor", "end")
          .text("Value ($)");

        var bar = svg.selectAll("bar")
          .data(data)
          .enter().append("rect")
          .attr("class", "bar")
          //.style("fill", "steelblue")
          .attr("x", function(d) { return x(d.date); })
          .attr("width", x.bandwidth())
          .attr("y", function(d) { return y(d.value); })
          .attr("height", function(d) { return height - y(d.value); })
          {% if mimetype %}
            .on("click", function(d){ window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}" +'?mimetype={{mimetype}}&date_from='+ d.date +'&date_to='+ d.date; });
          {% endif %}
          {% if date_from == date_to and date_from %}
            .on("click", function(d){ window.location.href = "{{ url_for('objects_decoded.decodeds_dashboard') }}" +'?mimetype='+d.date+'&date_from={{ date_from }}&date_to={{ date_from }}'; });
          {% endif %}

        data.forEach(function(d) {
          if(d.value != 0){
            svg.append("text")
              .attr("class", "bar")
              .attr("dy", "-.35em")
              //.text(function(d) { return d.value; });
              .text(d.value)
              .style("text-anchor", "middle")
              .attr('x', x(d.date) + x.bandwidth()/2)
              .attr('y', y(d.value));
          }
        });

      });

    }
  </script>

</body>

</html>
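The table in this new template reads a fixed set of keys from each `meta` entry returned by decoded.get_meta(options={'sparkline', 'mimetype', 'icon', 'size', 'vt'}); piecing those reads together, one entry presumably looks like the sketch below (all values illustrative; the '\uf15b' glyph matches the old get_file_icon_text mapping removed earlier in this commit):

# Illustrative meta entry, inferred from the fields the template reads.
meta = {
    'id': 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3',  # placeholder SHA1
    'mimetype': 'application/zip',
    'first_seen': '20220101',
    'last_seen': '20220131',
    'nb_seen': 4,
    'size': 1337,
    # drawn as a coloured circle + font-awesome glyph in the first column
    'icon': {'color': '#1E88E5', 'style': 'fas', 'icon': '\uf15b'},
    # falsy when never submitted, which shows the "Send this file to VT" button
    'vt': {'link': 'https://www.virustotal.com/gui/file/<sha1>', 'report': 'Not found'},
    'sparkline': [0, 1, 3, 0, 0, 2],  # per-day counts fed to sparklines.js
}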
@@ -564,7 +564,6 @@ function barchart_type(url, id) {
       {% else %}
           .attr("transform", "rotate(-70)" )
           .attr("class", "bar")
-          .on("click", function (d) { window.location.href = "{{ url_for('hashDecoded.hashDecoded_page') }}"+'?date_from='+d+'&date_to='+d });
       {% endif %}
 
   svg.append("g")
@@ -586,12 +585,6 @@ function barchart_type(url, id) {
       .attr("width", x.bandwidth())
       .attr("y", function(d) { return y(d.value); })
       .attr("height", function(d) { return height - y(d.value); })
-      {% if type %}
-      .on("click", function(d){ window.location.href = "{{ url_for('hashDecoded.hashDecoded_page') }}" +'?type={{type}}&date_from='+ d.date +'&date_to='+ d.date; });
-      {% endif %}
-      {% if daily_type_chart %}
-      .on("click", function(d){ window.location.href = "{{ url_for('hashDecoded.hashDecoded_page') }}" +'?type='+d.date+'&date_from={{ daily_date }}&date_to={{ daily_date }}'; });
-      {% endif %}
 
 
   data.forEach(function(d) {
@@ -35,7 +35,7 @@
       </a>
     </li>
     <li class="nav-item">
-      <a class="nav-link" href="{{url_for('hashDecoded.hashDecoded_page')}}" id="nav_dashboard">
+      <a class="nav-link" href="{{url_for('objects_decoded.decodeds_dashboard')}}" id="nav_dashboard">
         <i class="fas fa-lock-open"></i>
         <span>Decoded</span>
       </a>