chg: [add MISP import](no correlations) import item + pgp(incomplete) + screenshot(incomplete)

Terrtia 2020-02-06 17:14:08 +01:00
parent b31fffa728
commit 73f98c0897
No known key found for this signature in database
GPG key ID: 1E1B1F50D84613D0
7 changed files with 394 additions and 14 deletions


@@ -112,9 +112,9 @@ def export_cryptocurrency(crypto_type, crypto_address):
l_obj_attr = []
l_obj_attr.append( obj.add_attribute('address', value=crypto_address) )
#l_obj_attr.append( obj.add_attribute('symbol', value=Cryptocurrency.get_cryptocurrency_symbol(crypto_type)) )
l_obj_attr.append( obj.add_attribute('first-seen', value=dict_metadata['first_seen']) )
l_obj_attr.append( obj.add_attribute('last-seen', value=dict_metadata['last_seen']) )
crypto_symbol = Cryptocurrency.get_cryptocurrency_symbol(crypto_type)
if crypto_symbol:
l_obj_attr.append( obj.add_attribute('symbol', value=crypto_symbol) )
return obj
@@ -319,7 +319,9 @@ if __name__ == '__main__':
l_obj = [#{'id': 'crawled/2019/11/08/6d3zimnpbwbzdgnp.onionf58258c8-c990-4707-b236-762a2b881183', 'type': 'item', 'lvl': 3},
#{'id': '6d3zimnpbwbzdgnp.onion', 'type': 'domain', 'lvl': 0},
#{'id': 'a92d459f70c4dea8a14688f585a5e2364be8b91fbf924290ead361d9b909dcf1', 'type': 'image', 'lvl': 3},
#{'id': 'archive/pastebin.com_pro/2020/01/27/iHjcWhkD.gz', 'type': 'item', 'lvl': 3},
{'id': '15efuhpw5V9B1opHAgNXKPBPqdYALXP4hc', 'type': 'cryptocurrency', 'subtype': 'bitcoin', 'lvl': 0}
]
create_list_of_objs_to_export(l_obj, mode='union')
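For context, a hedged sketch of the coin-address MISP object that export_cryptocurrency() assembles for the bitcoin address listed above (dates and values are illustrative, not taken from the commit):
from pymisp import MISPObject
# roughly what export_cryptocurrency() builds for one bitcoin address
obj = MISPObject('coin-address')
obj.add_attribute('address', value='15efuhpw5V9B1opHAgNXKPBPqdYALXP4hc')
obj.add_attribute('symbol', value='BTC')            # only added when get_cryptocurrency_symbol() knows the type
obj.add_attribute('first-seen', value='20200127')   # illustrative dates
obj.add_attribute('last-seen', value='20200206')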

bin/export/MispImport.py (new executable file, 158 additions)

@@ -0,0 +1,158 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import sys
import uuid
import redis
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
import Cryptocurrency
import Pgp
import Decoded
import Domain
import Item
import Screenshot
import Correlate_object
import Import
# MISP
from pymisp import MISPEvent, MISPObject, PyMISP
# # TODO: move me to another file
def get_global_id(obj_type, obj_id, obj_subtype=None):
if obj_subtype:
return '{}:{}:{}'.format(obj_type, obj_subtype, obj_id)
else:
return '{}:{}'.format(obj_type, obj_id)
# sub type
# obj type
# obj value
def get_global_id_from_misp_obj(misp_obj):
pass
def get_misp_obj_tag(misp_obj):
if misp_obj.attributes:
misp_tags = misp_obj.attributes[0].tags
tags = []
for misp_tag in misp_tags:
tags.append(misp_tag.name)
return tags
else:
return []
def get_object_metadata(misp_obj):
obj_meta = {}
if 'first_seen' in misp_obj.keys():
obj_meta['first_seen'] = misp_obj.first_seen
if 'last_seen' in misp_obj.keys():
obj_meta['last_seen'] = misp_obj.last_seen
obj_meta['tags'] = get_misp_obj_tag(misp_obj)
return obj_meta
def unpack_item_obj(map_uuid_global_id, misp_obj):
obj_meta = get_object_metadata(misp_obj)
obj_id = None
io_content = None
for attribute in misp_obj.attributes:
if attribute.object_relation == 'raw-data':
obj_id = attribute.value # # TODO: sanitize
io_content = attribute.data # # TODO: check if type == io
if obj_id and io_content:
res = Item.create_item(obj_id, obj_meta, io_content)
print(res)
map_uuid_global_id[misp_obj.uuid] = get_global_id('item', obj_id)
def get_obj_relationship(misp_obj):
for item in misp_obj.ObjectReference:
print(item.to_json())
## TODO: handle multiple pgp in the same object
def unpack_obj_pgp(map_uuid_global_id, misp_obj):
# get obj sub type
obj_attr = misp_obj.attributes[0]
obj_id = obj_attr.value
if obj_attr.object_relation == 'key-id':
obj_subtype = 'key'
elif obj_attr.object_relation == 'user-id-name':
obj_subtype = 'name'
elif obj_attr.object_relation == 'user-id-email':
obj_subtype = 'mail'
else:
obj_subtype = None
obj_meta = get_object_metadata(misp_obj)
if obj_id and obj_subtype:
res = Pgp.pgp.create_correlation(obj_subtype, obj_id, obj_meta)
print(res)
map_uuid_global_id[misp_obj.uuid] = get_global_id('pgp', obj_id, obj_subtype=obj_subtype)
get_obj_relationship(misp_obj)
def unpack_obj_cryptocurrency(map_uuid_global_id, misp_obj):
obj_id = None
crypto_symbol = None
for attribute in misp_obj.attributes:
if attribute.object_relation == 'address':
obj_id = attribute.value
elif attribute.object_relation == 'symbol':
crypto_symbol = attribute.value
# map the MISP symbol (BTC, ETH, ...) back to the AIL subtype (bitcoin, ethereum, ...)
obj_subtype = Cryptocurrency.get_cryptocurrency_type(crypto_symbol) if crypto_symbol else None
obj_meta = get_object_metadata(misp_obj)
if obj_id and obj_subtype:
# assumes Cryptocurrency exposes a Correlation instance named cryptocurrency, like Pgp.pgp
res = Cryptocurrency.cryptocurrency.create_correlation(obj_subtype, obj_id, obj_meta)
print(res)
map_uuid_global_id[misp_obj.uuid] = get_global_id('cryptocurrency', obj_id, obj_subtype=obj_subtype)
get_obj_relationship(misp_obj)
def get_misp_import_fct(map_uuid_global_id, misp_obj):
#print(misp_obj.ObjectReference)
#for item in misp_obj.ObjectReference:
# print(item.to_json())
#obj_meta = get_object_metadata(misp_obj)
#print(misp_obj.name)
if misp_obj.name == 'ail-leak':
unpack_item_obj(map_uuid_global_id, misp_obj)
#print(misp_obj.to_json())
pass
elif misp_obj.name == 'domain-ip':
pass
elif misp_obj.name == 'pgp-meta':
unpack_obj_pgp(map_uuid_global_id, misp_obj)
elif misp_obj.name == 'coin-address':
pass
elif misp_obj.name == 'file':
#unpack_item_obj(map_uuid_global_id, misp_obj)
pass
def import_objs_from_file(filepath):
event_to_import = MISPEvent()
event_to_import.load_file(filepath)
map_uuid_global_id = {}
for misp_obj in event_to_import.objects:
get_misp_import_fct(map_uuid_global_id, misp_obj)
print(map_uuid_global_id)
if __name__ == '__main__':
# misp = PyMISP('https://127.0.0.1:8443/', 'uXgcN42b7xuL88XqK5hubwD8Q8596VrrBvkHQzB0', False)
import_objs_from_file('test_import_item.json')
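For context, a minimal usage sketch of the helpers above (assumes AIL_BIN is set and bin/export is on sys.path; the event file is the test value referenced in __main__):
from MispImport import get_global_id, import_objs_from_file
# get_global_id() joins type[, subtype] and id with ':'
print(get_global_id('item', 'archive/pastebin.com_pro/2020/01/27/iHjcWhkD.gz'))
# -> item:archive/pastebin.com_pro/2020/01/27/iHjcWhkD.gz
print(get_global_id('cryptocurrency', '15efuhpw5V9B1opHAgNXKPBPqdYALXP4hc', obj_subtype='bitcoin'))
# -> cryptocurrency:bitcoin:15efuhpw5V9B1opHAgNXKPBPqdYALXP4hc
# parse a MISP event export (JSON on disk) and create the supported objects in AIL
import_objs_from_file('test_import_item.json')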


@@ -29,7 +29,8 @@ def get_screenshot_rel_path(sha256_string, add_extension=False):
return screenshot_path
def get_screenshot_filepath(sha256_string):
filename = os.path.join(SCREENSHOT_FOLDER, get_screenshot_rel_path(sha256_string, add_extension=True))
return os.path.realpath(filename)
def exist_screenshot(sha256_string):
screenshot_path = get_screenshot_filepath(sha256_string)
@@ -133,3 +134,25 @@ def get_screenshot_file_content(sha256_string):
with open(filepath, 'rb') as f:
file_content = BytesIO(f.read())
return file_content
def save_screenshot_file(sha256_string, io_content):
filepath = get_screenshot_filepath(sha256_string)
if os.path.isfile(filepath):
print('File already exists')
return False
# # TODO: check if is IO file
with open(filepath, 'wb') as f:
f.write(io_content.getvalue())
return True
def create_screenshot(sha256_string, obj_meta, io_content):
# # TODO: check if valid sha256
res = save_screenshot_file(sha256_string, io_content)
if res:
# create tags
if 'tags' in obj_meta:
# # TODO: handle mixed tags: taxonomies and Galaxies
Tag.api_add_obj_tags(tags=obj_meta['tags'], object_id=sha256_string, object_type="image")
return True
return False
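As an illustration, a hedged sketch of how the new helpers could be called from an importer (assumes AIL_BIN/lib is on sys.path; content, hash and tag are placeholders):
from io import BytesIO
import hashlib
import Screenshot
png_bytes = b'\x89PNG\r\n\x1a\n...'                      # placeholder image content
sha256_string = hashlib.sha256(png_bytes).hexdigest()
obj_meta = {'tags': ['infoleak:submission="crawler"']}   # placeholder tag
# writes the file under SCREENSHOT_FOLDER, then tags the image object
created = Screenshot.create_screenshot(sha256_string, obj_meta, BytesIO(png_bytes))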


@@ -91,7 +91,35 @@ def save_cryptocurrency_data(cryptocurrency_name, date, item_path, cryptocurrenc
r_serv_metadata.sadd('set_domain_cryptocurrency_{}:{}'.format(cryptocurrency_name, cryptocurrency_address), domain)
def get_cryptocurrency_symbol(crypto_type):
if crypto_type=='bitcoin':
return 'BTC'
elif crypto_type=='ethereum':
return 'ETH'
elif crypto_type=='bitcoin-cash':
return 'BCH'
elif crypto_type=='litecoin':
return 'LTC'
elif crypto_type=='monero':
return 'XMR'
elif crypto_type=='zcash':
return 'ZEC'
elif crypto_type=='dash':
return 'DASH'
return None
def get_cryptocurrency_type(crypto_symbol):
if crypto_symbol=='BTC':
return 'bitcoin'
elif crypto_symbol=='ETH':
return 'ethereum'
elif crypto_symbol=='BCH':
return 'bitcoin-cash'
elif crypto_symbol=='LTC':
return 'litecoin'
elif crypto_symbol=='XMR':
return 'monero'
elif crypto_symbol=='ZEC':
return 'zcash'
elif crypto_symbol=='DASH':
return 'dash'
return None
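The two helpers are intended as inverses; a short sketch of the mapping (assuming the module is imported as Cryptocurrency, as MispExport.py does):
print(Cryptocurrency.get_cryptocurrency_symbol('monero'))    # -> 'XMR'
print(Cryptocurrency.get_cryptocurrency_type('XMR'))         # -> 'monero'
print(Cryptocurrency.get_cryptocurrency_symbol('dogecoin'))  # -> None (unsupported type)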


@@ -21,14 +21,18 @@ import Decoded
import Screenshot
config_loader = ConfigLoader.ConfigLoader()
# get and sanitize PASTE DIRECTORY
PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/'
PASTES_FOLDER = os.path.join(os.path.realpath(PASTES_FOLDER), '')
r_cache = config_loader.get_redis_conn("Redis_Cache")
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
screenshot_directory = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"))
config_loader = None
def exist_item(item_id):
filename = get_item_filepath(item_id)
if os.path.isfile(filename):
return True
else:
return False
@@ -37,7 +41,8 @@ def get_item_id(full_path):
return full_path.replace(PASTES_FOLDER, '', 1)
def get_item_filepath(item_id):
filename = os.path.join(PASTES_FOLDER, item_id)
return os.path.realpath(filename)
def get_item_date(item_id, add_separator=False):
l_directory = item_id.split('/')
@@ -302,8 +307,102 @@ def get_item_har_name(item_id):
def get_item_har(har_path):
pass
def get_item_filename(item_id):
# Creating the full filepath
filename = os.path.join(PASTES_FOLDER, item_id)
filename = os.path.realpath(filename)
# incorrect filename
if not os.path.commonprefix([filename, PASTES_FOLDER]) == PASTES_FOLDER:
return None
else:
return filename
def get_item_duplicate(item_id, r_list=True):
res = r_serv_metadata.smembers('dup:{}'.format(item_id))
if r_list:
if res:
return list(res)
else:
return []
return res
def add_item_duplicate(item_id, l_dup):
for item_dup in l_dup:
r_serv_metadata.sadd('dup:{}'.format(item_dup), item_id)
r_serv_metadata.sadd('dup:{}'.format(item_id), item_dup)
def delete_item_duplicate(item_id):
for item_dup in get_item_duplicate(item_id):
r_serv_metadata.srem('dup:{}'.format(item_dup), item_id)
r_serv_metadata.delete('dup:{}'.format(item_id))
def get_raw_content(item_id):
filepath = get_item_filepath(item_id)
with open(filepath, 'rb') as f:
file_content = BytesIO(f.read())
return file_content
def save_raw_content(item_id, io_content):
filepath = get_item_filename(item_id)
# get_item_filename() returns None when item_id resolves outside PASTES_FOLDER
if not filepath:
return False
if os.path.isfile(filepath):
print('File already exists')
return False
# # TODO: check if is IO file
with open(filepath, 'wb') as f:
f.write(io_content.getvalue())
return True
# IDEA: send item to duplicate ?
def create_item(obj_id, obj_metadata, io_content):
'''
Create a new Item (Import or Test only).
:param obj_id: item id
:param obj_metadata: item metadata, dict with optional keys 'first_seen' and 'tags'
:param io_content: item content (BytesIO)
:return: True if the item was created
:rtype: boolean
'''
# check if datetime match ??
# # TODO: validate obj_id
res = save_raw_content(obj_id, io_content)
# item saved
if res:
# create tags
if 'tags' in obj_metadata:
# # TODO: handle mixed tags: taxonomies and Galaxies
Tag.api_add_obj_tags(tags=obj_metadata['tags'], object_id=obj_id, object_type="item")
return True
# Item not created
return False
def delete_item(obj_id):
# check if item exists
if not exist_item(obj_id):
return False
else:
Tag.delete_obj_tags(obj_id, 'item', Tag.get_obj_tag(obj_id))
delete_item_duplicate(obj_id)
# delete MISP event
r_serv_metadata.delete('misp_events:{}'.format(obj_id))
r_serv_metadata.delete('hive_cases:{}'.format(obj_id))
os.remove(get_item_filename(obj_id))
return True
# get all correlation
# delete them
### REQUIRE MORE WORK
# delete child/son !!!
# delete from tracked items
# delete from queue
###
return False
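A minimal usage sketch of an import-style call to create_item() (assumes AIL_BIN/packages is on sys.path; the item id and tag are placeholders):
from io import BytesIO
import Item
obj_id = 'submitted/2020/02/06/example.gz'   # placeholder id, relative to PASTES_FOLDER
obj_metadata = {'first_seen': '20200206', 'tags': ['infoleak:submission="manual"']}   # placeholder tag
io_content = BytesIO(b'item content to import')
# writes the raw content under PASTES_FOLDER, then tags the new item;
# returns False if the file already exists
created = Item.create_item(obj_id, obj_metadata, io_content)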


@@ -377,6 +377,7 @@ class Paste(object):
Save an attribute as a field
"""
for tuple in value:
tuple = tuple.replace(PASTES_FOLDER, '', 1)
self.store_metadata.sadd('dup:'+self.p_path, tuple)
def save_others_pastes_attribute_duplicate(self, list_value):
@@ -384,6 +385,7 @@ class Paste(object):
Save a new duplicate on others pastes
"""
for hash_type, path, percent, date in list_value:
path = path.replace(PASTES_FOLDER, '', 1)
to_add = (hash_type, self.p_path, percent, date)
self.store_metadata.sadd('dup:'+path,to_add)


@@ -33,6 +33,13 @@ class Correlation(object):
else:
return r_serv_metadata.exists('set_domain_{}_{}:{}'.format(self.correlation_name, correlation_type, field_name))
def exist_correlation(self, subtype, obj_id):
res = r_serv_metadata.zscore('{}_all:{}'.format(self.correlation_name, subtype), obj_id)
if res:
return True
else:
return False
def _get_items(self, correlation_type, field_name):
res = r_serv_metadata.smembers('set_{}_{}:{}'.format(self.correlation_name, correlation_type, field_name))
if res:
@@ -104,7 +111,13 @@ class Correlation(object):
'''
return self.all_correlation_types
def get_correlation_obj_type(self):
if self.correlation_name=='pgpdump':
return 'pgp'
else:
return 'cryptocurrency'
def sanythise_correlation_types(self, correlation_types, r_boolean=False):
'''
Check if all correlation types in the list are valid.
@@ -115,11 +128,20 @@ class Correlation(object):
:rtype: list
'''
if correlation_types is None:
if r_boolean:
return False
else:
return self.get_all_correlation_types()
for correl in correlation_types: # # TODO: # OPTIMIZE:
if correl not in self.get_all_correlation_types():
if r_boolean:
return False
else:
return self.get_all_correlation_types()
if r_boolean:
return True
else:
return correlation_types
def _get_domain_correlation_obj(self, domain, correlation_type):
@@ -254,11 +276,57 @@ class Correlation(object):
correlation_obj[correlation_object] = res
return correlation_obj
def update_correlation_daterange(self, subtype, obj_id, date): # # TODO: update first_seen
# obj_id doesn't exist yet
if not r_serv_metadata.exists('{}_metadata_{}:{}'.format(self.correlation_name, subtype, obj_id)):
r_serv_metadata.hset('{}_metadata_{}:{}'.format(self.correlation_name, subtype, obj_id), 'first_seen', date)
r_serv_metadata.hset('{}_metadata_{}:{}'.format(self.correlation_name, subtype, obj_id), 'last_seen', date)
else:
last_seen = r_serv_metadata.hget('{}_metadata_{}:{}'.format(self.correlation_name, subtype, obj_id), 'last_seen')
if not last_seen:
r_serv_metadata.hset('{}_metadata_{}:{}'.format(self.correlation_name, subtype, obj_id), 'last_seen', date)
else:
if int(last_seen) < int(date):
r_serv_metadata.hset('{}_metadata_{}:{}'.format(self.correlation_name, subtype, obj_id), 'last_seen', date)
def save_item_correlation(self, subtype, date, obj_id, item_id, item_date):
self.update_correlation_daterange(subtype, obj_id, item_date)
# global set
r_serv_metadata.sadd('set_{}_{}:{}'.format(self.correlation_name, subtype, obj_id), item_id)
# daily
r_serv_metadata.hincrby('{}:{}:{}'.format(self.correlation_name, subtype, item_date), obj_id, 1)
# all type
r_serv_metadata.zincrby('{}_all:{}'.format(self.correlation_name, subtype), obj_id, 1)
## object_metadata
# item
r_serv_metadata.sadd('item_{}_{}:{}'.format(self.correlation_name, subtype, item_id), obj_id)
def save_domain_correlation(self, domain, correlation_type, correlation_value):
r_serv_metadata.sadd('domain_{}_{}:{}'.format(self.correlation_name, correlation_type, domain), correlation_value)
r_serv_metadata.sadd('set_domain_{}_{}:{}'.format(self.correlation_name, correlation_type, correlation_value), domain)
def save_correlation(self, subtype, obj_id): # # TODO: add first_seen/last_seen
r_serv_metadata.zincrby('{}_all:{}'.format(self.correlation_name, subtype), obj_id, 0)
return True
def create_correlation(self, subtype, obj_id, obj_meta):
res = self.sanythise_correlation_types([subtype], r_boolean=True)
if not res:
print('invalid subtype')
return False
if not self.exist_correlation(subtype, obj_id):
res = self.save_correlation(subtype, obj_id)
if res:
if 'tags' in obj_meta:
# # TODO: handle mixed tags: taxonomies and Galaxies
Tag.api_add_obj_tags(tags=obj_meta['tags'], object_id=obj_id, object_type=self.get_correlation_obj_type())
return True
return False
######## API EXPOSED ########
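As an illustration, a hedged sketch of create_correlation() driven through the pgp correlation (assuming, as MispImport.py does, that Pgp exposes a Correlation instance named pgp with correlation_name 'pgpdump'; the mail address is a placeholder):
import Pgp
obj_meta = {'tags': []}   # optional 'tags' list, as expected by create_correlation()
# registers the address in the 'pgpdump_all:mail' sorted set if it is not already known,
# then applies the tags to the pgp object
Pgp.pgp.create_correlation('mail', 'alice@example.com', obj_meta)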