Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-10 08:38:28 +00:00)

Commit f97698ad44: Merge branch 'master' into crawler_v2
9 changed files with 57 additions and 23 deletions
@@ -75,23 +75,23 @@ def export_ail_item(item_id):
         tag_misp_object_attributes(l_obj_attr, dict_metadata['tags'])
     return obj

-# # TODO: create domain-port-history object
 def export_domain(domain):
     domain_obj = Domain.Domain(domain)
     dict_metadata = domain_obj.get_domain_metadata(tags=True)
-    dict_metadata['ports'] = ['80', '223', '443']

     # create domain-ip obj
-    obj = MISPObject('domain-ip', standalone=True)
+    obj = MISPObject('domain-crawled', standalone=True)
     obj.first_seen = dict_metadata['first_seen']
     obj.last_seen = dict_metadata['last_check']

     l_obj_attr = []
-    l_obj_attr.append( obj.add_attribute('first-seen', value=dict_metadata['first_seen']) )
-    l_obj_attr.append( obj.add_attribute('last-seen', value=dict_metadata['last_check']) )
     l_obj_attr.append( obj.add_attribute('domain', value=domain) )
-    for port in dict_metadata['ports']:
-        l_obj_attr.append( obj.add_attribute('port', value=port) )
+    dict_all_url = Domain.get_domain_all_url(domain, domain_obj.get_domain_type())
+    for crawled_url in dict_all_url:
+        attribute = obj.add_attribute('url', value=crawled_url)
+        attribute.first_seen = str(dict_all_url[crawled_url]['first_seen'])
+        attribute.last_seen = str(dict_all_url[crawled_url]['last_seen'])
+        l_obj_attr.append( attribute )

     # add tags
     if dict_metadata['tags']:
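Note on the hunk above: the export now emits a single 'domain-crawled' MISP object whose url attributes each carry their own first_seen/last_seen range, instead of a 'domain-ip' object with one port attribute per port. A minimal standalone sketch of the resulting object, assuming only pymisp; the domain, URL, and dates are made-up sample data:

    from pymisp import MISPObject

    # Hypothetical output of Domain.get_domain_all_url(): url -> first/last crawl date
    dict_all_url = {'http://example.onion/login': {'first_seen': 20200110, 'last_seen': 20200301}}

    obj = MISPObject('domain-crawled', standalone=True)  # custom (non-template) object name, as in the diff
    l_obj_attr = [obj.add_attribute('domain', value='example.onion')]
    for crawled_url in dict_all_url:
        attribute = obj.add_attribute('url', value=crawled_url)
        attribute.first_seen = str(dict_all_url[crawled_url]['first_seen'])  # per-attribute seen range
        attribute.last_seen = str(dict_all_url[crawled_url]['last_seen'])
        l_obj_attr.append(attribute)
    print(obj.to_json())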
@@ -190,7 +190,7 @@ def unpack_file(map_uuid_global_id, misp_obj):
 def get_misp_import_fct(map_uuid_global_id, misp_obj):
     if misp_obj.name == 'ail-leak':
         unpack_item_obj(map_uuid_global_id, misp_obj)
-    elif misp_obj.name == 'domain-ip':
+    elif misp_obj.name == 'domain-crawled':
         pass
     elif misp_obj.name == 'pgp-meta':
         unpack_obj_pgp(map_uuid_global_id, misp_obj)
@@ -327,6 +327,28 @@ def get_all_domain_up_by_type(domain_type):
     else:
         return ({"status": "error", "reason": "Invalid domain type"}, 400)

+def get_domain_all_url(domain, domain_type, domain_ports=None):
+    if not domain_ports:
+        domain_ports = get_domain_all_ports(domain, domain_type)
+    all_url = {}
+    for port in domain_ports:
+        for dict_history in get_domain_history_with_status(domain, domain_type, port, add_root_item=True):
+            if dict_history['status']: # domain UP
+                crawled_items = get_domain_items(domain, dict_history['root_item'])
+                for item_id in crawled_items:
+                    item_url = Item.get_item_link(item_id)
+                    item_date = int(Item.get_item_date(item_id))
+                    if item_url:
+                        if item_url not in all_url:
+                            all_url[item_url] = {'first_seen': item_date, 'last_seen': item_date}
+                        else: # update first_seen / last_seen
+                            if item_date < all_url[item_url]['first_seen']:
+                                all_url[item_url]['first_seen'] = item_date
+                            if item_date > all_url[item_url]['last_seen']:
+                                all_url[item_url]['last_seen'] = item_date
+    return all_url
+
+
 def get_domain_items(domain, root_item_id):
     dom_item = get_domain_item_children(domain, root_item_id)
     dom_item.append(root_item_id)
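Note on get_domain_all_url(): it folds every crawl of every port into one dict keyed by URL, keeping a running first_seen/last_seen per URL. A self-contained sketch of just that aggregation step, written with min/max in place of the two if-comparisons; the dates are made-up integers, assuming AIL's YYYYMMDD item dates per the int() cast on Item.get_item_date():

    # Made-up dates for one URL observed across three crawl sessions
    crawl_dates = [20200105, 20200301, 20200117]

    all_url = {}
    item_url = 'http://example.onion/'
    for item_date in crawl_dates:
        if item_url not in all_url:
            all_url[item_url] = {'first_seen': item_date, 'last_seen': item_date}
        else:  # update first_seen / last_seen
            all_url[item_url]['first_seen'] = min(all_url[item_url]['first_seen'], item_date)
            all_url[item_url]['last_seen'] = max(all_url[item_url]['last_seen'], item_date)

    assert all_url[item_url] == {'first_seen': 20200105, 'last_seen': 20200301}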
@@ -605,7 +627,7 @@ def get_domain_history(domain, domain_type, port): # TODO: add date_range: from to + nb_elem
     '''
     return r_serv_onion.zrange('crawler_history_{}:{}:{}'.format(domain_type, domain, port), 0, -1, withscores=True)

-def get_domain_history_with_status(domain, domain_type, port): # TODO: add date_range: from to + nb_elem
+def get_domain_history_with_status(domain, domain_type, port, add_root_item=False): # TODO: add date_range: from to + nb_elem
     '''
     Retun .

@@ -619,14 +641,17 @@ def get_domain_history_with_status(domain, domain_type, port): # TODO: add date_range: from to + nb_elem
     history = get_domain_history(domain, domain_type, port)
     for root_item, epoch_val in history:
         epoch_val = int(epoch_val) # force int
+        dict_history = {"epoch": epoch_val, "date": time.strftime('%Y/%m/%d - %H:%M.%S', time.gmtime(epoch_val))}
         # domain down, root_item==epoch_val
         try:
             int(root_item)
-            status = False
+            dict_history['status'] = False
         # domain up, root_item=str
         except ValueError:
-            status = True
-        l_history.append({"epoch": epoch_val, "date": time.strftime('%Y/%m/%d - %H:%M.%S', time.gmtime(epoch_val)), "status": status})
+            dict_history['status'] = True
+        if add_root_item:
+            dict_history['root_item'] = root_item
+        l_history.append(dict_history)
     return l_history

 def verify_if_domain_exist(domain):
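Note on the hunk above: the try/int()/except ValueError test works because, per the inline comments, a failed crawl stores the epoch itself as the sorted-set member while a successful crawl stores the root item id (a path string). With add_root_item=True the member is returned either way. A hedged example of the new return value; the epochs and item id are invented, and the date strings follow the '%Y/%m/%d - %H:%M.%S' format used above:

    # get_domain_history_with_status('example.onion', 'onion', 80, add_root_item=True)
    # might return something shaped like this:
    [{'epoch': 1571314000, 'date': '2019/10/17 - 12:06.40', 'status': False,
      'root_item': '1571314000'},  # domain was down: member is the epoch itself
     {'epoch': 1571400000, 'date': '2019/10/18 - 12:00.00', 'status': True,
      'root_item': 'crawled/2019/10/18/example.onion/hypothetical-item-id'}]  # domain was up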
@@ -62,7 +62,7 @@ sleep 0.1

 for ((i=0;i<=$((${n} - 1));i++)); do
     port_number=$((${p} + $i))
-    screen -S "Docker_Splash" -X screen -t "docker_splash:$port_number" bash -c 'sudo docker run -d -p '$port_number':8050 --restart=always --cpus=1 --memory=4.5G -v '$f':/etc/splash/proxy-profiles/ --net="bridge" scrapinghub/splash --maxrss '$u'; read x'
+    screen -S "Docker_Splash" -X screen -t "docker_splash:$port_number" bash -c 'sudo docker run -d -p '$port_number':8050 --restart=always --cpus=1 --memory=2G -v '$f':/etc/splash/proxy-profiles/ --net="bridge" scrapinghub/splash --maxrss '$u'; read x'
     sleep 0.1
     printf "$GREEN    Splash server launched on port $port_number$DEFAULT\n"
 done
@@ -137,14 +137,10 @@ def export_object_file():
         dict_misp_event_export = None

     if l_obj_invalid:
-        for obj_dict in l_obj_to_export:
-            obj_dict['uuid'] = str(uuid.uuid4())
-            obj_dict['type'] = Correlate_object.get_obj_str_type_subtype(obj_dict['type'], obj_dict.get('subtype', None))
-
         # get user saved obj to export # # TODO: # performance
         l_obj_to_export = AILObjects.get_user_list_of_obj_to_export(user_id)

-        for obj_dict in l_obj_invalid:
+        for obj_dict in l_obj_invalid: # set uuid input
             obj_dict['uuid'] = str(uuid.uuid4())
             obj_dict['type'] = Correlate_object.get_obj_str_type_subtype(obj_dict['type'], obj_dict.get('subtype', None))

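Note on the hunk above: only the invalid objects now get a fresh uuid and a flattened type string before being rendered back into the form; the identical pass over l_obj_to_export was dropped, since that list is re-fetched immediately afterwards and any mutation would have been discarded. A self-contained sketch of the surviving normalization; the Correlate_object helper is stubbed here and its real formatting may differ, an assumption for illustration:

    import uuid

    def get_obj_str_type_subtype(obj_type, obj_subtype):
        # Stand-in for Correlate_object.get_obj_str_type_subtype(): assumed shape
        return '{};{}'.format(obj_type, obj_subtype) if obj_subtype else obj_type

    l_obj_invalid = [{'type': 'pgp', 'subtype': 'key', 'id': '0xAAAA'}]
    for obj_dict in l_obj_invalid:  # set uuid input
        obj_dict['uuid'] = str(uuid.uuid4())
        obj_dict['type'] = get_obj_str_type_subtype(obj_dict['type'], obj_dict.get('subtype', None))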
@@ -152,7 +148,6 @@ def export_object_file():
                                l_obj_invalid=l_obj_invalid, dict_misp_event_export=dict_misp_event_export)
     else:
         if export_to_misp and MispExport.ping_misp():
-            l_obj_to_export = AILObjects.get_user_list_of_obj_to_export(user_id)
             event = MispExport.create_list_of_objs_to_export(l_obj_to_export, r_type='event')

             event_metadata = MispExport.create_misp_event(event, distribution=dict_misp_event_export.get('export_to_misp', None),
@@ -167,7 +162,6 @@ def export_object_file():
                                l_obj_invalid=[], dict_misp_event_export=[])
         else:
             # get user saved obj to export # # TODO: # performance
-            l_obj_to_export = AILObjects.get_user_list_of_obj_to_export(user_id)
             json_export = MispExport.create_list_of_objs_to_export(l_obj_to_export)
             export_filename = MispExport.get_export_filename(json_export)
             json_export = MispExport.create_in_memory_file(json_export.to_json())
@@ -8,7 +8,7 @@
     <div class="text-center">
         <canvas id="canvas_{{loop.index0}}" style="max-height: 400px; max-width: 100%;"></canvas>
     </div>
-    <div class="card-body">
+    <div class="card-body pb-0">
         <h5 class="card-title">
             <a target="_blank" href="{{ url_for('crawler_splash.showDomain') }}?domain={{dict_domain["id"]}}">
                 {{dict_domain["id"]}}
@@ -39,6 +39,9 @@
         {% endfor %}
         </div>
     </div>
+    {% with obj_type='domain', obj_id=dict_domain["id"], obj_lvl=0%}
+        {% include 'import_export/block_add_user_object_to_export_small.html' %}
+    {% endwith %}
 </div>

 {% if loop.index0 % 4 == 3 %}
@@ -0,0 +1,12 @@
+<div class="d-flex flex-row-reverse bd-highlight">
+    <div>
+        <a class="btn" target="_blank" href="{{ url_for('import_export.add_object_id_to_export')}}?obj_type={{obj_type}}&obj_id={{obj_id}}&obj_lvl={{obj_lvl}}{%if obj_subtype%}&obj_subtype={{obj_subtype}}{%endif%}">
+            <img id="misp-logo" src="{{ url_for('static', filename='image/misp-logo.png')}}" height="25">
+        </a>
+    </div>
+    <div>
+        <a class="btn btn-outline-light" href="{{ url_for('correlation.show_correlation')}}?object_type={{obj_type}}&correlation_id={{ obj_id }}" target="_blank" style="font-size: 15px">
+            <i class="fas fa-project-diagram text-secondary"></i>
+        </a>
+    </div>
+</div>
@@ -73,7 +73,7 @@
     <span class="btn btn-info input-group-addon add-field col-1"><i class="fas fa-plus"></i></span>
 </div>
 {% for obj_dict in l_obj_to_export %}
-    {% with obj_type=obj_dict['type'], obj_id=obj_dict['id'], obj_lvl=obj_dict['lvl'], input_uuid=obj_dict['uuid'], obj_error=False%}
+    {% with obj_type=obj_dict['type'], obj_id=obj_dict['id'], obj_lvl=obj_dict['lvl'], input_uuid=obj_dict, obj_error=False%}
         {% include 'import_export/block_to_export_input.html' %}
     {% endwith %}
 {% endfor %}
@@ -112,7 +112,7 @@ then
 fi

 #Update MISP Taxonomies and Galaxies
-python3 -m pip install git+https://github.com/MISP/PyTaxonomies
+python3 -m pip install git+https://github.com/MISP/PyTaxonomies --upgrade
 python3 -m pip install git+https://github.com/MISP/PyMISPGalaxies --upgrade

 #Update PyMISP