mirror of
https://github.com/MISP/misp-galaxy.git
synced 2024-11-22 23:07:19 +00:00
Merge branch 'master' of https://github.com/MISP/misp-galaxy
This commit is contained in:
commit
cc0bd96527
10 changed files with 52840 additions and 14141 deletions
12
README.md
12
README.md
|
@@ -94,12 +94,12 @@ The MISP galaxy (JSON files) are dual-licensed under:
|
|||
or
|
||||
|
||||
~~~~
|
||||
Copyright (c) 2015-2018 Alexandre Dulaunoy - a@foo.be
|
||||
Copyright (c) 2015-2018 CIRCL - Computer Incident Response Center Luxembourg
|
||||
Copyright (c) 2015-2018 Andras Iklody
|
||||
Copyright (c) 2015-2018 Raphael Vinot
|
||||
Copyright (c) 2015-2018 Deborah Servili
|
||||
Copyright (c) 2016-2018 Various contributors to MISP Project
|
||||
Copyright (c) 2015-2019 Alexandre Dulaunoy - a@foo.be
|
||||
Copyright (c) 2015-2019 CIRCL - Computer Incident Response Center Luxembourg
|
||||
Copyright (c) 2015-2019 Andras Iklody
|
||||
Copyright (c) 2015-2019 Raphael Vinot
|
||||
Copyright (c) 2015-2019 Deborah Servili
|
||||
Copyright (c) 2016-2019 Various contributors to MISP Project
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
|
File diff suppressed because it is too large
Load diff
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
@@ -409,10 +409,6 @@
|
|||
"uuid": "2287c024-9643-43ef-8776-858d3994b9ac",
|
||||
"value": "Streaming service"
|
||||
},
|
||||
{
|
||||
"uuid": "97e018e8-e03b-48ff-8add-1059f035069a",
|
||||
"value": "Puplishing industry"
|
||||
},
|
||||
{
|
||||
"uuid": "867cbcb3-8baa-476f-bec5-ceb36e9b1e09",
|
||||
"value": "Publishing industry"
|
||||
|
@@ -486,5 +482,5 @@
|
|||
"value": "Immigration"
|
||||
}
|
||||
],
|
||||
"version": 1
|
||||
"version": 2
|
||||
}
|
||||
|
|
|
@@ -2101,7 +2101,8 @@
|
|||
"https://www.cfr.org/interactive/cyber-operations/apt-28",
|
||||
"https://blogs.microsoft.com/on-the-issues/2018/08/20/we-are-taking-new-steps-against-broadening-threats-to-democracy/",
|
||||
"https://www.bleepingcomputer.com/news/security/microsoft-disrupts-apt28-hacking-campaign-aimed-at-us-midterm-elections/",
|
||||
"https://www.bleepingcomputer.com/news/security/apt28-uses-lojax-first-uefi-rootkit-seen-in-the-wild/"
|
||||
"https://www.bleepingcomputer.com/news/security/apt28-uses-lojax-first-uefi-rootkit-seen-in-the-wild/",
|
||||
"https://www.accenture.com/us-en/blogs/blogs-snakemackerel-delivers-zekapab-malware"
|
||||
],
|
||||
"synonyms": [
|
||||
"APT 28",
|
||||
|
@@ -2110,6 +2111,7 @@
|
|||
"PawnStorm",
|
||||
"Fancy Bear",
|
||||
"Sednit",
|
||||
"SNAKEMACKEREL",
|
||||
"TsarTeam",
|
||||
"Tsar Team",
|
||||
"TG-4127",
|
||||
|
@@ -2173,7 +2175,8 @@
|
|||
"https://www.us-cert.gov/sites/default/files/publications/AR-17-20045_Enhanced_Analysis_of_GRIZZLY_STEPPE_Activity.pdf",
|
||||
"https://www.fireeye.com/blog/threat-research/2017/03/dissecting_one_ofap.html",
|
||||
"https://www.cfr.org/interactive/cyber-operations/dukes",
|
||||
"https://pylos.co/2018/11/18/cozybear-in-from-the-cold/"
|
||||
"https://pylos.co/2018/11/18/cozybear-in-from-the-cold/",
|
||||
"https://cloudblogs.microsoft.com/microsoftsecure/2018/12/03/analysis-of-cyberattack-on-u-s-think-tanks-non-profits-public-sector-by-unidentified-attackers/"
|
||||
],
|
||||
"synonyms": [
|
||||
"Dukes",
|
||||
|
@@ -2191,7 +2194,8 @@
|
|||
"The Dukes",
|
||||
"Minidionis",
|
||||
"SeaDuke",
|
||||
"Hammer Toss"
|
||||
"Hammer Toss",
|
||||
"YTTRIUM"
|
||||
]
|
||||
},
|
||||
"related": [
|
||||
|
@@ -3320,12 +3324,14 @@
|
|||
"https://raw.githubusercontent.com/pan-unit42/playbook_viewer/master/playbook_json/oilrig.json",
|
||||
"https://www.cfr.org/interactive/cyber-operations/oilrig",
|
||||
"https://researchcenter.paloaltonetworks.com/2018/09/unit42-oilrig-uses-updated-bondupdater-target-middle-eastern-government/",
|
||||
"https://researchcenter.paloaltonetworks.com/2018/11/unit42-analyzing-oilrigs-ops-tempo-testing-weaponization-delivery/"
|
||||
"https://researchcenter.paloaltonetworks.com/2018/11/unit42-analyzing-oilrigs-ops-tempo-testing-weaponization-delivery/",
|
||||
"https://www.crowdstrike.com/blog/meet-crowdstrikes-adversary-of-the-month-for-november-helix-kitten/"
|
||||
],
|
||||
"synonyms": [
|
||||
"Twisted Kitten",
|
||||
"Cobalt Gypsy",
|
||||
"Crambus"
|
||||
"Crambus",
|
||||
"Helix Kitten"
|
||||
]
|
||||
},
|
||||
"related": [
|
||||
|
|
162
tools/mitre-cti/v2.0/create_mitre-galaxy.py
Executable file
162
tools/mitre-cti/v2.0/create_mitre-galaxy.py
Executable file
|
@@ -0,0 +1,162 @@
|
|||
#!/usr/bin/env python3
"""Rebuild the MISP galaxy cluster files from the MITRE ATT&CK cti repository.

Loads the existing per-domain MISP cluster JSON files (to preserve previously
generated relationships), merges in the objects from the MITRE ATT&CK STIX
bundles (enterprise-attack, mobile-attack, pre-attack) and rewrites one
cluster file per object type, bumping each file's version.
"""
import json
import re
import os
import argparse

parser = argparse.ArgumentParser(description='Create a couple galaxy/cluster with cti\'s intrusion-sets\nMust be in the mitre/cti/enterprise-attack/intrusion-set folder')
parser.add_argument("-p", "--path", required=True, help="Path of the mitre/cti folder")

args = parser.parse_args()

# relative path from tools/mitre-cti/v2.0/ back to the repository root
misp_dir = '../../../'


domains = ['enterprise-attack', 'mobile-attack', 'pre-attack']
types = ['attack-pattern', 'course-of-action', 'intrusion-set', 'malware', 'tool']


def _stix_uuid(stix_id):
    """Return the UUID part of a STIX id such as 'intrusion-set--<uuid>'."""
    # group(1) is the capture after '--'; the original used group(0)[2:]
    return re.search('--(.*)$', stix_id).group(1)


# read in existing data
# THIS IS FOR MIGRATION - reading the data from the enterprise-attack, mobile-attack, pre-attack
# first build a data set of the MISP Galaxy ATT&CK elements by using the UUID as reference,
# this speeds up lookups later on.
# at the end we will convert everything again to separate datasets
all_data_uuid = {}
for domain in domains:
    for t in types:
        fname = os.path.join(misp_dir, 'clusters', 'mitre-{}-{}.json'.format(domain, t))
        if not os.path.exists(fname):
            continue
        with open(fname) as f:
            file_data = json.load(f)
        for value in file_data['values']:
            if value['uuid'] in all_data_uuid:
                # The same UUID can appear in several domain files; merge by
                # copying over the relationships of the previously seen item.
                # (From manual analysis it is mostly the relations that differ.)
                value_orig = all_data_uuid[value['uuid']]
                for related_item in value_orig.get('related', []):
                    # setdefault avoids a KeyError when the newer item has no
                    # 'related' key at all (the original code assumed it did)
                    if related_item not in value.setdefault('related', []):
                        value['related'].append(related_item)
            all_data_uuid[value['uuid']] = value

# THIS IS FOR NORMAL OPERATIONS - reading from the very old and new models - one model per type
# FIXME implement this (copy paste above or put above in function and call function)


# now load the MITRE ATT&CK
for domain in domains:
    attack_dir = os.path.join(args.path, domain)
    if not os.path.exists(attack_dir):
        exit("ERROR: MITRE ATT&CK folder incorrect")

    with open(os.path.join(attack_dir, domain + '.json')) as f:
        attack_data = json.load(f)

    for item in attack_data['objects']:
        if item['type'] not in types:
            continue

        try:
            # build the new data structure, starting from the existing entry
            # (if any) so previously generated relationships are preserved
            uuid = _stix_uuid(item['id'])
            value = all_data_uuid.get(uuid, {})

            if 'description' in item:
                value['description'] = item['description']
            value['value'] = item['name'] + ' - ' + item['external_references'][0]['external_id']
            value['meta'] = {}  # meta is rebuilt from scratch on every run
            value['meta']['refs'] = []
            value['uuid'] = uuid

            if 'aliases' in item:
                value['meta']['synonyms'] = item['aliases']
            if 'x_mitre_aliases' in item:
                value['meta']['synonyms'] = item['x_mitre_aliases']

            for reference in item['external_references']:
                if 'url' in reference and reference['url'] not in value['meta']['refs']:
                    value['meta']['refs'].append(reference['url'])
                if 'external_id' in reference:
                    value['meta']['external_id'] = reference['external_id']

            if 'kill_chain_phases' in item:  # many (but not all) attack-patterns have this
                value['meta']['kill_chain'] = [
                    killchain['kill_chain_name'] + ':' + killchain['phase_name']
                    for killchain in item['kill_chain_phases']]
            if 'x_mitre_data_sources' in item:
                value['meta']['mitre_data_sources'] = item['x_mitre_data_sources']
            if 'x_mitre_platforms' in item:
                value['meta']['mitre_platforms'] = item['x_mitre_platforms']
            # TODO add the other x_mitre elements dynamically

            # relationships will be built separately afterwards
            value['type'] = item['type']  # removed again before the dump to json

            all_data_uuid[uuid] = value

        except Exception:
            # best-effort: report the offending item and keep processing
            print(json.dumps(item, sort_keys=True, indent=2))
            import traceback
            traceback.print_exc()

    # process the 'relationship' type as we now know the existence of all ATT&CK uuids
    for item in attack_data['objects']:
        if item['type'] != 'relationship':
            continue

        source_uuid = _stix_uuid(item['source_ref'])
        if source_uuid not in all_data_uuid:
            # relationship from an object type we do not track (so there is
            # nothing to attach it to); the original code crashed here
            continue

        # add the relation in the defined way
        rel_source = {
            "dest-uuid": _stix_uuid(item['target_ref']),
            "tags": [
                "estimative-language:likelihood-probability=\"almost-certain\""
            ],
            "type": item['relationship_type']
        }
        related = all_data_uuid[source_uuid].setdefault('related', [])
        if rel_source not in related:
            related.append(rel_source)

    # LATER find the opposite word of "rel_type" and build the relation in the opposite direction

# dump all_data to their respective file
for t in types:
    fname = os.path.join(misp_dir, 'clusters', 'mitre-{}.json'.format(t))
    if not os.path.exists(fname):
        exit("File {} does not exist, this is unexpected.".format(fname))
    with open(fname) as f:
        file_data = json.load(f)

    # keep only the items of this type, dropping the transient 'type' marker
    file_data['values'] = [
        {k: v for k, v in item.items() if k != 'type'}
        for item in all_data_uuid.values() if item['type'] == t]

    # sort on the cluster value itself; the original key sorted(x['value'])
    # accidentally ordered entries by the sorted *characters* of the value
    file_data['values'] = sorted(file_data['values'], key=lambda x: x['value'])
    file_data['version'] += 1
    with open(fname, 'w') as f:
        json.dump(file_data, f, indent=2, sort_keys=True, ensure_ascii=False)
        f.write('\n')  # only needed for the beauty and to be compliant with jq_all_the_things

print("All done, please don't forget to ./validate_all.sh and ./jq_all_the_things.sh")
|
Loading…
Reference in a new issue