mirror of
https://github.com/MISP/misp-galaxy.git
synced 2024-11-26 16:57:18 +00:00
commit
b43f9d7b3d
33 changed files with 121692 additions and 546 deletions
647
clusters/tidal-campaigns.json
Normal file
647
clusters/tidal-campaigns.json
Normal file
|
@ -0,0 +1,647 @@
|
|||
{
|
||||
"authors": [
|
||||
"Tidal Cyber"
|
||||
],
|
||||
"category": "Campaigns",
|
||||
"description": "Tidal Campaigns Cluster",
|
||||
"name": "Tidal Campaigns",
|
||||
"source": "https://app-api.tidalcyber.com/api/v1/campaigns/",
|
||||
"type": "campaigns",
|
||||
"uuid": "3db4b6cb-5b89-4096-a057-e0205777adc9",
|
||||
"values": [
|
||||
{
|
||||
"description": "[2015 Ukraine Electric Power Attack](https://app.tidalcyber.com/campaigns/96e367d0-a744-5b63-85ec-595f505248a3) was a [Sandworm Team](https://app.tidalcyber.com/groups/16a65ee9-cd60-4f04-ba34-f2f45fcfc666) campaign during which they used [BlackEnergy](https://app.tidalcyber.com/software/908216c7-3ad4-4e0c-9dd3-a7ed5d1c695f) (specifically BlackEnergy3) and [KillDisk](https://app.tidalcyber.com/software/b5532e91-d267-4819-a05d-8c5358995add) to target and disrupt transmission and distribution substations within the Ukrainian power grid. This campaign was the first major public attack conducted against the Ukrainian power grid by Sandworm Team.",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0028",
|
||||
"first_seen": "2015-12-01T05:00:00Z",
|
||||
"last_seen": "2016-01-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "96e367d0-a744-5b63-85ec-595f505248a3",
|
||||
"value": "2015 Ukraine Electric Power Attack"
|
||||
},
|
||||
{
|
||||
"description": "[2016 Ukraine Electric Power Attack](https://app.tidalcyber.com/campaigns/06197e03-e1c1-56af-ba98-5071f98f91f1) was a [Sandworm Team](https://app.tidalcyber.com/groups/16a65ee9-cd60-4f04-ba34-f2f45fcfc666) campaign during which they used [Industroyer](https://app.tidalcyber.com/software/09398a7c-aee5-44af-b99d-f73d3b39c299) malware to target and disrupt distribution substations within the Ukrainian power grid. This campaign was the second major public attack conducted against Ukraine by [Sandworm Team](https://app.tidalcyber.com/groups/16a65ee9-cd60-4f04-ba34-f2f45fcfc666).<sup>[[ESET Industroyer](https://app.tidalcyber.com/references/9197f712-3c53-4746-9722-30e248511611)]</sup><sup>[[Dragos Crashoverride 2018](https://app.tidalcyber.com/references/d14442d5-2557-4a92-9a29-b15a20752f56)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0025",
|
||||
"first_seen": "2016-12-01T05:00:00Z",
|
||||
"last_seen": "2016-12-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "06197e03-e1c1-56af-ba98-5071f98f91f1",
|
||||
"value": "2016 Ukraine Electric Power Attack"
|
||||
},
|
||||
{
|
||||
"description": "In July 2023, U.S. authorities released joint Cybersecurity Advisory AA23-187A, which detailed increased observations of new variants of the Truebot botnet malware infecting organizations in the United States and Canada. Authorities assessed that Truebot infections are primarily motivated around collection and exfiltration of sensitive victim data for financial gain. Officials also assessed that actors were using both spearphishing emails containing malicious hyperlinks and exploitation of CVE-2022-31199 in the IT system auditing application Netwrix Auditor to deliver Truebot during these attacks. Additional tools associated with the attacks included Raspberry Robin for initial infections, FlawedGrace and Cobalt Strike for various post-exploitation activities, and Teleport, a custom tool for data exfiltration.<sup>[[U.S. CISA Increased Truebot Activity July 6 2023](/references/6f9b8f72-c55f-4268-903e-1f8a82efa5bb)]</sup>\n\nThe Advisory did not provide specific impacted victim sectors. The Advisory referred to activity taking place “in recent months” prior to July 2023 but did not provide an estimated date when the summarized activity began. A public threat report referenced in the Advisory reported an observed increase in Truebot infections beginning in August 2022, including several compromises involving education sector organizations.<sup>[[U.S. CISA Increased Truebot Activity July 6 2023](/references/6f9b8f72-c55f-4268-903e-1f8a82efa5bb)]</sup><sup>[[Cisco Talos Blog December 08 2022](/references/bcf92374-48a3-480f-a679-9fd34b67bcdd)]</sup>\n\n**Related Vulnerabilities**: CVE-2022-31199<sup>[[U.S. CISA Increased Truebot Activity July 6 2023](/references/6f9b8f72-c55f-4268-903e-1f8a82efa5bb)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5000",
|
||||
"first_seen": "2022-08-01T00:00:00Z",
|
||||
"last_seen": "2023-05-31T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"1dc8fd1e-0737-405a-98a1-111dd557f1b5",
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"7cc57262-5081-447e-85a3-31ebb4ab2ae5"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "87e14285-b86f-4f50-8d60-85398ba728b1",
|
||||
"value": "2023 Increased Truebot Activity"
|
||||
},
|
||||
{
|
||||
"description": "In August 2023, U.S. Cybersecurity & Infrastructure Security Agency (CISA) and Norwegian National Cyber Security Centre (NCSC-NO) authorities released Cybersecurity Advisory AA23-213A, which detailed observed exploitation of two vulnerabilities, CVE-2023-35078 and CVE-2023-35081, affecting Ivanti Endpoint Manager Mobile (EPMM), a solution which provides elevated access to an organization's mobile devices. According to the Advisory, authorities observed unspecified advanced persistent threat (APT) actors exploiting CVE-2023-35078 as a zero-day from at least April 2023 in order to gather information from unspecified organizations in Norway, and to gain initial access to a Norwegian government agency.\n\nIvanti released a CVE-2023-35078 patch on July 23, but then determined that CVE-2023-35081 could be chained together with the first vulnerability, a process which can enable arbitrary upload and execution of actor files, such as web shells. Ivanti released a CVE-2023-35081 patch on July 28. The Advisory provided mitigation recommendations, vulnerability and compromise identification methods, and incident response guidance, which can be found in the [source report](https://www.cisa.gov/news-events/cybersecurity-advisories/aa23-213a).<sup>[[U.S. CISA CVE-2023-35078 Exploits](/references/62305b8a-76c8-49ec-82dc-6756643ccf7a)]</sup>\n\n**Related Vulnerabilities**: CVE-2023-35078<sup>[[U.S. CISA CVE-2023-35078 Exploits](/references/62305b8a-76c8-49ec-82dc-6756643ccf7a)]</sup>, CVE-2023-35081<sup>[[U.S. CISA CVE-2023-35078 Exploits](/references/62305b8a-76c8-49ec-82dc-6756643ccf7a)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5004",
|
||||
"first_seen": "2023-04-01T00:00:00Z",
|
||||
"last_seen": "2023-07-28T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"2d80c940-ba2c-4d45-8272-69928953e9eb",
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"a98d7a43-f227-478e-81de-e7299639a355",
|
||||
"81e948b3-5ec0-4df8-b6e7-1b037b1b2e67",
|
||||
"7551097a-dfdd-426f-aaa2-a2916dd9b873"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "33fd2417-0a9c-4748-ab99-0e641ab29fbc",
|
||||
"value": "2023 Ivanti EPMM APT Vulnerability Exploits"
|
||||
},
|
||||
{
|
||||
"description": "In September 2023, U.S. cybersecurity authorities released Cybersecurity Advisory AA23-250A, which detailed multiple intrusions in early 2023 involving an aeronautical sector organization and attributed to multiple unspecified “nation-state advanced persistent threat (APT) actors”. As early as January, one set of actors exploited CVE-2022-47966, a vulnerability in the Zoho ManageEngine ServiceDesk Plus IT service management application that allows remote code execution, to access the organization’s public-facing web servers. A separate set of actors was also observed exploiting CVE-2022-42475, a vulnerability in Fortinet, Inc.’s FortiOS SSL-VPN that also allows remote code execution, to gain access to the organization’s firewall devices.\n\nAfter gaining access, the actors downloaded malware, performed network discovery, collected administrator credentials, and moved laterally, but according to the advisory, unclear data storage records inhibited insight into whether any proprietary information was accessed, altered, or exfiltrated. A common behavior among both sets of actors was log deletion from critical servers and the use of disabled, legitimate administrator credentials, which in one case belonged to a previously employed contractor (the organization confirmed the credentials were disabled before the observed threat activity).<sup>[[U.S. CISA Zoho Exploits September 7 2023](/references/6bb581e8-ed0e-41fe-bf95-49b5d11b4e6b)]</sup>\n\nIn addition to behavioral observations and indicators of compromise, the Advisory provided detection and mitigation guidance, which can be found in the [source report](https://www.cisa.gov/news-events/cybersecurity-advisories/aa23-250a).\n\n**Related Vulnerabilities**: CVE-2022-47966, CVE-2022-42475, CVE-2021-44228<sup>[[U.S. CISA Zoho Exploits September 7 2023](/references/6bb581e8-ed0e-41fe-bf95-49b5d11b4e6b)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5005",
|
||||
"first_seen": "2023-01-01T00:00:00Z",
|
||||
"last_seen": "2023-04-01T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"a98d7a43-f227-478e-81de-e7299639a355",
|
||||
"7e6ef160-8e4f-4132-bdc4-9991f01c472e",
|
||||
"793f4441-3916-4b3d-a3fd-686a59dc3de2",
|
||||
"532b7819-d407-41e9-9733-0d716b69eb17"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "d25f0485-fdf3-4b85-b2ec-53e98e215d0b",
|
||||
"value": "2023 Zoho ManageEngine APT Exploits"
|
||||
},
|
||||
{
|
||||
"description": "In April 2023, U.S. and UK cybersecurity authorities released joint Cybersecurity Advisory AA23-108, which detailed a campaign by Russia-backed APT28 to compromise vulnerable routers running Cisco Internetworking Operating System (IOS). Actors collected device information and conducted further network reconnaissance on victims “worldwide”, including U.S. government institutions, 250 Ukrainian entities, and “a small number” of victims elsewhere in Europe. Adversary activity occurred over an unspecified timeframe in 2021.\n\nActors exploited CVE-2017-6742, a Simple Network Management Protocol (SNMP) vulnerability for which Cisco released a patch in 2017, and used default authentication strings to gain initial access to devices and subsequently gather router information, such as router interface details. In some cases, authorities observed actors deploying Jaguar Tooth, a malicious software bundle consisting of a series of payloads and patches. Jaguar Tooth deployments allowed actors to collect further device information via execution of Cisco IOS Command Line Interface commands, discover other network devices, and achieve unauthenticated, backdoor access to victim systems.<sup>[[U.S. CISA APT28 Cisco Routers April 18 2023](/references/c532a6fc-b27f-4240-a071-3eaa866bce89)]</sup>\n\nIn addition to behavioral observations, the Advisory also provided mitigation recommendations and indicators of compromise, which can be found in the [source report](https://www.cisa.gov/news-events/cybersecurity-advisories/aa23-108).\n\n**Related Vulnerabilities**: CVE-2017-6742<sup>[[U.S. CISA APT28 Cisco Routers April 18 2023](/references/c532a6fc-b27f-4240-a071-3eaa866bce89)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5007",
|
||||
"first_seen": "2021-01-01T00:00:00Z",
|
||||
"last_seen": "2021-12-31T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"f01290d9-7160-44cb-949f-ee4947d04b6f",
|
||||
"b20e7912-6a8d-46e3-8e13-9a3fc4813852"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "ed8de8c3-03d2-4892-bd74-ccbc9afc3935",
|
||||
"value": "APT28 Cisco Router Exploits"
|
||||
},
|
||||
{
|
||||
"description": "U.S. authorities and various international partners released joint cybersecurity advisory AA20-150A, which detailed a series of attacks linked to APT28 that leveraged compromised Ubiquiti EdgeRouters to facilitate the attacks. Actors used the network of compromised routers for a range of malicious activities, including harvesting credentials, proxying network traffic, and hosting fake landing pages and post-exploitation tools. Attacks targeted organizations in a wide range of sectors around the world.<sup>[[U.S. Federal Bureau of Investigation 2 27 2024](/references/962fb031-dfd1-43a7-8202-3a2231b0472b)]</sup> According to a separate U.S. Justice Department announcement, the botnet involved in these attacks differed from previous APT28-linked cases, since nation-state actors accessed routers that had been initially compromised by a separate, unspecified cybercriminal group.<sup>[[U.S. Justice Department GRU Botnet February 2024](/references/26a554dc-39c0-4638-902d-7e84fe01b961)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5015",
|
||||
"first_seen": "2022-12-01T00:00:00Z",
|
||||
"last_seen": "2024-01-01T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"af5e9be5-b86e-47af-91dd-966a5e34a186",
|
||||
"6070668f-1cbd-4878-8066-c636d1d8659c",
|
||||
"d8f7e071-fbfd-46f8-b431-e241bb1513ac",
|
||||
"61cdbb28-cbfd-498b-9ab1-1f14337f9524",
|
||||
"e551ae97-d1b4-484e-9267-89f33829ec2c",
|
||||
"a98d7a43-f227-478e-81de-e7299639a355",
|
||||
"916ea1e8-d117-45a4-8564-0597a02b06e4",
|
||||
"b20e7912-6a8d-46e3-8e13-9a3fc4813852",
|
||||
"e809d252-12cc-494d-94f5-954c49eb87ce"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "2514a83a-3516-4d5d-a13c-2b6175989a26",
|
||||
"value": "APT28 Router Compromise Attacks"
|
||||
},
|
||||
{
|
||||
"description": "UK cybersecurity authorities and international partners published Cybersecurity Advisory AA24-057A (February 2024), which detailed recent tactics, techniques, and procedures (TTPs) used by Russian state-backed adversary group APT29 to target cloud environments. The advisory indicated that as more government agencies and enterprises move elements of their operations to cloud infrastructure, APT29 actors have especially adapted their TTPs for gaining initial access into these cloud environments.<sup>[[U.S. CISA APT29 Cloud Access](/references/e9e08eca-1e01-4ff0-a8ef-49ecf66aaf3d)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5016",
|
||||
"first_seen": "2023-02-26T00:00:00Z",
|
||||
"last_seen": "2024-02-26T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"af5e9be5-b86e-47af-91dd-966a5e34a186",
|
||||
"291c006e-f77a-4c9c-ae7e-084974c0e1eb"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "c1257a02-716f-4477-9eab-c38827418ed2",
|
||||
"value": "APT29 Cloud TTP Evolution"
|
||||
},
|
||||
{
|
||||
"description": "*Operationalize this intelligence by pivoting to relevant defensive resources via the Techniques below. Alternatively, use the **Add to Matrix** button above, then overlay entire sets of capabilities from your own defensive stack to identify threat overlaps & potential gaps (watch a [60-second tutorial here](https://www.youtube.com/watch?v=4jBo3XLO01E)).*\n\nIn December 2023, U.S. cybersecurity authorities and international partners released Cybersecurity Advisory AA23-347A, which detailed large-scale observed exploitation of CVE-2023-42793 since September 2023 by cyber threat actors associated with Russia’s Foreign Intelligence Service (SVR). According to the advisory, these actors are also known as APT29, the Dukes, CozyBear, and NOBELIUM/Midnight Blizzard.\n\nCVE-2023-42793 is an authentication bypass vulnerability in the JetBrains TeamCity software development program. After exploiting the vulnerability to gain access into victim networks, SVR actors were then observed escalating privileges, moving laterally, and deploying additional backdoors in an apparent effort to maintain long-term persistent access to victim environments. The advisory noted how SVR actors used access gained during the 2020 compromise of SolarWinds, another software company, to conduct supply chain operations affecting SolarWinds customers, but it also noted that such activity has not been observed in this case to date.\n\nJetBrains released a patch for CVE-2023-42793 in September 2023. The advisory indicated that the compromises observed to date appear to be opportunistic, impacting unpatched, internet-accessible TeamCity servers. “A few dozen” compromised entities have been identified so far (companies in disparate sectors in the United States, Europe, Asia, and Australia), but authorities assess that this tally does not represent the full number of compromised victims. 
Indicators of compromise, mitigation guidance, and detection resources – including Sigma and YARA rules – can be found in the [source report](https://www.cisa.gov/news-events/cybersecurity-advisories/aa23-347a).<sup>[[U.S. CISA SVR TeamCity Exploits December 2023](/references/5f66f864-58c2-4b41-8011-61f954e04b7e)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5012",
|
||||
"first_seen": "2023-09-01T00:00:00Z",
|
||||
"last_seen": "2023-12-14T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"08809fa0-61b6-4394-b103-1c4d19a5be16",
|
||||
"4a457eb3-e404-47e5-b349-8b1f743dc657"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "80ae546a-70e5-4427-be1d-e74efc428ffd",
|
||||
"value": "APT29 TeamCity Exploits"
|
||||
},
|
||||
{
|
||||
"description": "[C0010](https://app.tidalcyber.com/campaigns/a1e33caf-6eb0-442f-b97a-f6042f21df48) was a cyber espionage campaign conducted by UNC3890 that targeted Israeli shipping, government, aviation, energy, and healthcare organizations. Security researcher assess UNC3890 conducts operations in support of Iranian interests, and noted several limited technical connections to Iran, including PDB strings and Farsi language artifacts. [C0010](https://app.tidalcyber.com/campaigns/a1e33caf-6eb0-442f-b97a-f6042f21df48) began by at least late 2020, and was still ongoing as of mid-2022.<sup>[[Mandiant UNC3890 Aug 2022](https://app.tidalcyber.com/references/7b3fda0b-d327-4f02-bebe-2b8974f9959d)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0010",
|
||||
"first_seen": "2020-12-01T07:00:00Z",
|
||||
"last_seen": "2022-08-01T06:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "a1e33caf-6eb0-442f-b97a-f6042f21df48",
|
||||
"value": "C0010"
|
||||
},
|
||||
{
|
||||
"description": "[C0011](https://app.tidalcyber.com/campaigns/4c7386a7-9741-4ae4-8ad9-def03ed77e29) was a suspected cyber espionage campaign conducted by [Transparent Tribe](https://app.tidalcyber.com/groups/441b91d1-256a-4763-bac6-8f1c76764a25) that targeted students at universities and colleges in India. Security researchers noted this campaign against students was a significant shift from [Transparent Tribe](https://app.tidalcyber.com/groups/441b91d1-256a-4763-bac6-8f1c76764a25)'s historic targeting Indian government, military, and think tank personnel, and assessed it was still ongoing as of July 2022.<sup>[[Cisco Talos Transparent Tribe Education Campaign July 2022](https://app.tidalcyber.com/references/acb10fb6-608f-44d3-9faf-7e577b0e2786)]</sup> ",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0011",
|
||||
"first_seen": "2021-12-01T06:00:00Z",
|
||||
"last_seen": "2022-07-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "4c7386a7-9741-4ae4-8ad9-def03ed77e29",
|
||||
"value": "C0011"
|
||||
},
|
||||
{
|
||||
"description": "[C0015](https://app.tidalcyber.com/campaigns/85bbff82-ba0c-4193-a3b5-985afd5690c5) was a ransomware intrusion during which the unidentified attackers used [Bazar](https://app.tidalcyber.com/software/b35d9817-6ead-4dbd-a2fa-4b8e217f8eac), [Cobalt Strike](https://app.tidalcyber.com/software/9b6bcbba-3ab4-4a4c-a233-cd12254823f6), and [Conti](https://app.tidalcyber.com/software/8e995c29-2759-4aeb-9a0f-bb7cd97b06e5), along with other tools, over a 5 day period. Security researchers assessed the actors likely used the widely-circulated [Conti](https://app.tidalcyber.com/software/8e995c29-2759-4aeb-9a0f-bb7cd97b06e5) ransomware playbook based on the observed pattern of activity and operator errors.<sup>[[DFIR Conti Bazar Nov 2021](https://app.tidalcyber.com/references/a6f1a15d-448b-41d4-81f0-ee445cba83bd)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0015",
|
||||
"first_seen": "2021-08-01T05:00:00Z",
|
||||
"last_seen": "2021-08-01T05:00:00Z",
|
||||
"source": "MITRE",
|
||||
"tags": [
|
||||
"5e7433ad-a894-4489-93bc-41e90da90019",
|
||||
"7e7b0c67-bb85-4996-a289-da0e792d7172"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "85bbff82-ba0c-4193-a3b5-985afd5690c5",
|
||||
"value": "C0015"
|
||||
},
|
||||
{
|
||||
"description": "[C0017](https://app.tidalcyber.com/campaigns/a56d7700-c015-52ca-9c52-fed4d122c100) was an [APT41](https://app.tidalcyber.com/groups/502223ee-8947-42f8-a532-a3b3da12b7d9) campaign conducted between May 2021 and February 2022 that successfully compromised at least six U.S. state government networks through the exploitation of vulnerable Internet facing web applications. During [C0017](https://app.tidalcyber.com/campaigns/a56d7700-c015-52ca-9c52-fed4d122c100), [APT41](https://app.tidalcyber.com/groups/502223ee-8947-42f8-a532-a3b3da12b7d9) was quick to adapt and use publicly-disclosed as well as zero-day vulnerabilities for initial access, and in at least two cases re-compromised victims following remediation efforts. The goals of [C0017](https://app.tidalcyber.com/campaigns/a56d7700-c015-52ca-9c52-fed4d122c100) are unknown, however [APT41](https://app.tidalcyber.com/groups/502223ee-8947-42f8-a532-a3b3da12b7d9) was observed exfiltrating Personal Identifiable Information (PII).<sup>[[Mandiant APT41](https://app.tidalcyber.com/references/e54415fe-40c2-55ff-9e75-881bc8a912b8)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0017",
|
||||
"first_seen": "2021-05-01T04:00:00Z",
|
||||
"last_seen": "2022-02-01T05:00:00Z",
|
||||
"source": "MITRE",
|
||||
"tags": [
|
||||
"a98d7a43-f227-478e-81de-e7299639a355"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "a56d7700-c015-52ca-9c52-fed4d122c100",
|
||||
"value": "C0017"
|
||||
},
|
||||
{
|
||||
"description": "\n[C0018](https://app.tidalcyber.com/campaigns/0452e367-aaa4-5a18-8028-a7ee136fe646) was a month-long ransomware intrusion that successfully deployed [AvosLocker](https://app.tidalcyber.com/software/e792dc8d-b0f4-5916-8850-a61ff53125d0) onto a compromised network. The unidentified actors gained initial access to the victim network through an exposed server and used a variety of open-source tools prior to executing [AvosLocker](https://app.tidalcyber.com/software/e792dc8d-b0f4-5916-8850-a61ff53125d0).<sup>[[Costa AvosLocker May 2022](https://app.tidalcyber.com/references/a94268d8-6b7c-574b-a588-d8fd80c27fd3)]</sup><sup>[[Cisco Talos Avos Jun 2022](https://app.tidalcyber.com/references/1170fdc2-6d8e-5b60-bf9e-ca915790e534)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0018",
|
||||
"first_seen": "2022-02-01T05:00:00Z",
|
||||
"last_seen": "2022-03-01T05:00:00Z",
|
||||
"source": "MITRE",
|
||||
"tags": [
|
||||
"5e7433ad-a894-4489-93bc-41e90da90019",
|
||||
"7e7b0c67-bb85-4996-a289-da0e792d7172"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "0452e367-aaa4-5a18-8028-a7ee136fe646",
|
||||
"value": "C0018"
|
||||
},
|
||||
{
|
||||
"description": "[C0021](https://app.tidalcyber.com/campaigns/86bed8da-4cab-55fe-a2d0-9214db1a09cf) was a spearphishing campaign conducted in November 2018 that targeted public sector institutions, non-governmental organizations (NGOs), educational institutions, and private-sector corporations in the oil and gas, chemical, and hospitality industries. The majority of targets were located in the US, particularly in and around Washington D.C., with other targets located in Europe, Hong Kong, India, and Canada. [C0021](https://app.tidalcyber.com/campaigns/86bed8da-4cab-55fe-a2d0-9214db1a09cf)'s technical artifacts, tactics, techniques, and procedures (TTPs), and targeting overlap with previous suspected [APT29](https://app.tidalcyber.com/groups/4c3e48b9-4426-4271-a7af-c3dfad79f447) activity.<sup>[[Microsoft Unidentified Dec 2018](https://app.tidalcyber.com/references/896c88f9-8765-4b60-b679-667b338757e3)]</sup><sup>[[FireEye APT29 Nov 2018](https://app.tidalcyber.com/references/30e769e0-4552-429b-b16e-27830d42edea)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0021",
|
||||
"first_seen": "2018-11-01T05:00:00Z",
|
||||
"last_seen": "2018-11-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "86bed8da-4cab-55fe-a2d0-9214db1a09cf",
|
||||
"value": "C0021"
|
||||
},
|
||||
{
|
||||
"description": "[C0026](https://app.tidalcyber.com/campaigns/41f283a1-b2ac-547d-98d5-ff907afd08c7) was a campaign identified in September 2022 that included the selective distribution of [KOPILUWAK](https://app.tidalcyber.com/software/d09c4459-1aa3-547d-99f4-7ac73b8043f0) and [QUIETCANARY](https://app.tidalcyber.com/software/52d3515c-5184-5257-bf24-56adccb4cccd) malware to previous [ANDROMEDA](https://app.tidalcyber.com/software/69aac793-9e6a-5167-bc62-823189ee2f7b) malware victims in Ukraine through re-registered [ANDROMEDA](https://app.tidalcyber.com/software/69aac793-9e6a-5167-bc62-823189ee2f7b) C2 domains. Several tools and tactics used during [C0026](https://app.tidalcyber.com/campaigns/41f283a1-b2ac-547d-98d5-ff907afd08c7) were consistent with historic [Turla](https://app.tidalcyber.com/groups/47ae4fb1-fc61-4e8e-9310-66dda706e1a2) operations.<sup>[[Mandiant Suspected Turla Campaign February 2023](https://app.tidalcyber.com/references/d8f43a52-a59e-5567-8259-821b1b6bde43)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0026",
|
||||
"first_seen": "2022-08-01T05:00:00Z",
|
||||
"last_seen": "2022-09-01T04:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "41f283a1-b2ac-547d-98d5-ff907afd08c7",
|
||||
"value": "C0026"
|
||||
},
|
||||
{
|
||||
"description": "[C0027](https://app.tidalcyber.com/campaigns/a9719584-4f52-5a5d-b0f7-1059e715c2b8) was a financially-motivated campaign linked to [Scattered Spider](https://app.tidalcyber.com/groups/3d77fb6c-cfb4-5563-b0be-7aa1ad535337) that targeted telecommunications and business process outsourcing (BPO) companies from at least June through December of 2022. During [C0027](https://app.tidalcyber.com/campaigns/a9719584-4f52-5a5d-b0f7-1059e715c2b8) [Scattered Spider](https://app.tidalcyber.com/groups/3d77fb6c-cfb4-5563-b0be-7aa1ad535337) used various forms of social engineering, performed SIM swapping, and attempted to leverage access from victim environments to mobile carrier networks.<sup>[[Crowdstrike TELCO BPO Campaign December 2022](https://app.tidalcyber.com/references/382785e1-4ef3-506e-b74f-cd07df9ae46e)]</sup>\n",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0027",
|
||||
"first_seen": "2022-06-01T04:00:00Z",
|
||||
"last_seen": "2022-12-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "a9719584-4f52-5a5d-b0f7-1059e715c2b8",
|
||||
"value": "C0027"
|
||||
},
|
||||
{
|
||||
"description": "In June 2023, U.S. authorities released Cybersecurity Advisory AA23-158A, which detailed observed exploits of a zero-day SQL injection vulnerability (CVE-2023-34362) affecting Progress Software's managed file transfer (MFT) solution, MOVEit Transfer. According to the Advisory, exploit activity began on May 27, 2023, as threat actors, which the Advisory attributed to \"CL0P Ransomware Gang, also known as TA505\", began compromising internet-facing MOVEit Transfer web applications. Actors deployed web shells, dubbed LEMURLOOT, on compromised MOVEit applications, which enabled persistence, discovery of files and folders stored on MOVEit servers, and staging and exfiltration of compressed victim data. Authorities indicated they expected to see \"widespread exploitation of unpatched software services in both private and public networks\".<sup>[[U.S. CISA CL0P CVE-2023-34362 Exploitation](/references/07e48ca8-b965-4234-b04a-dfad45d58b22)]</sup> Progress Software acknowledged the vulnerability and issued guidance on known affected versions, software upgrades, and patching.<sup>[[Progress Software MOVEit Transfer Critical Vulnerability](/references/9f364e22-b73c-4f3a-902c-a3f0eb01a2b9)]</sup>\n\n**Related Vulnerabilities**: CVE-2023-34362<sup>[[U.S. CISA CL0P CVE-2023-34362 Exploitation](/references/07e48ca8-b965-4234-b04a-dfad45d58b22)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5002",
|
||||
"first_seen": "2023-05-27T00:00:00Z",
|
||||
"last_seen": "2023-06-16T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"5e7433ad-a894-4489-93bc-41e90da90019",
|
||||
"a98d7a43-f227-478e-81de-e7299639a355",
|
||||
"173e1480-8d9b-49c5-854d-594dde9740d6"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "f20c935b-e0c5-4941-b710-73cf06dd2b4a",
|
||||
"value": "Clop MOVEit Transfer Vulnerability Exploitation"
|
||||
},
|
||||
{
|
||||
"description": "[CostaRicto](https://app.tidalcyber.com/campaigns/fb011ed2-bfb9-4f0f-bd88-8b3fa0cf9b48) was a suspected hacker-for-hire cyber espionage campaign that targeted multiple industries worldwide, with a large number being financial institutions. [CostaRicto](https://app.tidalcyber.com/campaigns/fb011ed2-bfb9-4f0f-bd88-8b3fa0cf9b48) actors targeted organizations in Europe, the Americas, Asia, Australia, and Africa, with a large concentration in South Asia (especially India, Bangladesh, and Singapore), using custom malware, open source tools, and a complex network of proxies and SSH tunnels.<sup>[[BlackBerry CostaRicto November 2020](https://app.tidalcyber.com/references/93a23447-641c-4ee2-9fbd-64b2adea8a5f)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0004",
|
||||
"first_seen": "2019-10-01T04:00:00Z",
|
||||
"last_seen": "2020-11-01T04:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "fb011ed2-bfb9-4f0f-bd88-8b3fa0cf9b48",
|
||||
"value": "CostaRicto"
|
||||
},
|
||||
{
|
||||
"description": "German and South Korean cybersecurity authorities published an advisory highlighting recent attempts by North Korea-linked cyber actors to target enterprises and research centers in the defense sector. The advisory detailed a supply chain attack, attributed to an unnamed threat group, in which actors compromised a company that maintained a defense sector research center's web servers, then used stolen SSH credentials to remotely access the research center's network. The actors then used various methods to evade defenses, including impersonating security staff, deployed malware via a patch management system, and stole account information and email contents before being evicted from the network.<sup>[[BfV North Korea February 17 2024](/references/cc76be15-6d9d-40b2-b7f3-196bb0a7106a)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5014",
|
||||
"first_seen": "2022-12-01T00:00:00Z",
|
||||
"last_seen": "2022-12-31T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"6070668f-1cbd-4878-8066-c636d1d8659c",
|
||||
"d8f7e071-fbfd-46f8-b431-e241bb1513ac",
|
||||
"e7ea1f6d-59f2-40c1-bbfe-835dedf033ee"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "1a2caf4c-658d-4117-a912-55f4d6bca899",
|
||||
"value": "Defense Sector Supply Chain Compromise by North Korea-Linked Actors"
|
||||
},
|
||||
{
|
||||
"description": "In September 2023, French cybersecurity authorities released advisory CERTFR-2023-CTI-007, which detailed a network intrusion of the Regional and University Hospital Center of Brest, in northwestern France. Actors used valid credentials belonging to a healthcare professional to connect to a remote desktop service exposed to the Internet, then installed Cobalt Strike and SystemBC to provide backdoor network access. Authorities indicated that the credentials were likely compromised via unspecified infostealer malware.\n\nThe actors used multiple third-party tools for credential access and discovery, and they attempted to exploit at least five vulnerabilities for privilege escalation and lateral movement. Authorities worked with hospital personnel to isolate affected systems and disrupt the intrusion before suspected data exfiltration and encryption could take place. Based on infrastructural and behavioral overlaps with other incidents, officials attributed the intrusion to the FIN12 financially motivated actor group and indicated the same actors are responsible for dozens of attacks on French victims in recent years.\n\nAdditional details, indicators of compromise, and the observed Cobalt Strike configuration can be found in the [source report](https://www.cert.ssi.gouv.fr/uploads/CERTFR-2023-CTI-007.pdf).<sup>[[CERTFR-2023-CTI-007](/references/0f4a03c5-79b3-418e-a77d-305d5a32caca)]</sup>\n\n**Related Vulnerabilities**: CVE-2023-21746, CVE-2022-24521, CVE-2021-34527, CVE-2019-0708, CVE-2020-1472<sup>[[CERTFR-2023-CTI-007](/references/0f4a03c5-79b3-418e-a77d-305d5a32caca)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5006",
|
||||
"first_seen": "2023-03-01T00:00:00Z",
|
||||
"last_seen": "2023-03-31T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"2743d495-7728-4a75-9e5f-b64854039792",
|
||||
"ecd84106-2a5b-4d25-854e-b8d1f57f6b75",
|
||||
"a6ba64e1-4b4a-4bbd-a26d-ce35c22b2530",
|
||||
"4bc9ab8f-7f57-4b1a-8857-ffaa7e5cc930",
|
||||
"d385b541-4033-48df-93cd-237ca6e46f36"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "129ffe04-ea90-45d1-a2fd-7ff0bffa0433",
|
||||
"value": "FIN12 March 2023 Hospital Center Intrusion"
|
||||
},
|
||||
{
|
||||
"description": "[Frankenstein](https://app.tidalcyber.com/campaigns/2fab9878-8aae-445a-86db-6b47b473f56b) was described by security researchers as a highly-targeted campaign conducted by moderately sophisticated and highly resourceful threat actors in early 2019. The unidentified actors primarily relied on open source tools, including [Empire](https://app.tidalcyber.com/software/fea655ac-558f-4dd0-867f-9a5553626207). The campaign name refers to the actors' ability to piece together several unrelated open-source tool components.<sup>[[Talos Frankenstein June 2019](https://app.tidalcyber.com/references/a6faa495-db01-43e8-9db3-d446570802bc)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0001",
|
||||
"first_seen": "2019-01-01T06:00:00Z",
|
||||
"last_seen": "2019-04-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "2fab9878-8aae-445a-86db-6b47b473f56b",
|
||||
"value": "Frankenstein"
|
||||
},
|
||||
{
|
||||
"description": "[FunnyDream](https://app.tidalcyber.com/campaigns/94587edf-0292-445b-8c66-b16629597f1e) was a suspected Chinese cyber espionage campaign that targeted government and foreign organizations in Malaysia, the Philippines, Taiwan, Vietnam, and other parts of Southeast Asia. Security researchers linked the [FunnyDream](https://app.tidalcyber.com/campaigns/94587edf-0292-445b-8c66-b16629597f1e) campaign to possible Chinese-speaking threat actors through the use of the [Chinoxy](https://app.tidalcyber.com/software/7c36563a-9143-4766-8aef-4e1787e18d8c) backdoor and noted infrastructure overlap with the TAG-16 threat group.<sup>[[Bitdefender FunnyDream Campaign November 2020](https://app.tidalcyber.com/references/b62a9f2c-02ca-4dfa-95fc-5dc6ad9568de)]</sup><sup>[[Kaspersky APT Trends Q1 2020](https://app.tidalcyber.com/references/23c91719-5ebe-4d03-8018-df1809fffd2f)]</sup><sup>[[Recorded Future Chinese Activity in Southeast Asia December 2021](https://app.tidalcyber.com/references/0809db3b-81a8-475d-920a-cb913b30f42e)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0007",
|
||||
"first_seen": "2018-07-01T05:00:00Z",
|
||||
"last_seen": "2020-11-01T04:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "94587edf-0292-445b-8c66-b16629597f1e",
|
||||
"value": "FunnyDream"
|
||||
},
|
||||
{
|
||||
"description": "In November 2022, U.S. cybersecurity authorities released Cybersecurity Advisory AA22-320A, which detailed an incident response engagement at an unspecified U.S. Federal Civilian Executive Branch organization. Authorities assessed that the network compromise was carried out by unspecified Iranian government-sponsored advanced persistent threat (APT) actors. The actors achieved initial network access by exploiting the Log4Shell vulnerability in an unpatched VMware Horizon server. Post-exploit activities included installing XMRig crypto mining software and executing Mimikatz to harvest credentials, as well as moving laterally to the domain controller and implanting Ngrok reverse proxies on multiple hosts to maintain persistence.\n\nAdditional details, including incident response guidance and relevant mitigations, can be found in the [source report](https://www.cisa.gov/news-events/cybersecurity-advisories/aa22-320a).<sup>[[U.S. CISA Advisory November 25 2022](/references/daae1f54-8471-4620-82d5-023d04144acd)]</sup>\n\n**Related Vulnerabilities**: CVE-2021-44228<sup>[[U.S. CISA Advisory November 25 2022](/references/daae1f54-8471-4620-82d5-023d04144acd)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5008",
|
||||
"first_seen": "2022-06-15T00:00:00Z",
|
||||
"last_seen": "2022-07-15T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"7e6ef160-8e4f-4132-bdc4-9991f01c472e"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "7d6ff40d-51f3-42f8-b986-e7421f59b4bd",
|
||||
"value": "Iranian APT Credential Harvesting & Cryptomining Activity"
|
||||
},
|
||||
{
|
||||
"description": "In November 2020, U.S. cybersecurity authorities released joint Cybersecurity Advisory AA20-304A, which detailed efforts by an unspecified Iranian advanced persistent threat (APT) actor to target U.S. state websites, including election-related sites, with the goal of obtaining voter registration data. The actors used a legitimate vulnerability scanner, Acunetix, to scan state election websites, and they attempted to exploit sites with directory traversal, SQL injection, and web shell upload attacks. Authorities confirmed the actors successfully obtained voter registration data in at least one state – after abusing a website misconfiguration, they used a cURL-based scripting tool to iterate through and retrieve voter records. Officials assessed that the actor behind the website attacks is responsible for mass dissemination of intimidation emails to U.S. citizens and a disinformation campaign featuring a U.S. election-related propaganda video in mid-October 2020. Authorities furthermore assessed that information obtained during the website attacks was featured in the propaganda video.<sup>[[U.S. CISA Iran Voter Data November 3 2020](/references/be89be75-c33f-4c58-8bf0-979c1debaad7)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5010",
|
||||
"first_seen": "2020-09-20T00:00:00Z",
|
||||
"last_seen": "2020-10-20T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "18cf25b5-ed3a-40f6-bf0a-a3938a4f8da2",
|
||||
"value": "Iranian APT Targeting U.S. Voter Data"
|
||||
},
|
||||
{
|
||||
"description": "In September 2022, U.S., Canadian, United Kingdom, and Australian cybersecurity authorities released joint Cybersecurity Advisory AA22-257A, which detailed malicious cyber activity attributed to advanced persistent threat (APT) actors affiliated with the Iranian government’s Islamic Revolutionary Guard Corps (IRGC). The advisory updated a previous alert (AA21-321A), published in November 2021, and summarized recent activities linked to the actors. Since at least March 2021, the actors were observed targeting victims in a wide range of U.S. critical infrastructure sectors, including transportation and healthcare, and victims in unspecified sectors in Australia, Canada, and the United Kingdom.\n\nThe actors typically exploited vulnerabilities to gain initial network access. They were observed exploiting vulnerabilities in Microsoft Exchange servers (ProxyShell) and Fortinet devices in 2021, and VMware Horizon (Log4j) in 2022. After gaining access, the actors typically evaluated the perceived value of data held within a victim network and either encrypted it for ransom and/or exfiltrated it. The actors are believed to have sold some exfiltrated data or used it as leverage to further pressure victims into paying a ransom.\n\nIn addition to behavioral observations and indicators of compromise, the advisories provided detection and mitigation guidance, which can be found in the source reports [here](https://www.cisa.gov/news-events/cybersecurity-advisories/aa22-257a) and [here](https://www.cisa.gov/news-events/cybersecurity-advisories/aa21-321a).\n\n**Related Vulnerabilities**: CVE-2021-34523, CVE-2021-31207, CVE-2021-44228, CVE-2021-45046, CVE-2021-45105<sup>[[U.S. CISA IRGC Actors September 14 2022](/references/728b20b0-f702-4dbe-afea-50270648a3a2)]</sup>, CVE-2021-34473, CVE-2018-13379, CVE-2020-12812, CVE-2019-5591<sup>[[U.S. CISA Iranian Government Actors November 19 2021](/references/d7014279-bc6a-43d4-953a-a6bc1d97a13b)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5009",
|
||||
"first_seen": "2021-03-01T00:00:00Z",
|
||||
"last_seen": "2022-09-14T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"5e7433ad-a894-4489-93bc-41e90da90019",
|
||||
"7e7b0c67-bb85-4996-a289-da0e792d7172",
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"d84be7c9-c652-4a43-a79e-ef0fa2318c58",
|
||||
"1423b5a8-cff3-48d5-a0a2-09b3afc9f195",
|
||||
"1b98f09a-7d93-4abb-8f3e-1eacdb9f9871",
|
||||
"fde4c246-7d2d-4d53-938b-44651cf273f1",
|
||||
"c3779a84-8132-4c62-be2f-9312ad41c273",
|
||||
"c035da8e-f96c-4793-885d-45017d825596",
|
||||
"7e6ef160-8e4f-4132-bdc4-9991f01c472e",
|
||||
"d713747c-2d53-487e-9dac-259230f04460",
|
||||
"964c2590-4b52-48c6-afff-9a6d72e68908"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "338c6497-2b13-4c2b-bd45-d8b636c35cac",
|
||||
"value": "Iranian IRGC Data Extortion Operations"
|
||||
},
|
||||
{
|
||||
"description": "This object represents a collection of MITRE ATT&CK® Techniques and other objects (Groups and/or Software) related to joint Cybersecurity Advisory AA24-060B, which detailed recent exploits of vulnerabilities (CVE-2023-46805, CVE-2024-21887, and CVE-2024-21893) affecting Ivanti Connect Secure and Policy Secure VPN and gateway appliances by unspecified threat actors. Further background & contextual details can be found in the References tab below.",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5017",
|
||||
"first_seen": "2023-12-01T00:00:00Z",
|
||||
"last_seen": "2024-02-29T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"758c3085-2f79-40a8-ab95-f8a684737927",
|
||||
"af5e9be5-b86e-47af-91dd-966a5e34a186",
|
||||
"35e694ec-5133-46e3-b7e1-5831867c3b55",
|
||||
"1dc8fd1e-0737-405a-98a1-111dd557f1b5",
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"d1ab6bd6-2688-4e54-a1d3-d180bb8fd41a",
|
||||
"1ff4614e-0ee6-4e04-921d-61abba7fcdb7",
|
||||
"e00b65fc-8f56-4a9e-9f09-ccf3124a3272"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "c2544d1d-3c99-4601-86fe-8b62020aaffc",
|
||||
"value": "Ivanti Gateway Vulnerability Exploits"
|
||||
},
|
||||
{
|
||||
"description": "In July 2023, U.S. Cybersecurity & Infrastructure Security Agency authorities released Cybersecurity Advisory AA23-201A, which detailed an observed exploit of a zero-day vulnerability (CVE-2023-3519) affecting NetScaler (formerly Citrix) Application Delivery Controller (\"ADC\") and NetScaler Gateway appliances. According to the Advisory, the exploitation activity occurred in June 2023, and the victim (an undisclosed entity in the critical infrastructure sector) reported it in July 2023.<sup>[[U.S. CISA CVE-2023-3519 Exploits](/references/021c4caa-7a7a-4e49-9c5c-6eec176bf923)]</sup> Citrix acknowledged the reported exploit of the vulnerability, which enables unauthenticated remote code execution, and released a patch on July 18, 2023.<sup>[[Citrix Bulletin CVE-2023-3519](/references/245ef1b7-778d-4df2-99a9-b51c95c57580)]</sup>\n\nAfter achieving initial access via exploit of CVE-2023-3519, threat actors dropped a web shell on the vulnerable ADC appliance, which was present on a non-production environment. The web shell enabled subsequent information discovery on the victim's Active Directory (\"AD\"), followed by collection and exfiltration of AD-related data. The actors also attempted lateral movement to a domain controller, but the Advisory indicated that network segementation controls for the ADC appliance blocked this attempted activity.<sup>[[U.S. CISA CVE-2023-3519 Exploits](/references/021c4caa-7a7a-4e49-9c5c-6eec176bf923)]</sup> Separately, in a blog on CVE-2023-3519 exploit investigations released the day after the CISA Advisory, Mandiant indicated that the type of activity observed is \"consistent with previous operations by China-nexus actors\".<sup>[[Mandiant CVE-2023-3519 Exploitation](/references/4404ed65-3020-453d-8c51-2885018ba03b)]</sup>\n\n**Related Vulnerabilities**: CVE-2023-3519<sup>[[U.S. CISA CVE-2023-3519 Exploits](/references/021c4caa-7a7a-4e49-9c5c-6eec176bf923)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5001",
|
||||
"first_seen": "2023-06-01T00:00:00Z",
|
||||
"last_seen": "2023-06-30T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"a98d7a43-f227-478e-81de-e7299639a355",
|
||||
"c475ad68-3fdc-4725-8abc-784c56125e96"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "86e3565d-93dc-40e5-8f84-20d1c15b8e9d",
|
||||
"value": "June 2023 Citrix Vulnerability Exploitation"
|
||||
},
|
||||
{
|
||||
"description": "In November 2023, U.S. cybersecurity authorities and international partners released Cybersecurity Advisory AA23-325A, which detailed observed exploitation of CVE-2023-4966 (known colloquially as the “Citrix Bleed” vulnerability) by threat actors believed to be affiliated with the LockBit ransomware operation.\n\nCitrix Bleed is a vulnerability in Citrix NetScaler web application delivery control (“ADC”) and NetScaler Gateway appliances, which allows adversaries to bypass password requirements and multifactor authentication, enabling hijacking of legitimate user sessions and subsequent credential harvesting, lateral movement, and data or resource access. Authorities indicated that they expected “widespread” Citrix Bleed exploitation on unpatched services due to the ease of carrying out the exploit.\n\nAfter successful Citrix Bleed exploitation, LockBit affiliates were observed using a variety of follow-on TTPs and using a range of software, including abuse of native utilities and popular legitimate remote management and monitoring (“RMM”) tools. Indicators of compromise associated with recent intrusions and further incident response and mitigation guidance can be found in the [source report](https://www.cisa.gov/news-events/cybersecurity-advisories/aa23-325a).<sup>[[U.S. CISA LockBit Citrix Bleed November 21 2023](/references/21f56e0c-9605-4fbb-9cb1-f868ba6eb053)]</sup> Public reporting suggested that actors associated with the Medusa and Qilin ransomware operations, plus other unknown ransomware and uncategorized actors, had also exploited Citrix Bleed as part of their operations.<sup>[[Malwarebytes Citrix Bleed November 24 2023](/references/fdc86cea-0015-48d1-934f-b22244de6306)]</sup><sup>[[Cybernews Yanfeng Qilin November 2023](/references/93c89ca5-1863-4ee2-9fff-258f94f655c4)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5011",
|
||||
"first_seen": "2023-08-01T00:00:00Z",
|
||||
"last_seen": "2023-11-16T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"35e694ec-5133-46e3-b7e1-5831867c3b55",
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"15b77e5c-2285-434d-9719-73c14beba8bd",
|
||||
"5e7433ad-a894-4489-93bc-41e90da90019",
|
||||
"7e7b0c67-bb85-4996-a289-da0e792d7172"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "f4225d6a-8734-401f-aa2a-1a73c23b16e6",
|
||||
"value": "LockBit Affiliate Citrix Bleed Exploits"
|
||||
},
|
||||
{
|
||||
"description": "[Night Dragon](https://app.tidalcyber.com/campaigns/85f136b3-d5a3-4c4c-a37c-40e4418dc989) was a cyber espionage campaign that targeted oil, energy, and petrochemical companies, along with individuals and executives in Kazakhstan, Taiwan, Greece, and the United States. The unidentified threat actors searched for information related to oil and gas field production systems, financials, and collected data from SCADA systems. Based on the observed techniques, tools, and network activities, security researchers assessed the campaign involved a threat group based in China.<sup>[[McAfee Night Dragon](https://app.tidalcyber.com/references/242d2933-ca2b-4511-803a-454727a3acc5)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0002",
|
||||
"first_seen": "2009-11-01T04:00:00Z",
|
||||
"last_seen": "2011-02-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "85f136b3-d5a3-4c4c-a37c-40e4418dc989",
|
||||
"value": "Night Dragon"
|
||||
},
|
||||
{
|
||||
"description": "[Operation CuckooBees](https://app.tidalcyber.com/campaigns/81bf4e45-f0d3-4fec-a9d4-1259cf8542a1) was a cyber espionage campaign targeting technology and manufacturing companies in East Asia, Western Europe, and North America since at least 2019. Security researchers noted the goal of [Operation CuckooBees](https://app.tidalcyber.com/campaigns/81bf4e45-f0d3-4fec-a9d4-1259cf8542a1), which was still ongoing as of May 2022, was likely the theft of proprietary information, research and development documents, source code, and blueprints for various technologies. Researchers assessed [Operation CuckooBees](https://app.tidalcyber.com/campaigns/81bf4e45-f0d3-4fec-a9d4-1259cf8542a1) was conducted by actors affiliated with [Winnti Group](https://app.tidalcyber.com/groups/6932662a-53a7-4e43-877f-6e940e2d744b), [APT41](https://app.tidalcyber.com/groups/502223ee-8947-42f8-a532-a3b3da12b7d9), and BARIUM.<sup>[[Cybereason OperationCuckooBees May 2022](https://app.tidalcyber.com/references/fe3e2c7e-2287-406c-b717-cf7721b5843a)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0012",
|
||||
"first_seen": "2019-12-01T07:00:00Z",
|
||||
"last_seen": "2022-05-01T06:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "81bf4e45-f0d3-4fec-a9d4-1259cf8542a1",
|
||||
"value": "Operation CuckooBees"
|
||||
},
|
||||
{
|
||||
"description": "[Operation Dream Job](https://app.tidalcyber.com/campaigns/9a94e646-cbe5-54a1-8bf6-70ef745e641b) was a cyber espionage operation likely conducted by [Lazarus Group](https://app.tidalcyber.com/groups/0bc66e95-de93-4de7-b415-4041b7191f08) that targeted the defense, aerospace, government, and other sectors in the United States, Israel, Australia, Russia, and India. In at least one case, the cyber actors tried to monetize their network access to conduct a business email compromise (BEC) operation. In 2020, security researchers noted overlapping TTPs, to include fake job lures and code similarities, between [Operation Dream Job](https://app.tidalcyber.com/campaigns/9a94e646-cbe5-54a1-8bf6-70ef745e641b), Operation North Star, and Operation Interception; by 2022 security researchers described [Operation Dream Job](https://app.tidalcyber.com/campaigns/9a94e646-cbe5-54a1-8bf6-70ef745e641b) as an umbrella term covering both Operation Interception and Operation North Star.<sup>[[ClearSky Lazarus Aug 2020](https://app.tidalcyber.com/references/2827e6e4-8163-47fb-9e22-b59e59cd338f)]</sup><sup>[[McAfee Lazarus Jul 2020](https://app.tidalcyber.com/references/43581a7d-d71a-4121-abb6-127483a49d12)]</sup><sup>[[ESET Lazarus Jun 2020](https://app.tidalcyber.com/references/b16a0141-dea3-4b34-8279-7bc1ce3d7052)]</sup><sup>[[The Hacker News Lazarus Aug 2022](https://app.tidalcyber.com/references/8ae38830-1547-5cc1-83a4-87c3a7c82aa6)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0022",
|
||||
"first_seen": "2019-09-01T04:00:00Z",
|
||||
"last_seen": "2020-08-01T04:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "9a94e646-cbe5-54a1-8bf6-70ef745e641b",
|
||||
"value": "Operation Dream Job"
|
||||
},
|
||||
{
|
||||
"description": "[Operation Dust Storm](https://app.tidalcyber.com/campaigns/af0c0f55-dc4f-4cb5-9350-3a2d7c07595f) was a long-standing persistent cyber espionage campaign that targeted multiple industries in Japan, South Korea, the United States, Europe, and several Southeast Asian countries. By 2015, the [Operation Dust Storm](https://app.tidalcyber.com/campaigns/af0c0f55-dc4f-4cb5-9350-3a2d7c07595f) threat actors shifted from government and defense-related intelligence targets to Japanese companies or Japanese subdivisions of larger foreign organizations supporting Japan's critical infrastructure, including electricity generation, oil and natural gas, finance, transportation, and construction.<sup>[[Cylance Dust Storm](https://app.tidalcyber.com/references/001dd53c-74e6-4add-aeb7-da76b0d2afe8)]</sup>\n\n[Operation Dust Storm](https://app.tidalcyber.com/campaigns/af0c0f55-dc4f-4cb5-9350-3a2d7c07595f) threat actors also began to use Android backdoors in their operations by 2015, with all identified victims at the time residing in Japan or South Korea.<sup>[[Cylance Dust Storm](https://app.tidalcyber.com/references/001dd53c-74e6-4add-aeb7-da76b0d2afe8)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0016",
|
||||
"first_seen": "2010-01-01T07:00:00Z",
|
||||
"last_seen": "2016-02-01T06:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "af0c0f55-dc4f-4cb5-9350-3a2d7c07595f",
|
||||
"value": "Operation Dust Storm"
|
||||
},
|
||||
{
|
||||
"description": "[Operation Ghost](https://app.tidalcyber.com/campaigns/1fcfe949-5f96-578e-86ad-069ba123c867) was an [APT29](https://app.tidalcyber.com/groups/4c3e48b9-4426-4271-a7af-c3dfad79f447) campaign starting in 2013 that included operations against ministries of foreign affairs in Europe and the Washington, D.C. embassy of a European Union country. During [Operation Ghost](https://app.tidalcyber.com/campaigns/1fcfe949-5f96-578e-86ad-069ba123c867), [APT29](https://app.tidalcyber.com/groups/4c3e48b9-4426-4271-a7af-c3dfad79f447) used new families of malware and leveraged web services, steganography, and unique C2 infrastructure for each victim.<sup>[[ESET Dukes October 2019](https://app.tidalcyber.com/references/fbc77b85-cc5a-4c65-956d-b8556974b4ef)]</sup>\n",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0023",
|
||||
"first_seen": "2013-09-01T04:00:00Z",
|
||||
"last_seen": "2019-10-01T04:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "1fcfe949-5f96-578e-86ad-069ba123c867",
|
||||
"value": "Operation Ghost"
|
||||
},
|
||||
{
|
||||
"description": "[Operation Honeybee](https://app.tidalcyber.com/campaigns/f741ed36-2d52-40ae-bbdc-70722f4071c7) was a campaign that targeted humanitarian aid and inter-Korean affairs organizations from at least late 2017 through early 2018. [Operation Honeybee](https://app.tidalcyber.com/campaigns/f741ed36-2d52-40ae-bbdc-70722f4071c7) initially targeted South Korea, but expanded to include Vietnam, Singapore, Japan, Indonesia, Argentina, and Canada. Security researchers assessed the threat actors were likely Korean speakers based on metadata used in both lure documents and executables, and named the campaign \"Honeybee\" after the author name discovered in malicious Word documents.<sup>[[McAfee Honeybee](https://app.tidalcyber.com/references/e6f0f7b5-01fe-437f-a9c9-2ea054e7d69d)]</sup> ",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0006",
|
||||
"first_seen": "2017-08-01T05:00:00Z",
|
||||
"last_seen": "2018-02-01T06:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "f741ed36-2d52-40ae-bbdc-70722f4071c7",
|
||||
"value": "Operation Honeybee"
|
||||
},
|
||||
{
|
||||
"description": "[Operation Sharpshooter](https://app.tidalcyber.com/campaigns/57e858c8-fd0b-4382-a178-0165d03aa8a9) was a global cyber espionage campaign that targeted nuclear, defense, government, energy, and financial companies, with many located in Germany, Turkey, the United Kingdom, and the United States. Security researchers noted the campaign shared many similarities with previous [Lazarus Group](https://app.tidalcyber.com/groups/0bc66e95-de93-4de7-b415-4041b7191f08) operations, including fake job recruitment lures and shared malware code.<sup>[[McAfee Sharpshooter December 2018](https://app.tidalcyber.com/references/96b6d012-8620-4ef5-bf9a-5f88e465a495)]</sup><sup>[[Bleeping Computer Op Sharpshooter March 2019](https://app.tidalcyber.com/references/84430646-6568-4288-8710-2827692a8862)]</sup><sup>[[Threatpost New Op Sharpshooter Data March 2019](https://app.tidalcyber.com/references/2361b5b1-3a01-4d77-99c6-261f444a498e)]</sup> ",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0013",
|
||||
"first_seen": "2017-09-01T05:00:00Z",
|
||||
"last_seen": "2019-03-01T06:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "57e858c8-fd0b-4382-a178-0165d03aa8a9",
|
||||
"value": "Operation Sharpshooter"
|
||||
},
|
||||
{
|
||||
"description": "[Operation Spalax](https://app.tidalcyber.com/campaigns/98d3a8ac-6af9-4471-83f6-e880ca70261f) was a campaign that primarily targeted Colombian government organizations and private companies, particularly those associated with the energy and metallurgical industries. The [Operation Spalax](https://app.tidalcyber.com/campaigns/98d3a8ac-6af9-4471-83f6-e880ca70261f) threat actors distributed commodity malware and tools using generic phishing topics related to COVID-19, banking, and law enforcement action. Security researchers noted indicators of compromise and some infrastructure overlaps with other campaigns dating back to April 2018, including at least one separately attributed to [APT-C-36](https://app.tidalcyber.com/groups/153c14a6-31b7-44f2-892e-6d9fdc152267), however identified enough differences to report this as separate, unattributed activity.<sup>[[ESET Operation Spalax Jan 2021](https://app.tidalcyber.com/references/b699dd10-7d3f-4542-bf8a-b3f0c747bd0e)]</sup> ",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0005",
|
||||
"first_seen": "2019-11-01T05:00:00Z",
|
||||
"last_seen": "2021-01-01T06:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "98d3a8ac-6af9-4471-83f6-e880ca70261f",
|
||||
"value": "Operation Spalax"
|
||||
},
|
||||
{
|
||||
"description": "[Operation Wocao](https://app.tidalcyber.com/campaigns/56e4e10f-8c8c-4b7c-8355-7ed89af181be) was a cyber espionage campaign that targeted organizations around the world, including in Brazil, China, France, Germany, Italy, Mexico, Portugal, Spain, the United Kingdom, and the United States. The suspected China-based actors compromised government organizations and managed service providers, as well as aviation, construction, energy, finance, health care, insurance, offshore engineering, software development, and transportation companies.<sup>[[FoxIT Wocao December 2019](https://app.tidalcyber.com/references/aa3e31c7-71cd-4a3f-b482-9049c9abb631)]</sup>\n\nSecurity researchers assessed the [Operation Wocao](https://app.tidalcyber.com/campaigns/56e4e10f-8c8c-4b7c-8355-7ed89af181be) actors used similar TTPs and tools as APT20, suggesting a possible overlap. [Operation Wocao](https://app.tidalcyber.com/campaigns/56e4e10f-8c8c-4b7c-8355-7ed89af181be) was named after an observed command line entry by one of the threat actors, possibly out of frustration from losing webshell access.<sup>[[FoxIT Wocao December 2019](https://app.tidalcyber.com/references/aa3e31c7-71cd-4a3f-b482-9049c9abb631)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0014",
|
||||
"first_seen": "2017-12-01T05:00:00Z",
|
||||
"last_seen": "2019-12-01T05:00:00Z",
|
||||
"source": "MITRE"
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "56e4e10f-8c8c-4b7c-8355-7ed89af181be",
|
||||
"value": "Operation Wocao"
|
||||
},
|
||||
{
|
||||
"description": "In May 2023, U.S. Cybersecurity & Infrastructure Security Agency (CISA) and Federal Bureau of Investigation (FBI) authorities released Cybersecurity Advisory AA23-131A, which detailed observed exploits of a vulnerability, CVE-2023-27350, affecting certain versions of PaperCut NG and PaperCut MF, software applications for print management. PaperCut released a patch for the vulnerability in March 2023.<sup>[[PaperCut MF/NG vulnerability bulletin](/references/d6e71b45-fc91-40f4-8201-2186994ae42a)]</sup> According to the Advisory, authorities observed unspecified threat actors exploiting the vulnerability in mid-April 2023, followed by exploitation by the self-identified Bl00dy Ransomware Gang the following month.<sup>[[U.S. CISA PaperCut May 2023](/references/b5ef2b97-7cc7-470b-ae97-a45dc4af32a6)]</sup>\n\nCVE-2023-27350 allows a remote actor to bypass authentication and remotely execute code on servers running affected versions of PaperCut software. In May, U.S. authorities observed Bl00dy Ransomware Gang actors exploiting the vulnerability to achieve initial access into education sector entities' networks and ingressing both legitimate remote management and maintenance (RMM) tools and several other command and control-related malware, including Lizar, Truebot, and Cobalt Strike. In some cases, the actors ultimately exfiltrated victim data and encrypted files, demanding payment in order to decrypt affected systems (the Advisory did not indicate how precisely actors encrypted data). The Advisory indicated that the \"Education Facilities Subsector\" maintains nearly 70% of exposed (but not necessarily vulnerable) U.S.-based PaperCut servers.<sup>[[U.S. CISA PaperCut May 2023](/references/b5ef2b97-7cc7-470b-ae97-a45dc4af32a6)]</sup>\n\nThe Advisory instructed defenders to focus CVE-2023-27350 detection efforts on three areas: network traffic signatures, system monitoring, and server settings and log files. 
More details and resources for detection can be found in the [source report](https://www.cisa.gov/news-events/cybersecurity-advisories/aa23-131a).\n\n**Related Vulnerabilities**: CVE-2023-27350<sup>[[U.S. CISA PaperCut May 2023](/references/b5ef2b97-7cc7-470b-ae97-a45dc4af32a6)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5003",
|
||||
"first_seen": "2023-04-15T00:00:00Z",
|
||||
"last_seen": "2023-05-30T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"5e7433ad-a894-4489-93bc-41e90da90019",
|
||||
"7e7b0c67-bb85-4996-a289-da0e792d7172",
|
||||
"15787198-6c8b-4f79-bf50-258d55072fee",
|
||||
"a98d7a43-f227-478e-81de-e7299639a355",
|
||||
"992bdd33-4a47-495d-883a-58010a2f0efb"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "38443d11-135a-47ac-909f-fa34744bc3a5",
|
||||
"value": "PaperCut Vulnerability Exploitation"
|
||||
},
|
||||
{
|
||||
"description": "*Operationalize this intelligence by pivoting to relevant defensive resources via the Techniques below. Alternatively, use the **Add to Matrix** button above, then overlay entire sets of capabilities from your own defensive stack to identify threat overlaps & potential gaps (watch a [60-second tutorial here](https://www.youtube.com/watch?v=4jBo3XLO01E)).*\n\nThis is a single object to represent the initial access and delivery methods observed with Pikabot distribution in the first year after its discovery. Distribution campaigns have been linked to the TA577 threat actor (previously known for distributing payloads including QakBot, IcedID, SystemBC, and Cobalt Strike)<sup>[[Malwarebytes Pikabot December 15 2023](/references/50b29ef4-7ade-4672-99b6-fdf367170a5b)]</sup><sup>[[Unit42 Malware Roundup December 29 2023](/references/a18e19b5-9046-4c2c-bd94-2cd5061064bf)]</sup>; however, the Technique- and Procedure level intelligence associated with these campaigns that is provided below was not explicitly linked to that group, so we are providing this intelligence to users in this Campaign form. The Water Curupira intrusion set (affiliated with the Black Basta ransomware operation) has also been observed distributing Pikabot.<sup>[[Trend Micro Pikabot January 9 2024](/references/dc7d882b-4e83-42da-8e2f-f557b675930a)]</sup>",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C5013",
|
||||
"first_seen": "2023-02-01T00:00:00Z",
|
||||
"last_seen": "2023-12-31T00:00:00Z",
|
||||
"owner": "TidalCyberIan",
|
||||
"source": "Tidal Cyber",
|
||||
"tags": [
|
||||
"f8669b82-2194-49a9-8e20-92e7f9ab0a6f",
|
||||
"84615fe0-c2a5-4e07-8957-78ebc29b4635"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "71f6d3b1-c45e-421c-99cb-3b695647cf0b",
|
||||
"value": "Pikabot Distribution Campaigns 2023"
|
||||
},
|
||||
{
|
||||
"description": "The [SolarWinds Compromise](https://app.tidalcyber.com/campaigns/8bde8146-0656-5800-82e6-e24e008e4f4a) was a sophisticated supply chain cyber operation conducted by [APT29](https://app.tidalcyber.com/groups/4c3e48b9-4426-4271-a7af-c3dfad79f447) that was discovered in mid-December 2020. [APT29](https://app.tidalcyber.com/groups/4c3e48b9-4426-4271-a7af-c3dfad79f447) used customized malware to inject malicious code into the SolarWinds Orion software build process that was later distributed through a normal software update; they also used password spraying, token theft, API abuse, spear phishing, and other supply chain attacks to compromise user accounts and leverage their associated access. Victims of this campaign included government, consulting, technology, telecom, and other organizations in North America, Europe, Asia, and the Middle East. Industry reporting initially referred to the actors involved in this campaign as UNC2452, NOBELIUM, StellarParticle, Dark Halo, and SolarStorm.<sup>[[SolarWinds Advisory Dec 2020](https://app.tidalcyber.com/references/4e8b908a-bdc5-441b-bc51-98dfa87f6b7a)]</sup><sup>[[SolarWinds Sunburst Sunspot Update January 2021](https://app.tidalcyber.com/references/1be1b6e0-1b42-4d07-856b-b6321c17bb88)]</sup><sup>[[FireEye SUNBURST Backdoor December 2020](https://app.tidalcyber.com/references/d006ed03-a8af-4887-9356-3481d81d43e4)]</sup><sup>[[Volexity SolarWinds](https://app.tidalcyber.com/references/355cecf8-ef3e-4a6e-a652-3bf26fe46d88)]</sup><sup>[[CrowdStrike StellarParticle January 2022](https://app.tidalcyber.com/references/149c1446-d6a1-4a63-9420-def9272d6cb9)]</sup><sup>[[Unit 42 SolarStorm December 2020](https://app.tidalcyber.com/references/ecbb602a-2427-5eba-8c2b-25d90c95f166)]</sup><sup>[[Microsoft Analyzing Solorigate Dec 2020](https://app.tidalcyber.com/references/8ad72d46-ba2c-426f-bb0d-eb47723c8e11)]</sup><sup>[[Microsoft Internal Solorigate Investigation 
Blog](https://app.tidalcyber.com/references/66cade99-0040-464c-98a6-bba57719f0a4)]</sup> \n\nIn April 2021, the US and UK governments attributed the [SolarWinds Compromise](https://app.tidalcyber.com/campaigns/8bde8146-0656-5800-82e6-e24e008e4f4a) to Russia's Foreign Intelligence Service (SVR); public statements included citations to [APT29](https://app.tidalcyber.com/groups/4c3e48b9-4426-4271-a7af-c3dfad79f447), Cozy Bear, and The Dukes.<sup>[[NSA Joint Advisory SVR SolarWinds April 2021](https://app.tidalcyber.com/references/43d9c469-1d54-454b-ba67-74e7f1de9c10)]</sup><sup>[[UK NSCS Russia SolarWinds April 2021](https://app.tidalcyber.com/references/f49e6780-8caa-4c3c-8d68-47a2cc4319a1)]</sup><sup>[[Mandiant UNC2452 APT29 April 2022](https://app.tidalcyber.com/references/5276508c-6792-56be-b757-e4b495ef6c37)]</sup> The US government assessed that of the approximately 18,000 affected public and private sector customers of Solar Winds’ Orion product, a much smaller number were compromised by follow-on [APT29](https://app.tidalcyber.com/groups/4c3e48b9-4426-4271-a7af-c3dfad79f447) activity on their systems.<sup>[[USG Joint Statement SolarWinds January 2021](https://app.tidalcyber.com/references/336a6549-a95d-5763-bbaf-5ef0d3141800)]</sup> ",
|
||||
"meta": {
|
||||
"campaign_attack_id": "C0024",
|
||||
"first_seen": "2019-08-01T05:00:00Z",
|
||||
"last_seen": "2021-01-01T06:00:00Z",
|
||||
"source": "MITRE",
|
||||
"tags": [
|
||||
"f2ae2283-f94d-4f8f-bbde-43f2bed66c55"
|
||||
]
|
||||
},
|
||||
"related": [],
|
||||
"uuid": "8bde8146-0656-5800-82e6-e24e008e4f4a",
|
||||
"value": "SolarWinds Compromise"
|
||||
}
|
||||
],
|
||||
"version": 1
|
||||
}
|
10493
clusters/tidal-groups.json
Normal file
10493
clusters/tidal-groups.json
Normal file
File diff suppressed because it is too large
Load diff
57451
clusters/tidal-references.json
Normal file
57451
clusters/tidal-references.json
Normal file
File diff suppressed because it is too large
Load diff
35130
clusters/tidal-software.json
Normal file
35130
clusters/tidal-software.json
Normal file
File diff suppressed because one or more lines are too long
3430
clusters/tidal-tactic.json
Normal file
3430
clusters/tidal-tactic.json
Normal file
File diff suppressed because it is too large
Load diff
12739
clusters/tidal-technique.json
Normal file
12739
clusters/tidal-technique.json
Normal file
File diff suppressed because one or more lines are too long
9
galaxies/tidal-campaigns.json
Normal file
9
galaxies/tidal-campaigns.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"description": "Tidal Campaigns Galaxy",
|
||||
"icon": "bullhorn",
|
||||
"name": "Tidal Campaigns",
|
||||
"namespace": "tidal",
|
||||
"type": "campaigns",
|
||||
"uuid": "3db4b6cb-5b89-4096-a057-e0205777adc9",
|
||||
"version": 1
|
||||
}
|
9
galaxies/tidal-groups.json
Normal file
9
galaxies/tidal-groups.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"description": "Tidal Groups Galaxy",
|
||||
"icon": "user-secret",
|
||||
"name": "Tidal Groups",
|
||||
"namespace": "tidal",
|
||||
"type": "groups",
|
||||
"uuid": "877cdc4b-3392-4353-a7d4-2e46d40e5936",
|
||||
"version": 1
|
||||
}
|
9
galaxies/tidal-references.json
Normal file
9
galaxies/tidal-references.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"description": "Tidal References Galaxy",
|
||||
"icon": "list",
|
||||
"name": "Tidal References",
|
||||
"namespace": "tidal",
|
||||
"type": "references",
|
||||
"uuid": "efd98ec4-16ef-41c4-bc3c-60c7c1ae8b39",
|
||||
"version": 1
|
||||
}
|
9
galaxies/tidal-software.json
Normal file
9
galaxies/tidal-software.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"description": "Tidal Software Galaxy",
|
||||
"icon": "file-code",
|
||||
"name": "Tidal Software",
|
||||
"namespace": "tidal",
|
||||
"type": "software",
|
||||
"uuid": "6eb44da4-ed4f-4a5d-a444-0f105ff1b3c2",
|
||||
"version": 1
|
||||
}
|
9
galaxies/tidal-tactic.json
Normal file
9
galaxies/tidal-tactic.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"description": "Tidal Tactic Galaxy",
|
||||
"icon": "map",
|
||||
"name": "Tidal Tactic",
|
||||
"namespace": "tidal",
|
||||
"type": "tactic",
|
||||
"uuid": "16b963e7-4b88-44e0-b184-16bf9e71fdc9",
|
||||
"version": 1
|
||||
}
|
9
galaxies/tidal-technique.json
Normal file
9
galaxies/tidal-technique.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"description": "Tidal Technique Galaxy",
|
||||
"icon": "user-ninja",
|
||||
"name": "Tidal Technique",
|
||||
"namespace": "tidal",
|
||||
"type": "technique",
|
||||
"uuid": "298b6aee-981b-4fd8-8759-a2e72ad223fa",
|
||||
"version": 1
|
||||
}
|
|
@ -1,506 +1,39 @@
|
|||
#!/usr/bin/python
|
||||
from modules.universe import Universe
|
||||
from modules.site import IndexSite, StatisticsSite
|
||||
from utils.helper import generate_relations_table
|
||||
|
||||
import multiprocessing
|
||||
from multiprocessing import Pool
|
||||
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
import json
|
||||
import operator
|
||||
import os
|
||||
import time
|
||||
from typing import List
|
||||
import sys
|
||||
|
||||
import validators
|
||||
# Deep relation graphs recurse heavily in get_related_clusters.
sys.setrecursionlimit(10000)

# Cluster JSON files to skip during generation (deprecated galaxies are
# appended in main). NOTE: this constant was previously assigned twice with
# the same value; the duplicate assignment has been removed.
FILES_TO_IGNORE = []

CLUSTER_PATH = "../../clusters"
SITE_PATH = "./site/docs"
GALAXY_PATH = "../../galaxies"


# Variables for statistics (mutated while clusters are rendered)
public_relations_count = 0
private_relations_count = 0
private_clusters = []
public_clusters_dict = {}
relation_count_dict = {}
synonyms_count_dict = {}
empty_uuids_dict = {}

INTRO = """
# MISP Galaxy

The MISP galaxy offers a streamlined approach for representing large entities, known as clusters, which can be linked to MISP events or attributes. Each cluster consists of one or more elements, represented as key-value pairs. MISP galaxy comes with a default knowledge base, encompassing areas like Threat Actors, Tools, Ransomware, and ATT&CK matrices. However, users have the flexibility to modify, update, replace, or share these elements according to their needs.

Clusters and vocabularies within MISP galaxy can be utilized in their original form or as a foundational knowledge base. The distribution settings for each cluster can be adjusted, allowing for either restricted or wide dissemination.

Additionally, MISP galaxies enable the representation of existing standards like the MITRE ATT&CK™ framework, as well as custom matrices.

The aim is to provide a core set of clusters for organizations embarking on analysis, which can be further tailored to include localized, private information or additional, shareable data.

Clusters serve as an open and freely accessible knowledge base, which can be utilized and expanded within [MISP](https://www.misp-project.org/) or other threat intelligence platforms.

![Overview of the integration of MISP galaxy in the MISP Threat Intelligence Sharing Platform](https://raw.githubusercontent.com/MISP/misp-galaxy/aa41337fd78946a60aef3783f58f337d2342430a/doc/images/galaxy.png)

## Publicly available clusters

"""

STATISTICS = """
## Statistics

You can find some statistics about MISP galaxies [here](./statistics.md).

"""

CONTRIBUTING = """

# Contributing

In the dynamic realm of threat intelligence, a variety of models and approaches exist to systematically organize, categorize, and delineate threat actors, hazards, or activity groups. We embrace innovative methodologies for articulating threat intelligence. The galaxy model is particularly versatile, enabling you to leverage and integrate methodologies that you trust and are already utilizing within your organization or community.

We encourage collaboration and contributions to the [MISP Galaxy JSON files](https://github.com/MISP/misp-galaxy/). Feel free to fork the project, enhance existing elements or clusters, or introduce new ones. Your insights are valuable - share them with us through a pull-request.

"""
|
||||
def write_relations_table(cluster):
    """Write the Markdown relations table for *cluster* to its own file.

    Clusters without any relationships are skipped silently.

    NOTE(review): this reads a module-level ``relation_path`` that is not
    defined at this module's top level — presumably injected into worker
    globals by the multiprocessing setup; confirm before refactoring.
    """
    if not cluster.relationships:
        return
    print(f"Writing {cluster.uuid}.md")
    target = os.path.join(relation_path, f"{cluster.uuid}.md")
    with open(target, "w") as fh:
        fh.write(generate_relations_table(cluster))
|
||||
|
||||
|
||||
class Galaxy:
    """One MISP galaxy (a single cluster JSON file) rendered to Markdown.

    Holds the raw cluster dicts from the JSON file, materializes them as
    :class:`Cluster` objects, and renders a MkDocs ``index.md`` page with
    front matter, title, description, authors, and one section per cluster.
    """

    def __init__(
        self, cluster_list: List[dict], authors, description, name, json_file_name
    ):
        self.cluster_list = cluster_list
        self.authors = authors
        self.description = description
        self.name = name
        # File stem of the source JSON; also used as the output directory name.
        self.json_file_name = json_file_name
        self.clusters = self._create_clusters()
        self.entry = ""

    def _create_metadata_entry(self):
        # YAML front-matter block consumed by MkDocs.
        self.entry += "---\n"
        self.entry += f"title: {self.name}\n"
        # Double quotes would break the YAML value; replace them.
        meta_description = self.description.replace('"', "-")
        self.entry += f"description: {meta_description}\n"
        self.entry += "---\n"

    def _create_title_entry(self):
        self.entry += f"# {self.name}\n"

    def _create_description_entry(self):
        self.entry += f"{self.description}\n"

    def _create_authors_entry(self):
        # Collapsible "Authors" admonition with a one-column table.
        if self.authors:
            self.entry += "\n"
            self.entry += '??? info "Authors"\n'
            self.entry += "\n"
            self.entry += " | Authors and/or Contributors|\n"
            self.entry += " |----------------------------|\n"
            for author in self.authors:
                self.entry += f" |{author}|\n"

    def _create_clusters(self):
        """Build a Cluster object for every raw cluster dict in this galaxy.

        NOTE(review): the previous rendering of this method had an unrelated
        function spliced into the middle of the ``Cluster(...)`` call (a
        syntax error); the call has been reconstructed from its keyword list.
        """
        clusters = []
        for cluster in self.cluster_list:
            clusters.append(
                Cluster(
                    value=cluster.get("value", None),
                    description=cluster.get("description", None),
                    uuid=cluster.get("uuid", None),
                    date=cluster.get("date", None),
                    related_list=cluster.get("related", None),
                    meta=cluster.get("meta", None),
                    galaxie=self,
                )
            )
        return clusters

    def _create_clusters_entry(self, cluster_dict):
        # cluster_dict maps uuid -> Cluster across *all* galaxies; each
        # cluster needs it to resolve its relations.
        for cluster in self.clusters:
            self.entry += cluster.create_entry(cluster_dict)

    def create_entry(self, cluster_dict):
        """Assemble and return the full Markdown page for this galaxy."""
        self._create_metadata_entry()
        self._create_title_entry()
        self._create_description_entry()
        self._create_authors_entry()
        self._create_clusters_entry(cluster_dict)
        return self.entry

    def write_entry(self, path, cluster_dict):
        """Render this galaxy and write it to ``<path>/<json_file_name>/index.md``."""
        self.create_entry(cluster_dict)
        galaxy_path = os.path.join(path, self.json_file_name)
        if not os.path.exists(galaxy_path):
            os.mkdir(galaxy_path)
        with open(os.path.join(galaxy_path, "index.md"), "w") as index:
            index.write(self.entry)
|
||||
|
||||
|
||||
class Cluster:
    """One galaxy cluster: Markdown rendering plus relation-graph traversal.

    Instances attached to a galaxy register themselves in the module-level
    ``public_clusters_dict`` registry; rendering also updates the module-level
    statistics counters (relation, synonym, and empty-UUID counts).
    """

    def __init__(self, description, uuid, date, value, related_list, meta, galaxie):
        self.description = description
        self.uuid = uuid
        self.date = date
        self.value = value
        self.related_list = related_list
        self.meta = meta
        self.entry = ""
        self.galaxie = galaxie

        # Register public clusters for the statistics page. Placeholder
        # "Private Cluster" instances are created with galaxie=None and
        # deliberately stay out of the registry.
        global public_clusters_dict
        if self.galaxie:
            public_clusters_dict[self.uuid] = self.galaxie

    def _create_title_entry(self):
        self.entry += f"## {self.value}\n"
        self.entry += "\n"

    def _create_description_entry(self):
        if self.description:
            self.entry += f"{self.description}\n"

    def _create_synonyms_entry(self):
        # Collapsible "Synonyms" admonition; also records the synonym count
        # for the statistics page.
        if isinstance(self.meta, dict) and self.meta.get("synonyms"):
            self.entry += "\n"
            self.entry += '??? info "Synonyms"\n'
            self.entry += "\n"
            self.entry += f' "synonyms" in the meta part typically refer to alternate names or labels that are associated with a particular {self.value}.\n\n'
            self.entry += " | Known Synonyms |\n"
            self.entry += " |---------------------|\n"
            global synonyms_count_dict
            synonyms_count = 0
            for synonym in sorted(self.meta["synonyms"]):
                synonyms_count += 1
                self.entry += f" | `{synonym}` |\n"
            synonyms_count_dict[self.uuid] = synonyms_count

    def _create_uuid_entry(self):
        if self.uuid:
            self.entry += "\n"
            self.entry += '??? tip "Internal MISP references"\n'
            self.entry += "\n"
            self.entry += f" UUID `{self.uuid}` which can be used as unique global reference for `{self.value}` in MISP communities and other software using the MISP galaxy\n"
            self.entry += "\n"

    def _create_refs_entry(self):
        # External references; valid URLs additionally get a webarchive link.
        if isinstance(self.meta, dict) and self.meta.get("refs"):
            self.entry += "\n"
            self.entry += '??? info "External references"\n'
            self.entry += "\n"

            for ref in self.meta["refs"]:
                if validators.url(ref):
                    self.entry += f" - [{ref}]({ref}) - :material-archive: :material-arrow-right: [webarchive](https://web.archive.org/web/*/{ref})\n"
                else:
                    self.entry += f" - {ref}\n"

            self.entry += "\n"

    def _create_associated_metadata_entry(self):
        # Every meta key except synonyms/refs, which have dedicated sections.
        if isinstance(self.meta, dict):
            excluded_meta = ["synonyms", "refs"]
            self.entry += "\n"
            self.entry += '??? info "Associated metadata"\n'
            self.entry += "\n"
            self.entry += " |Metadata key {{ .no-filter }} |Value|\n"
            self.entry += " |-----------------------------------|-----|\n"
            for meta in sorted(self.meta.keys()):
                if meta not in excluded_meta:
                    self.entry += f" | {meta} | {self.meta[meta]} |\n"

    def get_related_clusters(
        self,
        cluster_dict,
        depth=-1,
        visited=None,
        level=1,
        related_private_clusters=None,
    ):
        """Collect (source, target, level) tuples for this cluster's relations.

        Walks ``related_list`` recursively up to *depth* hops (-1 = unlimited),
        using *visited* to avoid revisiting a cluster at an equal or deeper
        level. Relations to UUIDs absent from *cluster_dict* are counted as
        private and represented by "Private Cluster" placeholder objects,
        de-duplicated through *related_private_clusters*. Updates the
        module-level public/private relation counters as a side effect.
        """
        global public_relations_count
        global private_relations_count
        global private_clusters
        global empty_uuids_dict
        empty_uuids = 0

        if visited is None:
            visited = {}
        # BUG FIX: related_private_clusters used to default to a mutable {}
        # evaluated once at definition time, leaking private-cluster
        # placeholders across unrelated top-level calls. A None sentinel keeps
        # the intra-call sharing (recursion passes it explicitly) while making
        # each top-level traversal independent.
        if related_private_clusters is None:
            related_private_clusters = {}

        related_clusters = []
        if depth == 0 or not self.related_list:
            return related_clusters

        if self.uuid in visited and visited[self.uuid] <= level:
            return related_clusters
        else:
            visited[self.uuid] = level

        for cluster in self.related_list:
            dest_uuid = cluster["dest-uuid"]

            # Cluster is private (not in the public cluster index)
            if dest_uuid not in cluster_dict:
                # Check if UUID is empty
                if not dest_uuid:
                    empty_uuids += 1
                    continue
                private_relations_count += 1
                if dest_uuid not in private_clusters:
                    private_clusters.append(dest_uuid)
                if dest_uuid in related_private_clusters:
                    related_clusters.append(
                        (self, related_private_clusters[dest_uuid], level)
                    )
                else:
                    placeholder = Cluster(
                        value="Private Cluster",
                        uuid=dest_uuid,
                        date=None,
                        description=None,
                        related_list=None,
                        meta=None,
                        galaxie=None,
                    )
                    related_clusters.append((self, placeholder, level))
                    related_private_clusters[dest_uuid] = placeholder
                continue

            related_cluster = cluster_dict[dest_uuid]

            public_relations_count += 1

            related_clusters.append((self, related_cluster, level))

            # Recurse unless depth is exhausted or the destination was already
            # reached at a shallower (or equal) level.
            if (depth > 1 or depth == -1) and (
                dest_uuid not in visited or visited[dest_uuid] > level + 1
            ):
                new_depth = depth - 1 if depth > 1 else -1
                if dest_uuid in cluster_dict:
                    related_clusters += cluster_dict[dest_uuid].get_related_clusters(
                        cluster_dict,
                        new_depth,
                        visited,
                        level + 1,
                        related_private_clusters,
                    )

        if empty_uuids > 0:
            empty_uuids_dict[self.value] = empty_uuids

        # Remove duplicate pairs (in either direction), keeping the occurrence
        # with the smallest level.
        to_remove = set()
        # Renamed from `cluster_dict`, which shadowed the parameter of the
        # same name above.
        best_pairs = {}
        for cluster in related_clusters:
            key1 = (cluster[0], cluster[1])
            key2 = (cluster[1], cluster[0])

            if key1 in best_pairs:
                if best_pairs[key1][2] > cluster[2]:
                    to_remove.add(best_pairs[key1])
                    best_pairs[key1] = cluster
                else:
                    to_remove.add(cluster)
            elif key2 in best_pairs:
                if best_pairs[key2][2] > cluster[2]:
                    to_remove.add(best_pairs[key2])
                    best_pairs[key2] = cluster
                else:
                    to_remove.add(cluster)
            else:
                best_pairs[key1] = cluster
        related_clusters = [
            cluster for cluster in related_clusters if cluster not in to_remove
        ]

        return related_clusters

    def _create_related_entry(self):
        self.entry += "\n"
        self.entry += '??? info "Related clusters"\n'
        self.entry += "\n"
        self.entry += f" To see the related clusters, click [here](./relations/{self.uuid}.md).\n"

    def _get_related_entry(self, relations):
        """Render the relations list as a Markdown table."""
        output = ""
        output += f"## Related clusters for {self.value}\n"
        output += "\n"
        output += "| Cluster A | Cluster B | Level {{ .graph }} |\n"
        output += "|-----------|-----------|-------|\n"
        for relation in relations:
            # Reuse the module-level slugifier instead of duplicating the
            # replace chain inline (identical transformation).
            cluster_a_section = name_to_section(relation[0].value)
            cluster_b_section = name_to_section(relation[1].value)

            # Private clusters have no galaxy page to link to.
            if cluster_b_section != "private-cluster":
                output += f"| [{relation[0].value} ({relation[0].uuid})](../../{relation[0].galaxie.json_file_name}/index.md#{cluster_a_section}) | [{relation[1].value} ({relation[1].uuid})](../../{relation[1].galaxie.json_file_name}/index.md#{cluster_b_section}) | {relation[2]} |\n"
            else:
                output += f"| [{relation[0].value} ({relation[0].uuid})](../../{relation[0].galaxie.json_file_name}/index.md#{cluster_a_section}) | {relation[1].value} ({relation[1].uuid}) | {relation[2]} |\n"
        return output

    def create_entry(self, cluster_dict):
        """Assemble this cluster's Markdown section; writes relation files too."""
        self._create_title_entry()
        self._create_description_entry()
        self._create_synonyms_entry()
        self._create_uuid_entry()
        self._create_refs_entry()
        self._create_associated_metadata_entry()
        if self.related_list:
            self._create_related_entry()
            self._write_relations(cluster_dict, SITE_PATH)
        return self.entry

    def _write_relations(self, cluster_dict, path):
        """Write this cluster's relation table under ``<galaxy>/relations/``."""
        related_clusters = self.get_related_clusters(cluster_dict)
        global relation_count_dict
        relation_count_dict[self.uuid] = len(related_clusters)
        galaxy_path = os.path.join(path, self.galaxie.json_file_name)
        if not os.path.exists(galaxy_path):
            os.mkdir(galaxy_path)
        relation_path = os.path.join(galaxy_path, "relations")
        if not os.path.exists(relation_path):
            os.mkdir(relation_path)
        # Hide the relations subpages from the MkDocs nav.
        with open(os.path.join(relation_path, ".pages"), "w") as index:
            index.write("hide: true\n")
        with open(os.path.join(relation_path, f"{self.uuid}.md"), "w") as index:
            index.write(self._get_related_entry(related_clusters))
|
||||
|
||||
|
||||
def create_index(galaxies):
    """Build the Markdown body of the top-level index page.

    Concatenates the static intro, one bullet link per galaxy, and the
    statistics and contributing boilerplate sections.
    """
    parts = [INTRO]
    for galaxy in galaxies:
        parts.append(f"- [{galaxy.name}](./{galaxy.json_file_name}/index.md)\n")
    parts.append(STATISTICS)
    parts.append(CONTRIBUTING)
    return "".join(parts)
|
||||
|
||||
|
||||
def get_top_x(dict, x, big_to_small=True):
    """Return the *x* highest- (or lowest-) valued keys and their values.

    Args:
        dict: mapping of key -> numeric count. (Parameter name shadows the
            builtin; kept for interface compatibility with existing callers.)
        x: how many entries to return.
        big_to_small: sort descending when True, ascending when False.

    Returns:
        (keys, values) — two parallel lists of length <= x.
    """
    sorted_items = sorted(
        dict.items(), key=operator.itemgetter(1), reverse=big_to_small
    )[:x]
    # FIX: values are taken from the already-sorted items instead of a second
    # full sort of dict.values(), which duplicated the O(n log n) work.
    top_x = [key for key, _ in sorted_items]
    top_x_values = [value for _, value in sorted_items]
    return top_x, top_x_values
|
||||
|
||||
|
||||
def name_to_section(name):
    """Convert a display name into its Markdown anchor/section slug.

    Lowercases, turns " - " and single spaces into hyphens, and strips
    slashes and colons — mirroring the anchors MkDocs generates.
    """
    token = "__TMP__"
    # Protect " - " first so it collapses to a single hyphen, not three.
    slug = name.lower().replace(" - ", token)
    for unwanted in ("/", ":"):
        slug = slug.replace(unwanted, "")
    slug = slug.replace(" ", "-")
    return slug.replace(token, "-")
|
||||
|
||||
|
||||
def create_statistics(cluster_dict):
    """Render the statistics.md page from the module-level counters.

    Reads the globals populated while clusters were rendered
    (``public_clusters_dict``, ``private_clusters``, ``relation_count_dict``,
    ``synonyms_count_dict``, relation counters) and returns the complete page
    as a Markdown string. *cluster_dict* maps uuid -> Cluster and is used to
    resolve display names and galaxy file names.
    """
    statistic_output = ""
    statistic_output += f"# MISP Galaxy statistics\n"
    # typo fix in generated output: "infomration" -> "information"
    statistic_output += "The MISP galaxy statistics are automatically generated based on the MISP galaxy JSON files. Therefore the statistics only include detailed information about public clusters and relations. Some statistics about private clusters and relations is included but only as an approximation based on the information gathered from the public clusters.\n"

    # --- Cluster statistics -------------------------------------------------
    statistic_output += f"# Cluster statistics\n"
    statistic_output += f"## Number of clusters\n"
    statistic_output += f"Here you can find the total number of clusters including public and private clusters. The number of public clusters has been calculated based on the number of unique Clusters in the MISP galaxy JSON files. The number of private clusters could only be approximated based on the number of relations to non-existing clusters. Therefore the number of private clusters is not accurate and only an approximation.\n"
    statistic_output += f"\n"
    statistic_output += f"| No. | Type | Count {{ .pie-chart }}|\n"
    statistic_output += f"|----|------|-------|\n"
    statistic_output += f"| 1 | Public clusters | {len(public_clusters_dict)} |\n"
    statistic_output += f"| 2 | Private clusters | {len(private_clusters)} |\n"
    statistic_output += f"\n"

    statistic_output += f"## Galaxies with the most clusters\n"
    galaxy_counts = {}
    for galaxy in public_clusters_dict.values():
        galaxy_counts[galaxy] = galaxy_counts.get(galaxy, 0) + 1
    top_galaxies, top_galaxies_values = get_top_x(galaxy_counts, 20)
    statistic_output += f" | No. | Galaxy | Count {{ .log-bar-chart }}|\n"
    statistic_output += f" |----|--------|-------|\n"
    for i, galaxy in enumerate(top_galaxies, 1):
        galaxy_section = name_to_section(galaxy.json_file_name)
        statistic_output += f" | {i} | [{galaxy.name}](../{galaxy_section}) | {top_galaxies_values[i-1]} |\n"
    statistic_output += f"\n"

    statistic_output += f"## Galaxies with the least clusters\n"
    flop_galaxies, flop_galaxies_values = get_top_x(galaxy_counts, 20, False)
    statistic_output += f" | No. | Galaxy | Count {{ .bar-chart }}|\n"
    statistic_output += f" |----|--------|-------|\n"
    for i, galaxy in enumerate(flop_galaxies, 1):
        galaxy_section = name_to_section(galaxy.json_file_name)
        statistic_output += f" | {i} | [{galaxy.name}](../{galaxy_section}) | {flop_galaxies_values[i-1]} |\n"
    statistic_output += f"\n"

    # --- Relation statistics ------------------------------------------------
    statistic_output += f"# Relation statistics\n"
    # typo fix in generated output: "relatons" -> "relations"
    statistic_output += f"Here you can find the total number of relations including public and private relations. The number includes relations between public clusters and relations between public and private clusters. Therefore relations between private clusters are not included in the statistics.\n"
    statistic_output += f"\n"
    statistic_output += f"## Number of relations\n"
    statistic_output += f"| No. | Type | Count {{ .pie-chart }}|\n"
    statistic_output += f"|----|------|-------|\n"
    statistic_output += f"| 1 | Public relations | {public_relations_count} |\n"
    statistic_output += f"| 2 | Private relations | {private_relations_count} |\n"
    statistic_output += f"\n"

    statistic_output += f"**Average number of relations per cluster**: {int(sum(relation_count_dict.values()) / len(relation_count_dict))}\n"

    statistic_output += f"## Cluster with the most relations\n"
    relation_count_dict_names = {
        cluster_dict[uuid].value: count for uuid, count in relation_count_dict.items()
    }
    top_25_relation, top_25_relation_values = get_top_x(relation_count_dict_names, 20)
    statistic_output += f" | No. | Cluster | Count {{ .bar-chart }}|\n"
    statistic_output += f" |----|--------|-------|\n"
    relation_count_dict_galaxies = {
        cluster_dict[uuid].value: cluster_dict[uuid].galaxie.json_file_name
        for uuid in relation_count_dict.keys()
    }
    for i, cluster in enumerate(top_25_relation, 1):
        cluster_section = name_to_section(cluster)
        statistic_output += f" | {i} | [{cluster}](../{relation_count_dict_galaxies[cluster]}/#{cluster_section}) | {top_25_relation_values[i-1]} |\n"
    statistic_output += f"\n"

    # --- Synonyms statistics ------------------------------------------------
    statistic_output += f"# Synonyms statistics\n"
    statistic_output += f"## Cluster with the most synonyms\n"
    synonyms_count_dict_names = {
        cluster_dict[uuid].value: count for uuid, count in synonyms_count_dict.items()
    }
    top_synonyms, top_synonyms_values = get_top_x(synonyms_count_dict_names, 20)
    statistic_output += f" | No. | Cluster | Count {{ .bar-chart }}|\n"
    statistic_output += f" |----|--------|-------|\n"
    synonyms_count_dict_galaxies = {
        cluster_dict[uuid].value: cluster_dict[uuid].galaxie.json_file_name
        for uuid in synonyms_count_dict.keys()
    }
    for i, cluster in enumerate(top_synonyms, 1):
        cluster_section = name_to_section(cluster)
        statistic_output += f" | {i} | [{cluster}](../{synonyms_count_dict_galaxies[cluster]}/#{cluster_section}) | {top_synonyms_values[i-1]} |\n"
    statistic_output += f"\n"

    return statistic_output
|
||||
print(f"Processed {galaxy}, {cluster}")
|
||||
return cluster, galaxy, relationships
|
||||
|
||||
|
||||
def get_deprecated_galaxy_files():
|
||||
|
@ -514,8 +47,9 @@ def get_deprecated_galaxy_files():
|
|||
return deprecated_galaxy_files
|
||||
|
||||
|
||||
def main():
|
||||
if __name__ == "__main__":
|
||||
start_time = time.time()
|
||||
universe = Universe()
|
||||
|
||||
FILES_TO_IGNORE.extend(get_deprecated_galaxy_files())
|
||||
galaxies_fnames = []
|
||||
|
@ -524,43 +58,115 @@ def main():
|
|||
galaxies_fnames.append(f)
|
||||
galaxies_fnames.sort()
|
||||
|
||||
galaxies = []
|
||||
# Create the universe of clusters and galaxies
|
||||
for galaxy in galaxies_fnames:
|
||||
with open(os.path.join(CLUSTER_PATH, galaxy)) as fr:
|
||||
galaxie_json = json.load(fr)
|
||||
galaxies.append(
|
||||
Galaxy(
|
||||
galaxie_json["values"],
|
||||
galaxie_json["authors"],
|
||||
galaxie_json["description"],
|
||||
galaxie_json["name"],
|
||||
galaxy.split(".")[0],
|
||||
galaxy_json = json.load(fr)
|
||||
universe.add_galaxy(
|
||||
galaxy_name=galaxy_json["name"],
|
||||
json_file_name=galaxy,
|
||||
authors=galaxy_json["authors"],
|
||||
description=galaxy_json["description"],
|
||||
)
|
||||
for cluster in galaxy_json["values"]:
|
||||
universe.add_cluster(
|
||||
galaxy_name=galaxy_json.get("name", None),
|
||||
uuid=cluster.get("uuid", None),
|
||||
description=cluster.get("description", None),
|
||||
value=cluster.get("value", None),
|
||||
meta=cluster.get("meta", None),
|
||||
)
|
||||
|
||||
cluster_dict = {}
|
||||
for galaxy in galaxies:
|
||||
for cluster in galaxy.clusters:
|
||||
cluster_dict[cluster.uuid] = cluster
|
||||
# Define the relationships between clusters
|
||||
for galaxy in galaxies_fnames:
|
||||
with open(os.path.join(CLUSTER_PATH, galaxy)) as fr:
|
||||
galaxy_json = json.load(fr)
|
||||
for cluster in galaxy_json["values"]:
|
||||
if "related" in cluster:
|
||||
for related in cluster["related"]:
|
||||
universe.define_relationship(
|
||||
cluster["uuid"], related["dest-uuid"]
|
||||
)
|
||||
|
||||
# Write files
|
||||
tasks = []
|
||||
for galaxy_name, galaxy in universe.galaxies.items():
|
||||
for cluster_name, cluster in galaxy.clusters.items():
|
||||
tasks.append((galaxy_name, cluster_name))
|
||||
|
||||
with Pool(processes=multiprocessing.cpu_count()) as pool:
|
||||
result = pool.map(get_cluster_relationships, tasks)
|
||||
|
||||
for cluster, galaxy, relationships in result:
|
||||
universe.galaxies[galaxy].clusters[cluster].relationships = relationships
|
||||
|
||||
print("All clusters processed.")
|
||||
|
||||
print(f"Finished relations in {time.time() - start_time} seconds")
|
||||
|
||||
# Write output
|
||||
if not os.path.exists(SITE_PATH):
|
||||
os.mkdir(SITE_PATH)
|
||||
|
||||
for galaxy in galaxies:
|
||||
galaxy.write_entry(SITE_PATH, cluster_dict)
|
||||
index = IndexSite(SITE_PATH)
|
||||
index.add_content(
|
||||
"# MISP Galaxy\n\nThe MISP galaxy offers a streamlined approach for representing large entities, known as clusters, which can be linked to MISP events or attributes. Each cluster consists of one or more elements, represented as key-value pairs. MISP galaxy comes with a default knowledge base, encompassing areas like Threat Actors, Tools, Ransomware, and ATT&CK matrices. However, users have the flexibility to modify, update, replace, or share these elements according to their needs.\n\nClusters and vocabularies within MISP galaxy can be utilized in their original form or as a foundational knowledge base. The distribution settings for each cluster can be adjusted, allowing for either restricted or wide dissemination.\n\nAdditionally, MISP galaxies enable the representation of existing standards like the MITRE ATT&CK™ framework, as well as custom matrices.\n\nThe aim is to provide a core set of clusters for organizations embarking on analysis, which can be further tailored to include localized, private information or additional, shareable data.\n\nClusters serve as an open and freely accessible knowledge base, which can be utilized and expanded within [MISP](https://www.misp-project.org/) or other threat intelligence platforms.\n\n![Overview of the integration of MISP galaxy in the MISP Threat Intelligence Sharing Platform](https://raw.githubusercontent.com/MISP/misp-galaxy/aa41337fd78946a60aef3783f58f337d2342430a/doc/images/galaxy.png)\n\n## Publicly available clusters\n"
|
||||
)
|
||||
index.add_toc(universe.galaxies.values())
|
||||
index.add_content(
|
||||
"## Statistics\n\nYou can find some statistics about MISP galaxies [here](./statistics.md).\n\n"
|
||||
)
|
||||
index.add_content(
|
||||
"# Contributing\n\nIn the dynamic realm of threat intelligence, a variety of models and approaches exist to systematically organize, categorize, and delineate threat actors, hazards, or activity groups. We embrace innovative methodologies for articulating threat intelligence. The galaxy model is particularly versatile, enabling you to leverage and integrate methodologies that you trust and are already utilizing within your organization or community.\n\nWe encourage collaboration and contributions to the [MISP Galaxy JSON files](https://github.com/MISP/misp-galaxy/). Feel free to fork the project, enhance existing elements or clusters, or introduce new ones. Your insights are valuable - share them with us through a pull-request.\n"
|
||||
)
|
||||
index.write_entry()
|
||||
|
||||
index_output = create_index(galaxies)
|
||||
statistic_output = create_statistics(cluster_dict=cluster_dict)
|
||||
statistics = StatisticsSite(SITE_PATH)
|
||||
statistics.add_content("# MISP Galaxy Statistics\n\n")
|
||||
statistics.add_cluster_statistics(
|
||||
len(
|
||||
[
|
||||
cluster
|
||||
for galaxy in universe.galaxies.values()
|
||||
for cluster in galaxy.clusters.values()
|
||||
]
|
||||
),
|
||||
len(universe.private_clusters),
|
||||
)
|
||||
statistics.add_galaxy_statistics(universe.galaxies.values())
|
||||
statistics.add_relation_statistics(
|
||||
[
|
||||
cluster
|
||||
for galaxy in universe.galaxies.values()
|
||||
for cluster in galaxy.clusters.values()
|
||||
]
|
||||
)
|
||||
statistics.add_synonym_statistics(
|
||||
[
|
||||
cluster
|
||||
for galaxy in universe.galaxies.values()
|
||||
for cluster in galaxy.clusters.values()
|
||||
]
|
||||
)
|
||||
statistics.write_entry()
|
||||
|
||||
with open(os.path.join(SITE_PATH, "index.md"), "w") as index:
|
||||
index.write(index_output)
|
||||
for galaxy in universe.galaxies.values():
|
||||
galaxy.write_entry(SITE_PATH)
|
||||
|
||||
with open(os.path.join(SITE_PATH, "statistics.md"), "w") as index:
|
||||
index.write(statistic_output)
|
||||
for galaxy in universe.galaxies.values():
|
||||
galaxy_path = os.path.join(
|
||||
SITE_PATH, f"{galaxy.json_file_name}".replace(".json", "")
|
||||
)
|
||||
if not os.path.exists(galaxy_path):
|
||||
os.mkdir(galaxy_path)
|
||||
relation_path = os.path.join(galaxy_path, "relations")
|
||||
if not os.path.exists(relation_path):
|
||||
os.mkdir(relation_path)
|
||||
with open(os.path.join(relation_path, ".pages"), "w") as index:
|
||||
index.write(f"hide: true\n")
|
||||
|
||||
print(f"Finished file creation in {time.time() - start_time} seconds")
|
||||
with ThreadPoolExecutor(
|
||||
max_workers=(multiprocessing.cpu_count() * 4)
|
||||
) as executor:
|
||||
executor.map(write_relations_table, galaxy.clusters.values())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
print(f"Finished in {time.time() - start_time} seconds")
|
||||
|
|
0
tools/mkdocs/modules/__init__.py
Normal file
0
tools/mkdocs/modules/__init__.py
Normal file
110
tools/mkdocs/modules/cluster.py
Normal file
110
tools/mkdocs/modules/cluster.py
Normal file
|
@ -0,0 +1,110 @@
|
|||
import validators
|
||||
|
||||
|
||||
class Cluster:
    """A single galaxy cluster and the markdown rendering logic for it.

    Holds the raw cluster fields from the galaxy JSON (uuid, value,
    description, meta) plus the relationship sets that are filled in while
    the universe graph is built.
    """

    def __init__(self, uuid, galaxy, description=None, value=None, meta=None):
        self.uuid = uuid
        self.description = description
        self.value = value
        self.meta = meta

        self.galaxy = galaxy  # Reference to the Galaxy object this cluster belongs to
        self.outbound_relationships = set()
        self.inbound_relationships = set()
        self.relationships = set()

    def add_outbound_relationship(self, cluster):
        """Record a relationship pointing from this cluster to *cluster*."""
        self.outbound_relationships.add(cluster)

    def add_inbound_relationship(self, cluster):
        """Record a relationship pointing from *cluster* to this cluster."""
        self.inbound_relationships.add(cluster)

    def save_relationships(self, relationships):
        """Store the resolved relationship tuples for this cluster."""
        self.relationships = relationships

    def generate_entry(self):
        """Return the complete markdown section for this cluster."""
        entry = ""
        entry += self._create_title_entry()
        entry += self._create_description_entry()
        entry += self._create_synonyms_entry()
        entry += self._create_uuid_entry()
        entry += self._create_refs_entry()
        entry += self._create_associated_metadata_entry()
        if self.relationships:
            entry += self._create_related_entry()
        return entry

    def _create_title_entry(self):
        entry = ""
        entry += f"## {self.value}\n"
        entry += "\n"
        return entry

    def _create_description_entry(self):
        entry = ""
        if self.description:
            entry += f"{self.description}\n"
        return entry

    def _create_synonyms_entry(self):
        # Only rendered when meta is a dict with a non-empty "synonyms" list.
        entry = ""
        if isinstance(self.meta, dict) and self.meta.get("synonyms"):
            entry += "\n"
            entry += '??? info "Synonyms"\n'
            entry += "\n"
            entry += f' "synonyms" in the meta part typically refer to alternate names or labels that are associated with a particular {self.value}.\n\n'
            entry += " | Known Synonyms |\n"
            entry += " |---------------------|\n"
            # BUG FIX: dropped the dead `synonyms_count` counter that was
            # incremented here but never read.
            for synonym in sorted(self.meta["synonyms"]):
                entry += f" | `{synonym}` |\n"
        return entry

    def _create_uuid_entry(self):
        entry = ""
        if self.uuid:
            entry += "\n"
            entry += '??? tip "Internal MISP references"\n'
            entry += "\n"
            entry += f" UUID `{self.uuid}` which can be used as unique global reference for `{self.value}` in MISP communities and other software using the MISP galaxy\n"
            entry += "\n"
        return entry

    def _create_refs_entry(self):
        entry = ""
        if isinstance(self.meta, dict) and self.meta.get("refs"):
            entry += "\n"
            entry += '??? info "External references"\n'
            entry += "\n"

            for ref in self.meta["refs"]:
                # Syntactically valid URLs also get a webarchive companion link;
                # anything else is rendered as plain text.
                if validators.url(ref):
                    entry += f" - [{ref}]({ref}) - :material-archive: :material-arrow-right: [webarchive](https://web.archive.org/web/*/{ref})\n"
                else:
                    entry += f" - {ref}\n"

            entry += "\n"
        return entry

    def _create_associated_metadata_entry(self):
        # Renders every meta key except the ones that already have their own
        # dedicated sections (synonyms, refs).
        entry = ""
        if isinstance(self.meta, dict):
            excluded_meta = ["synonyms", "refs"]
            entry += "\n"
            entry += '??? info "Associated metadata"\n'
            entry += "\n"
            entry += " |Metadata key { .no-filter } |Value|\n"
            entry += " |-----------------------------------|-----|\n"
            for meta in sorted(self.meta.keys()):
                if meta not in excluded_meta:
                    entry += f" | {meta} | {self.meta[meta]} |\n"
        return entry

    def _create_related_entry(self):
        entry = ""
        entry += "\n"
        entry += '??? info "Related clusters"\n'
        entry += "\n"
        entry += f" To see the related clusters, click [here](./relations/{self.uuid}.md).\n"
        return entry
|
78
tools/mkdocs/modules/galaxy.py
Normal file
78
tools/mkdocs/modules/galaxy.py
Normal file
|
@ -0,0 +1,78 @@
|
|||
from modules.cluster import Cluster
|
||||
from typing import List
|
||||
import os
|
||||
|
||||
|
||||
class Galaxy:
    """One MISP galaxy: its metadata plus the clusters it contains."""

    def __init__(
        self,
        galaxy_name: str,
        json_file_name: str,
        authors: List[str],
        description: str,
    ):
        self.galaxy_name = galaxy_name
        self.json_file_name = json_file_name
        self.authors = authors
        self.description = description

        self.clusters = {}  # Maps uuid to Cluster objects

    def add_cluster(self, uuid, description, value, meta):
        """Create and register a Cluster; later duplicates of a uuid are ignored."""
        if uuid in self.clusters:
            return
        self.clusters[uuid] = Cluster(
            uuid=uuid, galaxy=self, description=description, value=value, meta=meta
        )

    def write_entry(self, path):
        """Render this galaxy to <path>/<json file stem>/index.md."""
        target_dir = os.path.join(path, self.json_file_name.replace(".json", ""))
        if not os.path.exists(target_dir):
            os.mkdir(target_dir)
        with open(os.path.join(target_dir, "index.md"), "w") as fh:
            fh.write(self.generate_entry())

    def generate_entry(self):
        """Assemble the full markdown page for the galaxy."""
        parts = (
            self._create_metadata_entry(),
            self._create_title_entry(),
            self._create_description_entry(),
            self._create_authors_entry(),
            self._create_clusters_entry(),
        )
        return "".join(parts)

    def _create_metadata_entry(self):
        # Double quotes are replaced, presumably to keep the front-matter
        # value parseable — TODO confirm against the mkdocs config.
        safe_description = self.description.replace('"', "-")
        return f"---\ntitle: {self.galaxy_name}\ndescription: {safe_description}\n---\n"

    def _create_title_entry(self):
        return f"# {self.galaxy_name}\n"

    def _create_description_entry(self):
        return f"{self.description}\n"

    def _create_authors_entry(self):
        """Render the collapsible authors table; empty when there are no authors."""
        if not self.authors:
            return ""
        lines = [
            "\n",
            '??? info "Authors"\n',
            "\n",
            " | Authors and/or Contributors|\n",
            " |----------------------------|\n",
        ]
        lines.extend(f" |{author}|\n" for author in self.authors)
        return "".join(lines)

    def _create_clusters_entry(self):
        return "".join(cluster.generate_entry() for cluster in self.clusters.values())
|
117
tools/mkdocs/modules/site.py
Normal file
117
tools/mkdocs/modules/site.py
Normal file
|
@ -0,0 +1,117 @@
|
|||
import os
|
||||
|
||||
from utils.helper import create_bar_chart, get_top_x, create_pie_chart
|
||||
|
||||
|
||||
class Site:
    """Accumulates markdown content in memory and writes it to one file."""

    def __init__(self, path, name) -> None:
        self.path = path
        self.name = name
        self.content = ""

    def add_content(self, content):
        """Append a chunk of markdown to the page."""
        self.content += content

    def write_entry(self):
        """Write the accumulated content to <path>/<name>, creating <path> if needed."""
        if not os.path.exists(self.path):
            os.makedirs(self.path)
        target = os.path.join(self.path, self.name)
        with open(target, "w") as fh:
            fh.write(self.content)
|
||||
|
||||
|
||||
class IndexSite(Site):
    """The landing page (index.md) of the generated documentation site."""

    def __init__(self, path) -> None:
        super().__init__(path=path, name="index.md")

    def add_toc(self, galaxies):
        """Append a table of contents with one link per galaxy."""
        for galaxy in galaxies:
            folder = galaxy.json_file_name.replace(".json", "")
            self.add_content(f"- [{galaxy.galaxy_name}](./{folder}/index.md)\n")
        self.add_content("\n")
|
||||
|
||||
|
||||
class StatisticsSite(Site):
    """The statistics.md page: aggregated counts over galaxies and clusters."""

    def __init__(self, path) -> None:
        super().__init__(path=path, name="statistics.md")

    def add_galaxy_statistics(self, galaxies):
        """Append bar charts for the galaxies with the most / least clusters."""
        galaxy_cluster_count = {galaxy: len(galaxy.clusters) for galaxy in galaxies}
        top_20 = get_top_x(galaxy_cluster_count, 20)
        flop_20 = get_top_x(galaxy_cluster_count, 20, False)
        self.add_content("# Galaxy statistics\n")
        self.add_content("## Galaxies with the most clusters\n\n")
        self.add_content(
            create_bar_chart(
                x_axis="Galaxy", y_axis="Count", values=top_20, galaxy=True
            )
        )
        self.add_content("## Galaxies with the least clusters\n\n")
        self.add_content(
            create_bar_chart(
                x_axis="Galaxy", y_axis="Count", values=flop_20, galaxy=True
            )
        )

    def add_cluster_statistics(self, public_clusters, private_clusters):
        """Append a pie chart of public vs. (approximated) private cluster counts."""
        values = {
            "Public clusters": public_clusters,
            "Private clusters": private_clusters,
        }
        self.add_content("# Cluster statistics\n")
        self.add_content("## Number of clusters\n")
        self.add_content(
            "Here you can find the total number of clusters including public and private clusters. The number of public clusters has been calculated based on the number of unique Clusters in the MISP galaxy JSON files. The number of private clusters could only be approximated based on the number of relations to non-existing clusters. Therefore the number of private clusters is not accurate and only an approximation.\n\n"
        )
        self.add_content(create_pie_chart(sector="Type", unit="Count", values=values))

    def add_relation_statistics(self, clusters):
        """Append relation statistics: public/private split, average, top/flop charts."""
        cluster_relations = {}
        private_relations = 0
        public_relations = 0
        for cluster in clusters:
            cluster_relations[cluster] = len(cluster.relationships)
            for relation in cluster.relationships:
                # relation[1] is the target of a (source, target, level) tuple;
                # a "Private Cluster" target marks a link to a non-public cluster.
                if relation[1].value == "Private Cluster":
                    private_relations += 1
                else:
                    public_relations += 1
        top_20 = get_top_x(cluster_relations, 20)
        flop_20 = get_top_x(cluster_relations, 20, False)
        self.add_content("# Relation statistics\n")
        self.add_content(
            "Here you can find the total number of relations including public and private relations. The number includes relations between public clusters and relations between public and private clusters. Therefore relations between private clusters are not included in the statistics.\n\n"
        )
        self.add_content("## Number of relations\n\n")
        self.add_content(
            create_pie_chart(
                sector="Type",
                unit="Count",
                values={
                    "Public relations": public_relations,
                    "Private relations": private_relations,
                },
            )
        )
        # BUG FIX: guard against ZeroDivisionError when no clusters are passed in.
        average = (
            int(sum(cluster_relations.values()) / len(cluster_relations))
            if cluster_relations
            else 0
        )
        self.add_content(
            f"**Average number of relations per cluster**: {average}\n"
        )
        self.add_content("## Cluster with the most relations\n\n")
        self.add_content(
            create_bar_chart(x_axis="Cluster", y_axis="Count", values=top_20)
        )
        self.add_content("## Cluster with the least relations\n\n")
        self.add_content(
            create_bar_chart(x_axis="Cluster", y_axis="Count", values=flop_20)
        )

    def add_synonym_statistics(self, clusters):
        """Append a bar chart of the clusters with the most synonyms."""
        synonyms = {}
        for cluster in clusters:
            if cluster.meta and cluster.meta.get("synonyms"):
                synonyms[cluster] = len(cluster.meta["synonyms"])
        top_20 = get_top_x(synonyms, 20)
        self.add_content("# Synonym statistics\n")
        self.add_content("## Cluster with the most synonyms\n\n")
        self.add_content(
            create_bar_chart(x_axis="Cluster", y_axis="Count", values=top_20)
        )
|
109
tools/mkdocs/modules/universe.py
Normal file
109
tools/mkdocs/modules/universe.py
Normal file
|
@ -0,0 +1,109 @@
|
|||
from modules.galaxy import Galaxy
|
||||
from modules.cluster import Cluster
|
||||
|
||||
from collections import defaultdict, deque
|
||||
|
||||
|
||||
class Universe:
|
||||
def __init__(self, add_inbound_relationship=False):
|
||||
self.galaxies = {} # Maps galaxy_name to Galaxy objects
|
||||
self.add_inbound_relationship = add_inbound_relationship
|
||||
self.private_clusters = {}
|
||||
|
||||
def add_galaxy(self, galaxy_name, json_file_name, authors, description):
|
||||
if galaxy_name not in self.galaxies:
|
||||
self.galaxies[galaxy_name] = Galaxy(
|
||||
galaxy_name=galaxy_name,
|
||||
json_file_name=json_file_name,
|
||||
authors=authors,
|
||||
description=description,
|
||||
)
|
||||
|
||||
def add_cluster(self, galaxy_name, uuid, description, value, meta):
|
||||
if galaxy_name in self.galaxies:
|
||||
self.galaxies[galaxy_name].add_cluster(
|
||||
uuid=uuid, description=description, value=value, meta=meta
|
||||
)
|
||||
|
||||
def define_relationship(self, cluster_a_id, cluster_b_id):
|
||||
cluster_a = None
|
||||
cluster_b = None
|
||||
|
||||
if cluster_a_id == cluster_b_id:
|
||||
return
|
||||
|
||||
# Search for Cluster A and Cluster B in all galaxies
|
||||
for galaxy in self.galaxies.values():
|
||||
if cluster_a_id in galaxy.clusters:
|
||||
cluster_a = galaxy.clusters[cluster_a_id]
|
||||
if cluster_b_id in galaxy.clusters:
|
||||
cluster_b = galaxy.clusters[cluster_b_id]
|
||||
if cluster_a and cluster_b:
|
||||
break
|
||||
|
||||
# If both clusters are found, define the relationship
|
||||
if cluster_a and cluster_b:
|
||||
cluster_a.add_outbound_relationship(cluster_b)
|
||||
cluster_b.add_inbound_relationship(cluster_a)
|
||||
else:
|
||||
if cluster_a:
|
||||
# private_cluster = self.add_cluster(uuid=cluster_b_id, galaxy_name="Unknown", description=None, value="Private Cluster", meta=None)
|
||||
private_cluster = Cluster(
|
||||
uuid=cluster_b_id,
|
||||
galaxy=None,
|
||||
description=None,
|
||||
value="Private Cluster",
|
||||
meta=None,
|
||||
)
|
||||
self.private_clusters[cluster_b_id] = private_cluster
|
||||
cluster_a.add_outbound_relationship(private_cluster)
|
||||
else:
|
||||
raise ValueError(f"Cluster {cluster_a} not found in any galaxy")
|
||||
|
||||
def get_relationships_with_levels(self, start_cluster):
|
||||
|
||||
def bfs_with_undirected_relationships(start_cluster):
|
||||
visited = set() # Tracks whether a cluster has been visited
|
||||
relationships = defaultdict(
|
||||
lambda: float("inf")
|
||||
) # Tracks the lowest level for each cluster pair
|
||||
|
||||
queue = deque([(start_cluster, 0)]) # Queue of (cluster, level)
|
||||
|
||||
while queue:
|
||||
current_cluster, level = queue.popleft()
|
||||
if current_cluster not in visited:
|
||||
visited.add(current_cluster)
|
||||
|
||||
# Process all relationships regardless of direction
|
||||
if self.add_inbound_relationship:
|
||||
neighbors = current_cluster.outbound_relationships.union(
|
||||
current_cluster.inbound_relationships
|
||||
)
|
||||
else:
|
||||
neighbors = current_cluster.outbound_relationships
|
||||
for neighbor in neighbors:
|
||||
link = frozenset([current_cluster, neighbor])
|
||||
if level + 1 < relationships[link]:
|
||||
relationships[link] = level + 1
|
||||
if (
|
||||
neighbor not in visited
|
||||
and neighbor.value != "Private Cluster"
|
||||
):
|
||||
queue.append((neighbor, level + 1))
|
||||
|
||||
# Convert the defaultdict to a list of tuples, ignoring direction
|
||||
processed_relationships = []
|
||||
for link, lvl in relationships.items():
|
||||
# Extract clusters from the frozenset; direction is irrelevant
|
||||
clusters = list(link)
|
||||
|
||||
# Arbitrarily choose the first cluster as 'source' for consistency
|
||||
if clusters[0].value == "Private Cluster":
|
||||
processed_relationships.append((clusters[1], clusters[0], lvl))
|
||||
else:
|
||||
processed_relationships.append((clusters[0], clusters[1], lvl))
|
||||
|
||||
return processed_relationships
|
||||
|
||||
return bfs_with_undirected_relationships(start_cluster)
|
|
@ -1,10 +1,16 @@
|
|||
document$.subscribe(function () {
|
||||
|
||||
const NODE_RADIUS = 8;
|
||||
const NODE_COLOR = "#69b3a2";
|
||||
// const NODE_COLOR = "#69b3a2";
|
||||
const Parent_Node_COLOR = "#ff0000";
|
||||
|
||||
|
||||
function applyTableFilter(tf) {
|
||||
var valuesToSelect = ['1', '2', '3'];
|
||||
tf.setFilterValue(4, valuesToSelect);
|
||||
tf.filter();
|
||||
}
|
||||
|
||||
function parseFilteredTable(tf, allData) {
|
||||
var data = [];
|
||||
tf.getFilteredData().forEach((row, i) => {
|
||||
|
@ -13,9 +19,11 @@ document$.subscribe(function () {
|
|||
data.push({
|
||||
source: row[1][0],
|
||||
sourcePath: sourcePath,
|
||||
target: row[1][1],
|
||||
sourceGalaxy: row[1][1],
|
||||
target: row[1][2],
|
||||
targetPath: targetPath,
|
||||
level: row[1][2]
|
||||
targetGalaxy: row[1][3],
|
||||
level: row[1][4]
|
||||
});
|
||||
});
|
||||
return data;
|
||||
|
@ -28,14 +36,16 @@ document$.subscribe(function () {
|
|||
var cells = row.querySelectorAll("td");
|
||||
var sourceAnchor = cells[0].querySelector("a");
|
||||
var sourcePath = sourceAnchor ? sourceAnchor.getAttribute("href") : null;
|
||||
var targetAnchor = cells[1].querySelector("a");
|
||||
var targetAnchor = cells[2].querySelector("a");
|
||||
var targetPath = targetAnchor ? targetAnchor.getAttribute("href") : null;
|
||||
data.push({
|
||||
source: cells[0].textContent,
|
||||
target: cells[1].textContent,
|
||||
sourceGalaxy: cells[1].textContent,
|
||||
target: cells[2].textContent,
|
||||
targetGalaxy: cells[3].textContent,
|
||||
sourcePath: sourcePath,
|
||||
targetPath: targetPath,
|
||||
level: cells[2].textContent
|
||||
level: cells[4].textContent
|
||||
});
|
||||
}
|
||||
});
|
||||
|
@ -51,7 +61,8 @@ document$.subscribe(function () {
|
|||
var newNodes = Array.from(new Set(newData.flatMap(d => [d.source, d.target])))
|
||||
.map(id => ({
|
||||
id,
|
||||
path: nodePaths[id]
|
||||
path: nodePaths[id],
|
||||
galaxy: newData.find(d => d.source === id) ? newData.find(d => d.source === id).sourceGalaxy : newData.find(d => d.target === id).targetGalaxy
|
||||
}));
|
||||
|
||||
var newLinks = newData.map(d => ({ source: d.source, target: d.target }));
|
||||
|
@ -72,12 +83,46 @@ document$.subscribe(function () {
|
|||
nodePaths[d.target] = d.targetPath || null;
|
||||
});
|
||||
|
||||
// Extract unique galaxy names from data
|
||||
const galaxies = Array.from(new Set(data.flatMap(d => [d.sourceGalaxy, d.targetGalaxy])));
|
||||
|
||||
const colorScheme = [
|
||||
'#E63946', // Red
|
||||
'#F1FAEE', // Off White
|
||||
'#A8DADC', // Light Blue
|
||||
'#457B9D', // Medium Blue
|
||||
'#1D3557', // Dark Blue
|
||||
'#F4A261', // Sandy Brown
|
||||
'#2A9D8F', // Teal
|
||||
'#E9C46A', // Saffron
|
||||
'#F77F00', // Orange
|
||||
'#D62828', // Dark Red
|
||||
'#023E8A', // Royal Blue
|
||||
'#0077B6', // Light Sea Blue
|
||||
'#0096C7', // Sky Blue
|
||||
'#00B4D8', // Bright Sky Blue
|
||||
'#48CAE4', // Light Blue
|
||||
'#90E0EF', // Powder Blue
|
||||
'#ADE8F4', // Pale Cerulean
|
||||
'#CAF0F8', // Blithe Blue
|
||||
'#FFBA08', // Selective Yellow
|
||||
'#FFD60A' // Naples Yellow
|
||||
];
|
||||
const colorScale = d3.scaleOrdinal(colorScheme)
|
||||
.domain(galaxies);
|
||||
|
||||
var nodes = Array.from(new Set(data.flatMap(d => [d.source, d.target])))
|
||||
.map(id => ({
|
||||
id,
|
||||
path: nodePaths[id]
|
||||
path: nodePaths[id],
|
||||
galaxy: data.find(d => d.source === id) ? data.find(d => d.source === id).sourceGalaxy : data.find(d => d.target === id).targetGalaxy
|
||||
}));
|
||||
|
||||
let header = document.querySelector('h1').textContent;
|
||||
// const parentUUID = header.replace(/\s+/g, '-').charAt(0).toLowerCase() + header.replace(/\s+/g, '-').slice(1);
|
||||
// console.log("Parent UUID: " + parentUUID);
|
||||
const Parent_Node = nodes.find(node => node.id.includes(header));
|
||||
|
||||
var links = data.map(d => ({ source: d.source, target: d.target }));
|
||||
|
||||
var tooltip = d3.select("body").append("div")
|
||||
|
@ -96,9 +141,9 @@ document$.subscribe(function () {
|
|||
|
||||
var simulation = d3.forceSimulation(nodes)
|
||||
.force("link", d3.forceLink(links).id(d => d.id).distance(linkDistance))
|
||||
.force("charge", d3.forceManyBody().strength(-50))
|
||||
.force("charge", d3.forceManyBody().strength(-70))
|
||||
.force("center", d3.forceCenter(width / 2, height / 2))
|
||||
.alphaDecay(0.02); // A lower value, adjust as needed
|
||||
.alphaDecay(0.05); // A lower value, adjust as needed
|
||||
|
||||
// Create links
|
||||
var link = svg.append("g")
|
||||
|
@ -107,21 +152,22 @@ document$.subscribe(function () {
|
|||
.selectAll("line")
|
||||
.data(links)
|
||||
.enter().append("line")
|
||||
.attr("stroke-width", d => Math.sqrt(d.value));
|
||||
.attr("stroke-width", 1);
|
||||
|
||||
// Create nodes
|
||||
var node = svg.append("g")
|
||||
.attr("stroke", "#fff")
|
||||
.attr("stroke", "#D3D3D3")
|
||||
.attr("stroke-width", 1.5)
|
||||
.selectAll("circle")
|
||||
.data(nodes)
|
||||
.enter().append("circle")
|
||||
.attr("r", function (d, i) {
|
||||
return i === 0 ? NODE_RADIUS + 5 : NODE_RADIUS;
|
||||
return d.id === Parent_Node.id ? NODE_RADIUS + 5 : NODE_RADIUS;
|
||||
})
|
||||
.attr("fill", function (d, i) {
|
||||
return i === 0 ? Parent_Node_COLOR : NODE_COLOR;
|
||||
});
|
||||
return d.id === Parent_Node.id ? Parent_Node_COLOR : colorScale(d.galaxy);
|
||||
})
|
||||
.attr("class", d => "node galaxy-" + d.galaxy.replace(/\s+/g, '-').replace(/[\s.]/g, '-'));
|
||||
|
||||
// Apply tooltip on nodes
|
||||
node.on("mouseover", function (event, d) {
|
||||
|
@ -131,16 +177,41 @@ document$.subscribe(function () {
|
|||
tooltip.html(d.id)
|
||||
.style("left", (event.pageX) + "px")
|
||||
.style("top", (event.pageY - 28) + "px");
|
||||
node.style("opacity", 0.1);
|
||||
link.style("opacity", 0.1);
|
||||
d3.select(this)
|
||||
.attr("r", parseFloat(d3.select(this).attr("r")) + 5)
|
||||
.style("opacity", 1);
|
||||
svg.selectAll(".legend-text.galaxy-" + d.galaxy.replace(/\s+/g, '-').replace(/[\s.]/g, '-'))
|
||||
.style("font-weight", "bold")
|
||||
.style("font-size", "14px");
|
||||
link.filter(l => l.source.id === d.id || l.target.id === d.id)
|
||||
.attr("stroke-width", 3)
|
||||
.style("opacity", 1);
|
||||
node.filter(n => n.id === d.id || links.some(l => (l.source.id === d.id && l.target.id === n.id) || (l.target.id === d.id && l.source.id === n.id)))
|
||||
.style("opacity", 1);
|
||||
})
|
||||
.on("mousemove", function (event) {
|
||||
tooltip.style("left", (event.pageX) + "px")
|
||||
.style("top", (event.pageY - 28) + "px");
|
||||
})
|
||||
.on("mouseout", function (d) {
|
||||
.on("mouseout", function (event, d) {
|
||||
tooltip.transition()
|
||||
.duration(500)
|
||||
.style("opacity", 0);
|
||||
node.style("opacity", 1);
|
||||
link.style("opacity", 1);
|
||||
d3.select(this).attr("r", function (d, i) {
|
||||
return d.id === Parent_Node.id ? NODE_RADIUS + 5 : NODE_RADIUS;
|
||||
});
|
||||
svg.selectAll(".legend-text.galaxy-" + d.galaxy.replace(/\s+/g, '-').replace(/[\s.]/g, '-'))
|
||||
.style("font-weight", "normal")
|
||||
.style("font-size", "12px");
|
||||
link.filter(l => l.source.id === d.id || l.target.id === d.id)
|
||||
.attr("stroke-width", 1);
|
||||
node.filter(n => n.id === d.id || links.some(l => (l.source.id === d.id && l.target.id === n.id) || (l.target.id === d.id && l.source.id === n.id)))
|
||||
});
|
||||
|
||||
|
||||
// Apply links on nodes
|
||||
node.on("dblclick", function (event, d) {
|
||||
|
@ -172,6 +243,93 @@ document$.subscribe(function () {
|
|||
if (!event.active) simulation.alphaTarget(0);
|
||||
}
|
||||
|
||||
// Prepare legend data
|
||||
const legendData = galaxies.map(galaxy => ({
|
||||
name: galaxy,
|
||||
color: colorScale(galaxy)
|
||||
}));
|
||||
|
||||
const maxCharLength = 10; // Maximum number of characters to display in legend
|
||||
// Create legend
|
||||
const legend = svg.append("g")
|
||||
.attr("class", "legend")
|
||||
.attr("transform", "translate(" + (width - 100) + ",20)"); // Adjust position as needed
|
||||
|
||||
// Add legend title
|
||||
legend.append("text")
|
||||
.attr("x", 0)
|
||||
.attr("y", -10)
|
||||
.style("font-size", "13px")
|
||||
.style("text-anchor", "start")
|
||||
.style("fill", "grey")
|
||||
.text("Galaxy Colors");
|
||||
|
||||
// Add colored rectangles and text labels for each galaxy
|
||||
const legendItem = legend.selectAll(".legend-item")
|
||||
.data(legendData)
|
||||
.enter().append("g")
|
||||
.attr("class", "legend-item")
|
||||
.attr("transform", (d, i) => `translate(0, ${i * 20})`);
|
||||
|
||||
legendItem.append("rect")
|
||||
.attr("width", 12)
|
||||
.attr("height", 12)
|
||||
.style("fill", d => d.color)
|
||||
.on("mouseover", function (event, d) {
|
||||
node.style("opacity", 0.1);
|
||||
link.style("opacity", 0.1);
|
||||
svg.selectAll(".galaxy-" + d.name.replace(/\s+/g, '-').replace(/[\s.]/g, '-'))
|
||||
.each(function () {
|
||||
var currentRadius = d3.select(this).attr("r");
|
||||
d3.select(this).style("opacity", 1);
|
||||
});
|
||||
tooltip.transition()
|
||||
.duration(200)
|
||||
.style("opacity", .9);
|
||||
tooltip.html(d.name)
|
||||
.style("left", (event.pageX) + "px")
|
||||
.style("top", (event.pageY - 28) + "px");
|
||||
})
|
||||
.on("mouseout", function (event, d) {
|
||||
node.style("opacity", 1);
|
||||
link.style("opacity", 1);
|
||||
tooltip.transition()
|
||||
.duration(500)
|
||||
.style("opacity", 0);
|
||||
});
|
||||
|
||||
legendItem.append("text")
|
||||
.attr("x", 24)
|
||||
.attr("y", 9)
|
||||
.attr("dy", "0.35em")
|
||||
.style("text-anchor", "start")
|
||||
.style("fill", "grey")
|
||||
.style("font-size", "12px")
|
||||
.attr("class", d => "legend-text galaxy-" + d.name.replace(/\s+/g, '-').replace(/[\s.]/g, '-'))
|
||||
.text(d => d.name.length > maxCharLength ? d.name.substring(0, maxCharLength) + "..." : d.name)
|
||||
.on("mouseover", function (event, d) {
|
||||
node.style("opacity", 0.1);
|
||||
link.style("opacity", 0.1);
|
||||
svg.selectAll(".galaxy-" + d.name.replace(/\s+/g, '-').replace(/[\s.]/g, '-'))
|
||||
.each(function () {
|
||||
d3.select(this).style("opacity", 1);
|
||||
});
|
||||
tooltip.transition()
|
||||
.duration(200)
|
||||
.style("opacity", .9);
|
||||
tooltip.html(d.name)
|
||||
.style("left", (event.pageX) + "px")
|
||||
.style("top", (event.pageY - 28) + "px");
|
||||
})
|
||||
.on("mouseout", function (event, d) {
|
||||
node.style("opacity", 1);
|
||||
link.style("opacity", 1);
|
||||
tooltip.transition()
|
||||
.duration(500)
|
||||
.style("opacity", 0);
|
||||
});
|
||||
|
||||
|
||||
// Update positions on each simulation 'tick'
|
||||
simulation.on("tick", () => {
|
||||
nodes.forEach(d => {
|
||||
|
@ -199,11 +357,12 @@ document$.subscribe(function () {
|
|||
.join(
|
||||
enter => enter.append("circle")
|
||||
.attr("r", function (d, i) {
|
||||
return i === 0 ? NODE_RADIUS + 5 : NODE_RADIUS;
|
||||
return d.id === Parent_Node.id ? NODE_RADIUS + 5 : NODE_RADIUS;
|
||||
})
|
||||
.attr("fill", function (d, i) {
|
||||
return i === 0 ? Parent_Node_COLOR : NODE_COLOR;
|
||||
}),
|
||||
return d.id === Parent_Node.id ? Parent_Node_COLOR : colorScale(d.galaxy);
|
||||
})
|
||||
.attr("class", d => "node galaxy-" + d.galaxy.replace(/\s+/g, '-').replace(/[\s.]/g, '-')),
|
||||
update => update,
|
||||
exit => exit.remove()
|
||||
);
|
||||
|
@ -218,15 +377,39 @@ document$.subscribe(function () {
|
|||
tooltip.html(d.id)
|
||||
.style("left", (event.pageX) + "px")
|
||||
.style("top", (event.pageY - 28) + "px");
|
||||
node.style("opacity", 0.1);
|
||||
link.style("opacity", 0.1);
|
||||
d3.select(this)
|
||||
.attr("r", parseFloat(d3.select(this).attr("r")) + 5)
|
||||
.style("opacity", 1);
|
||||
svg.selectAll(".legend-text.galaxy-" + d.galaxy.replace(/\s+/g, '-').replace(/[\s.]/g, '-'))
|
||||
.style("font-weight", "bold")
|
||||
.style("font-size", "14px");
|
||||
link.filter(l => l.source.id === d.id || l.target.id === d.id)
|
||||
.attr("stroke-width", 3)
|
||||
.style("opacity", 1);
|
||||
node.filter(n => n.id === d.id || links.some(l => (l.source.id === d.id && l.target.id === n.id) || (l.target.id === d.id && l.source.id === n.id)))
|
||||
.style("opacity", 1);
|
||||
})
|
||||
.on("mousemove", function (event) {
|
||||
tooltip.style("left", (event.pageX) + "px")
|
||||
.style("top", (event.pageY - 28) + "px");
|
||||
})
|
||||
.on("mouseout", function (d) {
|
||||
.on("mouseout", function (event, d) {
|
||||
tooltip.transition()
|
||||
.duration(500)
|
||||
.style("opacity", 0);
|
||||
node.style("opacity", 1);
|
||||
link.style("opacity", 1);
|
||||
d3.select(this).attr("r", function (d, i) {
|
||||
return d.id === Parent_Node.id ? NODE_RADIUS + 5 : NODE_RADIUS;
|
||||
});
|
||||
svg.selectAll(".legend-text.galaxy-" + d.galaxy.replace(/\s+/g, '-').replace(/[\s.]/g, '-'))
|
||||
.style("font-weight", "normal")
|
||||
.style("font-size", "12px");
|
||||
link.filter(l => l.source.id === d.id || l.target.id === d.id)
|
||||
.attr("stroke-width", 1);
|
||||
node.filter(n => n.id === d.id || links.some(l => (l.source.id === d.id && l.target.id === n.id) || (l.target.id === d.id && l.source.id === n.id)))
|
||||
});
|
||||
|
||||
// Apply links on nodes
|
||||
|
@ -253,6 +436,8 @@ document$.subscribe(function () {
|
|||
// Restart the simulation with new data
|
||||
simulation.nodes(nodes);
|
||||
simulation.force("link").links(links);
|
||||
linkDistance = Math.sqrt((width * height) / nodes.length);
|
||||
simulation.force("link").distance(linkDistance);
|
||||
simulation.alpha(1).restart();
|
||||
}
|
||||
});
|
||||
|
@ -265,12 +450,14 @@ document$.subscribe(function () {
|
|||
var tf = new TableFilter(table, {
|
||||
base_path: "../../../../01_attachements/modules/tablefilter/",
|
||||
highlight_keywords: true,
|
||||
col_2: "checklist",
|
||||
col_widths: ["350px", "350px", "100px"],
|
||||
col_types: ["string", "string", "number"],
|
||||
col_1: "checklist",
|
||||
col_3: "checklist",
|
||||
col_4: "checklist",
|
||||
col_widths: ["180px", "180px", "180px", "180px", "100px"],
|
||||
col_types: ["string", "string", "string", "string", "number"],
|
||||
grid_layout: false,
|
||||
responsive: false,
|
||||
watermark: ["Filter table ...", "Filter table ..."],
|
||||
watermark: ["Filter table ...", "Filter table ...", "Filter table ...", "Filter table ..."],
|
||||
auto_filter: {
|
||||
delay: 100 //milliseconds
|
||||
},
|
||||
|
@ -297,7 +484,13 @@ document$.subscribe(function () {
|
|||
});
|
||||
|
||||
tf.init();
|
||||
var data = parseTable(table);
|
||||
var allData = parseTable(table);
|
||||
if (allData.length > 1000) {
|
||||
applyTableFilter(tf);
|
||||
data = parseFilteredTable(tf, allData);
|
||||
} else {
|
||||
data = allData;
|
||||
}
|
||||
var graphId = "graph" + index;
|
||||
var div = document.createElement("div");
|
||||
div.id = graphId;
|
||||
|
@ -306,7 +499,7 @@ document$.subscribe(function () {
|
|||
|
||||
// Listen for table filtering events
|
||||
tf.emitter.on(['after-filtering'], function () {
|
||||
filterTableAndGraph(tf, simulation, data);
|
||||
filterTableAndGraph(tf, simulation, allData);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
|
0
tools/mkdocs/utils/__init__.py
Normal file
0
tools/mkdocs/utils/__init__.py
Normal file
83
tools/mkdocs/utils/helper.py
Normal file
83
tools/mkdocs/utils/helper.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
import operator
|
||||
|
||||
|
||||
def get_top_x(dict, x, big_to_small=True):
|
||||
sorted_dict = sorted(
|
||||
dict.items(), key=operator.itemgetter(1), reverse=big_to_small
|
||||
)[:x]
|
||||
top_x = {key: value for key, value in sorted_dict}
|
||||
return top_x
|
||||
|
||||
|
||||
def name_to_section(name):
|
||||
placeholder = "__TMP__"
|
||||
return (
|
||||
name.lower()
|
||||
.replace(" - ", placeholder) # Replace " - " first
|
||||
.replace(" ", "-")
|
||||
.replace("/", "")
|
||||
.replace(":", "")
|
||||
.replace(placeholder, "-")
|
||||
) # Replace the placeholder with "-"
|
||||
|
||||
|
||||
def create_bar_chart(x_axis, y_axis, values, log=False, galaxy=False):
|
||||
if not log:
|
||||
chart = f"| No. | {x_axis} | {y_axis} {{ .bar-chart }}|\n"
|
||||
else:
|
||||
chart = f"| No. | {x_axis} | {y_axis} {{ .log-bar-chart }}|\n"
|
||||
chart += f"|----|--------|-------|\n"
|
||||
for i, (x, y) in enumerate(values.items()):
|
||||
if galaxy:
|
||||
chart += f"| {i+1} | {galaxy_transform_to_link(x)} | {y} |\n"
|
||||
else:
|
||||
chart += f"| {i+1} | {cluster_transform_to_link(x)} | {y} |\n"
|
||||
chart += "\n"
|
||||
return chart
|
||||
|
||||
|
||||
def create_pie_chart(sector, unit, values):
|
||||
chart = f"| No. | {sector} | {unit} {{ .pie-chart }}|\n"
|
||||
chart += f"|----|--------|-------|\n"
|
||||
for i, (x, y) in enumerate(values.items()):
|
||||
chart += f"| {i+1} | {x} | {y} |\n"
|
||||
chart += "\n"
|
||||
return chart
|
||||
|
||||
|
||||
def cluster_transform_to_link(cluster, uuid=False):
|
||||
placeholder = "__TMP__"
|
||||
section = (
|
||||
cluster.value.lower()
|
||||
.replace(" - ", placeholder) # Replace " - " first
|
||||
.replace(" ", "-")
|
||||
.replace("/", "")
|
||||
.replace(":", "")
|
||||
.replace(placeholder, "-")
|
||||
)
|
||||
galaxy_folder = cluster.galaxy.json_file_name.replace(".json", "")
|
||||
if uuid:
|
||||
return f"[{cluster.value} ({cluster.uuid})](../../{galaxy_folder}/index.md#{section})"
|
||||
else:
|
||||
return f"[{cluster.value}](../../{galaxy_folder}/index.md#{section})"
|
||||
|
||||
|
||||
def galaxy_transform_to_link(galaxy):
|
||||
galaxy_folder = galaxy.json_file_name.replace(".json", "")
|
||||
return f"[{galaxy.galaxy_name}](../../{galaxy_folder}/index.md)"
|
||||
|
||||
|
||||
def generate_relations_table(cluster):
|
||||
relationships = cluster.relationships
|
||||
markdown = f"# {cluster.value} \n\n"
|
||||
markdown += f"{cluster.description} \n\n"
|
||||
markdown += "|Cluster A | Galaxy A | Cluster B | Galaxy B | Level { .graph } |\n"
|
||||
markdown += "| --- | --- | --- | --- | --- |\n"
|
||||
for from_cluster, to_cluster, level in relationships:
|
||||
from_galaxy = from_cluster.galaxy
|
||||
if to_cluster.value != "Private Cluster":
|
||||
to_galaxy = to_cluster.galaxy
|
||||
markdown += f"{cluster_transform_to_link(from_cluster, uuid=True)} | {galaxy_transform_to_link(from_galaxy)} | {cluster_transform_to_link(to_cluster, uuid=True)} | {galaxy_transform_to_link(to_galaxy)} | {level}\n"
|
||||
else:
|
||||
markdown += f"{cluster_transform_to_link(from_cluster, uuid=True)} | {galaxy_transform_to_link(from_galaxy)} | {to_cluster.value} ({to_cluster.uuid}) | Unknown | {level}\n"
|
||||
return markdown
|
0
tools/tidal-api/api/__init__.py
Normal file
0
tools/tidal-api/api/__init__.py
Normal file
15
tools/tidal-api/api/api.py
Normal file
15
tools/tidal-api/api/api.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
import requests
|
||||
|
||||
class TidalAPI:
|
||||
def __init__(self):
|
||||
self.base_url = 'https://app-api.tidalcyber.com/api/v1/'
|
||||
|
||||
def get_data(self, endpoint):
|
||||
url = self.base_url + endpoint
|
||||
try:
|
||||
response = requests.get(url)
|
||||
return response.json()
|
||||
except Exception as e:
|
||||
print(f'Error: {e}')
|
||||
return None
|
||||
|
20
tools/tidal-api/config/campaigns.json
Normal file
20
tools/tidal-api/config/campaigns.json
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"galaxy": {
|
||||
"name": "Tidal Campaigns",
|
||||
"namespace": "tidal",
|
||||
"description": "Tidal Campaigns Galaxy",
|
||||
"type": "campaigns",
|
||||
"uuid": "3db4b6cb-5b89-4096-a057-e0205777adc9",
|
||||
"icon": "bullhorn"
|
||||
},
|
||||
"cluster": {
|
||||
"authors": [
|
||||
"Tidal Cyber"
|
||||
],
|
||||
"category": "Campaigns",
|
||||
"description": "Tidal Campaigns Cluster",
|
||||
"name": "Tidal Campaigns",
|
||||
"source": "https://app-api.tidalcyber.com/api/v1/campaigns/",
|
||||
"type": "campaigns"
|
||||
}
|
||||
}
|
20
tools/tidal-api/config/groups.json
Normal file
20
tools/tidal-api/config/groups.json
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"galaxy": {
|
||||
"name": "Tidal Groups",
|
||||
"namespace": "tidal",
|
||||
"description": "Tidal Groups Galaxy",
|
||||
"type": "groups",
|
||||
"uuid": "877cdc4b-3392-4353-a7d4-2e46d40e5936",
|
||||
"icon": "user-secret"
|
||||
},
|
||||
"cluster": {
|
||||
"authors": [
|
||||
"Tidal Cyber"
|
||||
],
|
||||
"category": "Threat Groups",
|
||||
"description": "Tidal Groups Galaxy",
|
||||
"name": "Tidal Groups",
|
||||
"source": "https://app-api.tidalcyber.com/api/v1/groups/",
|
||||
"type": "groups"
|
||||
}
|
||||
}
|
20
tools/tidal-api/config/references.json
Normal file
20
tools/tidal-api/config/references.json
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"galaxy": {
|
||||
"name": "Tidal References",
|
||||
"namespace": "tidal",
|
||||
"description": "Tidal References Galaxy",
|
||||
"type": "references",
|
||||
"uuid": "efd98ec4-16ef-41c4-bc3c-60c7c1ae8b39",
|
||||
"icon": "list"
|
||||
},
|
||||
"cluster": {
|
||||
"authors": [
|
||||
"Tidal Cyber"
|
||||
],
|
||||
"category": "References",
|
||||
"description": "Tidal References Cluster",
|
||||
"name": "Tidal References",
|
||||
"source": "https://app-api.tidalcyber.com/api/v1/references/",
|
||||
"type": "references"
|
||||
}
|
||||
}
|
20
tools/tidal-api/config/software.json
Normal file
20
tools/tidal-api/config/software.json
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"galaxy": {
|
||||
"name": "Tidal Software",
|
||||
"namespace": "tidal",
|
||||
"description": "Tidal Software Galaxy",
|
||||
"type": "software",
|
||||
"uuid": "6eb44da4-ed4f-4a5d-a444-0f105ff1b3c2",
|
||||
"icon": "file-code"
|
||||
},
|
||||
"cluster": {
|
||||
"authors": [
|
||||
"Tidal Cyber"
|
||||
],
|
||||
"category": "Software",
|
||||
"description": "Tidal Software Cluster",
|
||||
"name": "Tidal Software",
|
||||
"source": "https://app-api.tidalcyber.com/api/v1/software/",
|
||||
"type": "software"
|
||||
}
|
||||
}
|
20
tools/tidal-api/config/tactic.json
Normal file
20
tools/tidal-api/config/tactic.json
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"galaxy": {
|
||||
"name": "Tidal Tactic",
|
||||
"namespace": "tidal",
|
||||
"description": "Tidal Tactic Galaxy",
|
||||
"type": "tactic",
|
||||
"uuid": "16b963e7-4b88-44e0-b184-16bf9e71fdc9",
|
||||
"icon": "map"
|
||||
},
|
||||
"cluster": {
|
||||
"authors": [
|
||||
"Tidal Cyber"
|
||||
],
|
||||
"category": "Tactic",
|
||||
"description": "Tidal Tactic Cluster",
|
||||
"name": "Tidal Tactic",
|
||||
"source": "https://app-api.tidalcyber.com/api/v1/tactic/",
|
||||
"type": "tactic"
|
||||
}
|
||||
}
|
20
tools/tidal-api/config/technique.json
Normal file
20
tools/tidal-api/config/technique.json
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"galaxy": {
|
||||
"name": "Tidal Technique",
|
||||
"namespace": "tidal",
|
||||
"description": "Tidal Technique Galaxy",
|
||||
"type": "technique",
|
||||
"uuid": "298b6aee-981b-4fd8-8759-a2e72ad223fa",
|
||||
"icon": "user-ninja"
|
||||
},
|
||||
"cluster": {
|
||||
"authors": [
|
||||
"Tidal Cyber"
|
||||
],
|
||||
"category": "Technique",
|
||||
"description": "Tidal Technique Cluster",
|
||||
"name": "Tidal Technique",
|
||||
"source": "https://app-api.tidalcyber.com/api/v1/technique/",
|
||||
"type": "technique"
|
||||
}
|
||||
}
|
131
tools/tidal-api/main.py
Normal file
131
tools/tidal-api/main.py
Normal file
|
@ -0,0 +1,131 @@
|
|||
from api.api import TidalAPI
|
||||
from models.galaxy import Galaxy
|
||||
from models.cluster import (
|
||||
GroupCluster,
|
||||
SoftwareCluster,
|
||||
CampaignsCluster,
|
||||
TechniqueCluster,
|
||||
TacticCluster,
|
||||
ReferencesCluster,
|
||||
)
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
|
||||
CONFIG = "./config"
|
||||
GALAXY_PATH = "../../galaxies"
|
||||
CLUSTER_PATH = "../../clusters"
|
||||
|
||||
|
||||
def create_galaxy(
|
||||
endpoint: str,
|
||||
version: int,
|
||||
extended_relations: bool = False,
|
||||
create_subs: bool = False,
|
||||
):
|
||||
api = TidalAPI()
|
||||
data = api.get_data(endpoint)
|
||||
with open(f"{CONFIG}/{endpoint}.json", "r") as file:
|
||||
config = json.load(file)
|
||||
|
||||
galaxy = Galaxy(**config["galaxy"], version=version)
|
||||
galaxy.save_to_file(f"{GALAXY_PATH}/tidal-{endpoint}.json")
|
||||
|
||||
match endpoint:
|
||||
case "groups":
|
||||
cluster = GroupCluster(
|
||||
**config["cluster"],
|
||||
uuid=galaxy.uuid,
|
||||
enrichment=extended_relations,
|
||||
subs=create_subs,
|
||||
version=version,
|
||||
)
|
||||
cluster.add_values(data)
|
||||
case "software":
|
||||
cluster = SoftwareCluster(
|
||||
**config["cluster"],
|
||||
uuid=galaxy.uuid,
|
||||
version=version,
|
||||
enrichment=extended_relations,
|
||||
subs=create_subs,
|
||||
)
|
||||
cluster.add_values(data)
|
||||
case "campaigns":
|
||||
cluster = CampaignsCluster(**config["cluster"], uuid=galaxy.uuid, version=version)
|
||||
cluster.add_values(data)
|
||||
case "technique":
|
||||
cluster = TechniqueCluster(
|
||||
**config["cluster"], uuid=galaxy.uuid, subs=create_subs, version=version
|
||||
)
|
||||
cluster.add_values(data)
|
||||
case "tactic":
|
||||
cluster = TacticCluster(**config["cluster"], uuid=galaxy.uuid, version=version)
|
||||
cluster.add_values(data)
|
||||
case "references":
|
||||
cluster = ReferencesCluster(**config["cluster"], uuid=galaxy.uuid, version=version)
|
||||
cluster.add_values(data)
|
||||
case _:
|
||||
print("Error: Invalid endpoint")
|
||||
return
|
||||
|
||||
cluster.save_to_file(f"{CLUSTER_PATH}/tidal-{endpoint}.json")
|
||||
print(f"Galaxy tidal-{endpoint} created")
|
||||
|
||||
|
||||
def main(args, galaxies):
|
||||
if args.all:
|
||||
for galaxy in galaxies:
|
||||
create_galaxy(
|
||||
galaxy, args.version, args.extended_relations, args.create_subs
|
||||
)
|
||||
else:
|
||||
create_galaxy(
|
||||
args.type, args.version, args.extended_relations, args.create_subs
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
galaxies = []
|
||||
for f in os.listdir(CONFIG):
|
||||
if f.endswith(".json"):
|
||||
galaxies.append(f.split(".")[0])
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Create galaxy and cluster json files from Tidal API"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-a",
|
||||
"--all",
|
||||
action="store_true",
|
||||
help="Create all galaxies and clusters",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--type",
|
||||
choices=galaxies,
|
||||
help="The type of the file to create",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v",
|
||||
"--version",
|
||||
type=int,
|
||||
required=True,
|
||||
help="The version of the galaxy",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--extended-relations",
|
||||
action="store_true",
|
||||
help="Create extended relations for the clusters",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--create-subs",
|
||||
action="store_true",
|
||||
help="Create subclusters from the API",
|
||||
)
|
||||
parser.set_defaults(func=main)
|
||||
|
||||
args = parser.parse_args()
|
||||
if hasattr(args, "func"):
|
||||
args.func(args, galaxies=galaxies)
|
||||
else:
|
||||
parser.print_help()
|
0
tools/tidal-api/models/__init__.py
Normal file
0
tools/tidal-api/models/__init__.py
Normal file
623
tools/tidal-api/models/cluster.py
Normal file
623
tools/tidal-api/models/cluster.py
Normal file
|
@ -0,0 +1,623 @@
|
|||
from dataclasses import dataclass, field, asdict
|
||||
from typing import Type
|
||||
import json
|
||||
|
||||
|
||||
@dataclass
|
||||
class Meta:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class GroupsMeta(Meta):
|
||||
source: str = None
|
||||
group_attack_id: str = None
|
||||
country: str = None
|
||||
observed_countries: list = None
|
||||
observed_motivations: list = None
|
||||
target_categories: list = None
|
||||
tags: list = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class AssociatedGroupsMeta(Meta):
|
||||
id: str = None
|
||||
owner_id: str = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class SoftwareMeta(Meta):
|
||||
source: str = None
|
||||
type: list = None
|
||||
software_attack_id: str = None
|
||||
platforms: list = None
|
||||
tags: list = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class AssociatedSoftwareMeta(Meta):
|
||||
id: str = None
|
||||
owner_id: str = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class TechniqueMeta(Meta):
|
||||
source: str = None
|
||||
platforms: list = None
|
||||
tags: list = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class SubTechniqueMeta(Meta):
|
||||
source: str = None
|
||||
technique_attack_id: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class TacticMeta(Meta):
|
||||
source: str = None
|
||||
tactic_attack_id: str = None
|
||||
ordinal_position: str = None
|
||||
tags: list = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ReferencesMeta(Meta):
|
||||
source: str = None
|
||||
refs: list = None
|
||||
title: str = None
|
||||
author: str = None
|
||||
date_accessed: str = None
|
||||
date_published: str = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class CampaignsMeta(Meta):
|
||||
source: str = None
|
||||
campaign_attack_id: str = None
|
||||
first_seen: str = None
|
||||
last_seen: str = None
|
||||
tags: list = None
|
||||
owner: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ClusterValue:
|
||||
description: str = ""
|
||||
meta: Meta = field(default_factory=Meta)
|
||||
related: list = field(default_factory=list)
|
||||
uuid: str = ""
|
||||
value: str = ""
|
||||
|
||||
def return_value(self):
|
||||
value_dict = asdict(self)
|
||||
value_dict["meta"] = {
|
||||
k: v for k, v in asdict(self.meta).items() if v is not None and v != []
|
||||
}
|
||||
return value_dict
|
||||
|
||||
|
||||
class Cluster:
|
||||
def __init__(
|
||||
self,
|
||||
authors: str,
|
||||
category: str,
|
||||
description: str,
|
||||
name: str,
|
||||
source: str,
|
||||
type: str,
|
||||
uuid: str,
|
||||
version: int,
|
||||
):
|
||||
self.authors = authors
|
||||
self.category = category
|
||||
self.description = description
|
||||
self.name = name
|
||||
self.source = source
|
||||
self.type = type
|
||||
self.uuid = uuid
|
||||
self.version = version
|
||||
self.values = []
|
||||
self.CLUSTER_PATH = "../../clusters"
|
||||
|
||||
def add_values(self, data: dict, meta_class: Type[Meta]):
|
||||
pass
|
||||
|
||||
def save_to_file(self, path):
|
||||
with open(path, "w") as file:
|
||||
file.write(json.dumps(self.__dict__(), indent=4))
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Cluster: {self.name} - {self.type} - {self.uuid}"
|
||||
|
||||
def __dict__(self) -> dict:
|
||||
return {
|
||||
"authors": self.authors,
|
||||
"category": self.category,
|
||||
"description": self.description,
|
||||
"name": self.name,
|
||||
"source": self.source,
|
||||
"type": self.type,
|
||||
"uuid": self.uuid,
|
||||
"values": self.values,
|
||||
"version": self.version,
|
||||
}
|
||||
|
||||
def _get_relation_from_mitre_id(
|
||||
self, mitre_id: str, cluster: str, meta_key: str, array: bool = False
|
||||
):
|
||||
with open(f"{self.CLUSTER_PATH}/{cluster}.json", "r") as file:
|
||||
mitre = json.load(file)
|
||||
for entry in mitre["values"]:
|
||||
try:
|
||||
if array:
|
||||
for id in entry["meta"][meta_key]:
|
||||
if id == mitre_id:
|
||||
return entry["uuid"]
|
||||
else:
|
||||
if entry["meta"][meta_key] == mitre_id:
|
||||
return entry["uuid"]
|
||||
except KeyError:
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
class GroupCluster(Cluster):
|
||||
def __init__(
|
||||
self,
|
||||
authors: str,
|
||||
category: str,
|
||||
description: str,
|
||||
name: str,
|
||||
source: str,
|
||||
type: str,
|
||||
uuid: str,
|
||||
version: int,
|
||||
enrichment: bool = False,
|
||||
subs: bool = False,
|
||||
):
|
||||
super().__init__(authors, category, description, name, source, type, uuid, version)
|
||||
self.enrichment = enrichment
|
||||
self.subs = subs
|
||||
|
||||
def add_values(self, data):
|
||||
for entry in data["data"]:
|
||||
meta = GroupsMeta(
|
||||
source=entry.get("source"),
|
||||
group_attack_id=entry.get("group_attack_id"),
|
||||
country=(
|
||||
entry.get("country")[0].get("country_code")
|
||||
if entry.get("country")
|
||||
else None
|
||||
),
|
||||
observed_countries=[
|
||||
x.get("country_code") for x in entry.get("observed_country")
|
||||
],
|
||||
observed_motivations=[
|
||||
x.get("name") for x in entry.get("observed_motivation")
|
||||
],
|
||||
target_categories=[x.get("name") for x in entry.get("observed_sector")],
|
||||
tags=[x.get("tag") for x in entry.get("tags")],
|
||||
owner=entry.get("owner_name"),
|
||||
)
|
||||
related = []
|
||||
if self.enrichment:
|
||||
related_cluster = self._get_relation_from_mitre_id(
|
||||
entry.get("group_attack_id"), "threat-actor", "synonyms", True
|
||||
)
|
||||
if related_cluster:
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": related_cluster,
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
if self.subs:
|
||||
for associated_group in entry.get("associated_groups"):
|
||||
found = False
|
||||
for x in self.values:
|
||||
if associated_group.get("associated_group_id") == x.get("uuid"):
|
||||
x["related"].append(
|
||||
{
|
||||
"dest-uuid": entry.get("id"),
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
continue
|
||||
associated_meta = AssociatedGroupsMeta(
|
||||
id=associated_group.get("id"),
|
||||
owner_id=associated_group.get("owner_id"),
|
||||
owner=associated_group.get("owner_name"),
|
||||
)
|
||||
associated_related = []
|
||||
associated_related.append(
|
||||
{
|
||||
"dest-uuid": entry.get("id"),
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
value = ClusterValue(
|
||||
description=associated_group.get("description"),
|
||||
meta=associated_meta,
|
||||
related=associated_related,
|
||||
uuid=associated_group.get("associated_group_id"),
|
||||
value=associated_group.get("name") + " - Associated Group",
|
||||
)
|
||||
self.values.append(value.return_value())
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": associated_group.get("associated_group_id"),
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
value = ClusterValue(
|
||||
description=entry.get("description"),
|
||||
meta=meta,
|
||||
related=related,
|
||||
uuid=entry.get("id"),
|
||||
value=entry.get("name"),
|
||||
)
|
||||
|
||||
# Code Block for handling duplicate from Tidal API data (hopefully only temporary)
|
||||
if value.uuid == "3290dcb9-5781-4b87-8fa0-6ae820e152cd":
|
||||
value.value = "Volt Typhoon - Tidal"
|
||||
|
||||
self.values.append(value.return_value())
|
||||
|
||||
|
||||
class SoftwareCluster(Cluster):
|
||||
def __init__(
|
||||
self,
|
||||
authors: str,
|
||||
category: str,
|
||||
description: str,
|
||||
name: str,
|
||||
source: str,
|
||||
type: str,
|
||||
uuid: str,
|
||||
version: int,
|
||||
enrichment: bool = False,
|
||||
subs: bool = False,
|
||||
):
|
||||
super().__init__(authors, category, description, name, source, type, uuid, version)
|
||||
self.enrichment = enrichment
|
||||
self.subs = subs
|
||||
|
||||
def add_values(self, data):
|
||||
for entry in data["data"]:
|
||||
meta = SoftwareMeta(
|
||||
source=entry.get("source"),
|
||||
type=[entry.get("type")],
|
||||
software_attack_id=entry.get("software_attack_id"),
|
||||
platforms=[x.get("name") for x in entry.get("platforms")],
|
||||
tags=[x.get("tag") for x in entry.get("tags")],
|
||||
owner=entry.get("owner_name"),
|
||||
)
|
||||
related = []
|
||||
for relation in entry.get("groups"):
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": relation.get("group_id"),
|
||||
"type": "used-by",
|
||||
}
|
||||
)
|
||||
if self.enrichment:
|
||||
related_cluster = self._get_relation_from_mitre_id(
|
||||
entry.get("software_attack_id"), "mitre-tool", "external_id"
|
||||
)
|
||||
if related_cluster:
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": related_cluster,
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
|
||||
related_cluster = self._get_relation_from_mitre_id(
|
||||
entry.get("software_attack_id"), "mitre-malware", "external_id"
|
||||
)
|
||||
if related_cluster:
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": related_cluster,
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
if self.subs:
|
||||
for associated_software in entry.get("associated_software"):
|
||||
found = False
|
||||
for x in self.values:
|
||||
if associated_software.get("associated_software_id") == x.get("uuid"):
|
||||
x["related"].append(
|
||||
{
|
||||
"dest-uuid": entry.get("id"),
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
continue
|
||||
associated_meta = AssociatedSoftwareMeta(
|
||||
id=associated_software.get("id"),
|
||||
owner_id=associated_software.get("owner_id"),
|
||||
owner=associated_software.get("owner_name"),
|
||||
)
|
||||
associated_related = []
|
||||
associated_related.append(
|
||||
{
|
||||
"dest-uuid": entry.get("id"),
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
value = ClusterValue(
|
||||
description=associated_software.get("description"),
|
||||
meta=associated_meta,
|
||||
related=associated_related,
|
||||
uuid=associated_software.get("associated_software_id"),
|
||||
value=associated_software.get("name") + " - Associated Software",
|
||||
)
|
||||
self.values.append(value.return_value())
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": associated_software.get(
|
||||
"associated_software_id"
|
||||
),
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
|
||||
value = ClusterValue(
|
||||
description=entry.get("description"),
|
||||
meta=meta,
|
||||
related=related,
|
||||
uuid=entry.get("id"),
|
||||
value=entry.get("name"),
|
||||
)
|
||||
self.values.append(value.return_value())
|
||||
|
||||
|
||||
class TechniqueCluster(Cluster):
|
||||
def __init__(
|
||||
self,
|
||||
authors: str,
|
||||
category: str,
|
||||
description: str,
|
||||
name: str,
|
||||
source: str,
|
||||
type: str,
|
||||
uuid: str,
|
||||
version: int,
|
||||
subs: bool = False,
|
||||
):
|
||||
super().__init__(authors, category, description, name, source, type, uuid, version)
|
||||
self.subs = subs
|
||||
|
||||
def add_values(self, data):
|
||||
for entry in data["data"]:
|
||||
meta = TechniqueMeta(
|
||||
source=entry.get("source"),
|
||||
platforms=[x.get("name") for x in entry.get("platforms")],
|
||||
tags=[x.get("tag") for x in entry.get("tags")],
|
||||
owner=entry.get("owner_name"),
|
||||
)
|
||||
related = []
|
||||
for relation in entry.get("tactic"):
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": relation.get("tactic_id"),
|
||||
"type": "uses",
|
||||
}
|
||||
)
|
||||
|
||||
if self.subs:
|
||||
for sub_technique in entry.get("sub_technique"):
|
||||
sub_meta = SubTechniqueMeta(
|
||||
source=sub_technique.get("source"),
|
||||
technique_attack_id=sub_technique.get("technique_attack_id"),
|
||||
)
|
||||
sub_related = []
|
||||
for relation in sub_technique.get("tactic"):
|
||||
sub_related.append(
|
||||
{
|
||||
"dest-uuid": relation.get("tactic_id"),
|
||||
"type": "uses",
|
||||
}
|
||||
)
|
||||
sub_value = ClusterValue(
|
||||
description=sub_technique.get("description"),
|
||||
meta=sub_meta,
|
||||
related=sub_related,
|
||||
uuid=sub_technique.get("id"),
|
||||
value=sub_technique.get("name"),
|
||||
)
|
||||
|
||||
# Code for handling duplicate from Tidal API data (hopefully only temporary)
|
||||
if sub_value.uuid == "be637d66-5110-4872-bc15-63b062c3f290":
|
||||
sub_value.value = "Botnet - Duplicate"
|
||||
elif sub_value.uuid == "5c6c3492-5dbc-43ee-a3f2-ba1976d3b379":
|
||||
sub_value.value = "DNS - Duplicate"
|
||||
elif sub_value.uuid == "83e4f633-67fb-4d87-b1b3-8a7a2e60778b":
|
||||
sub_value.value = "DNS Server - Duplicate"
|
||||
elif sub_value.uuid == "b9f5f6b7-ecff-48c8-a23e-c58fd9e41a0d":
|
||||
sub_value.value = "Domains - Duplicate"
|
||||
elif sub_value.uuid == "6e4a0960-dcdc-4e42-9aa1-70d6fc3677b2":
|
||||
sub_value.value = "Server - Duplicate"
|
||||
elif sub_value.uuid == "c30faf84-496b-4f27-a4bc-aa36d583c69f":
|
||||
sub_value.value = "Serverless - Duplicate"
|
||||
elif sub_value.uuid == "2c04d7c8-67a3-4b1a-bd71-47b7c5a54b23":
|
||||
sub_value.value = "Virtual Private Server - Duplicate"
|
||||
elif sub_value.uuid == "2e883e0d-1108-431a-a2dd-98ba98b69417":
|
||||
sub_value.value = "Web Services - Duplicate"
|
||||
elif sub_value.uuid == "d76c3dde-dba5-4748-8d51-c93fc34f885e":
|
||||
sub_value.value = "Cloud Account - Duplicate"
|
||||
elif sub_value.uuid == "12908bde-a5eb-40a5-ae27-d93960d0bfdc":
|
||||
sub_value.value = "Domain Account - Duplicate"
|
||||
elif sub_value.uuid == "df5f6835-ca0a-4ef5-bb3a-b011e4025545":
|
||||
sub_value.value = "Local Account - Duplicate"
|
||||
elif sub_value.uuid == "3c4a2f3a-5877-4a27-a417-76318523657e":
|
||||
sub_value.value = "Cloud Accounts - Duplicate"
|
||||
elif sub_value.uuid == "4b187604-88ab-4972-9836-90a04c705e10":
|
||||
sub_value.value = "Cloud Accounts - Duplicate2"
|
||||
elif sub_value.uuid == "49ae7bf1-a313-41d6-ad4c-74efc4c80ab6":
|
||||
sub_value.value = "Email Accounts - Duplicate"
|
||||
elif sub_value.uuid == "3426077d-3b9c-4f77-a1c6-d68f0dea670e":
|
||||
sub_value.value = "Social Media Accounts - Duplicate"
|
||||
elif sub_value.uuid == "fe595943-f264-4d05-a8c7-7afc8985bfc3":
|
||||
sub_value.value = "Code Repositories - Duplicate"
|
||||
elif sub_value.uuid == "2735f8d1-0e46-4cd7-bfbb-78941bb266fd":
|
||||
sub_value.value = "Steganography - Duplicate"
|
||||
elif sub_value.uuid == "6f152555-36a5-4ec9-8b9b-f0b32c3ccef8":
|
||||
sub_value.value = "Code Signing Certificates - Duplicate"
|
||||
elif sub_value.uuid == "5bcbb0c5-7061-481f-a677-09028a6c59f7":
|
||||
sub_value.value = "Digital Certificates - Duplicate"
|
||||
elif sub_value.uuid == "4c0db4e5-14e0-4fb7-88b0-bb391ce5ad58":
|
||||
sub_value.value = "Digital Certificates - Duplicate2"
|
||||
elif sub_value.uuid == "5a57d258-0b23-431b-b50e-3150d2c0e52c":
|
||||
sub_value.value = "Exploits - Duplicate"
|
||||
elif sub_value.uuid == "0f77a14a-d450-4885-b81f-23eeffa53a7e":
|
||||
sub_value.value = "Malware - Duplicate"
|
||||
elif sub_value.uuid == "ba553ad4-5699-4458-ae4e-76e1faa43291":
|
||||
sub_value.value = "Spearphishing Attachment - Duplicate"
|
||||
elif sub_value.uuid == "d08a9977-9fc2-46bb-84f9-dbb5187c426d":
|
||||
sub_value.value = "Spearphishing Link - Duplicate"
|
||||
elif sub_value.uuid == "350c12a3-33f6-5942-8892-4d6e70abbfc1":
|
||||
sub_value.value = "Spearphishing Voice - Duplicate"
|
||||
|
||||
self.values.append(sub_value.return_value())
|
||||
related.append(
|
||||
{
|
||||
"dest-uuid": sub_technique.get("id"),
|
||||
"type": "similar",
|
||||
}
|
||||
)
|
||||
|
||||
value = ClusterValue(
|
||||
description=entry.get("description"),
|
||||
meta=meta,
|
||||
related=related,
|
||||
uuid=entry.get("id"),
|
||||
value=entry.get("name"),
|
||||
)
|
||||
self.values.append(value.return_value())
|
||||
|
||||
|
||||
class TacticCluster(Cluster):
    """Galaxy cluster for Tidal Cyber tactics.

    Converts entries returned by the Tidal API ``/tactics`` endpoint into
    MISP galaxy cluster values, linking each tactic to its techniques via
    "uses" relations.
    """

    def __init__(
        self,
        authors: str,
        category: str,
        description: str,
        name: str,
        source: str,
        type: str,
        uuid: str,
        version: int,
    ):
        super().__init__(authors, category, description, name, source, type, uuid, version)

    def add_values(self, data):
        """Populate ``self.values`` from an API response dict.

        ``data["data"]`` is expected to be a list of tactic entries; each
        entry becomes one cluster value (via ``ClusterValue.return_value()``).
        """
        for entry in data["data"]:
            meta = TacticMeta(
                source=entry.get("source"),
                tactic_attack_id=entry.get("tactic_attack_id"),
                ordinal_position=str(entry.get("ordinal_position")),
                # Guard against a missing/null "tags" field in the payload;
                # iterating None would raise TypeError.
                tags=[x.get("tag") for x in entry.get("tags") or []],
                owner=entry.get("owner_name"),
            )
            related = [
                {
                    "dest-uuid": relation.get("technique_id"),
                    "type": "uses",
                }
                # Same guard for a missing/null "techniques" field.
                for relation in entry.get("techniques") or []
            ]
            value = ClusterValue(
                description=entry.get("description"),
                meta=meta,
                related=related,
                uuid=entry.get("id"),
                value=entry.get("name"),
            )
            self.values.append(value.return_value())
|
||||
|
||||
|
||||
class ReferencesCluster(Cluster):
    """Galaxy cluster for Tidal Cyber references.

    Converts entries returned by the Tidal API ``/references`` endpoint
    into MISP galaxy cluster values. References carry no relations.
    """

    def __init__(
        self,
        authors: str,
        category: str,
        description: str,
        name: str,
        source: str,
        type: str,
        uuid: str,
        version: int,
    ):
        super().__init__(authors, category, description, name, source, type, uuid, version)

    def add_values(self, data):
        """Populate ``self.values`` from an API response dict.

        ``data["data"]`` is expected to be a list of reference entries.
        """
        for entry in data["data"]:
            url = entry.get("url")
            meta = ReferencesMeta(
                source=entry.get("source"),
                # Only emit refs when a URL is actually present. The
                # previous check (`url != ""`) let a null/missing URL slip
                # through as ``refs=[None]``.
                refs=[url] if url else None,
                title=entry.get("title"),
                author=entry.get("author"),
                date_accessed=entry.get("date_accessed"),
                date_published=entry.get("date_published"),
                owner=entry.get("owner_name"),
            )
            value = ClusterValue(
                description=entry.get("description"),
                meta=meta,
                related=[],
                uuid=entry.get("id"),
                value=entry.get("name"),
            )
            self.values.append(value.return_value())
|
||||
|
||||
|
||||
class CampaignsCluster(Cluster):
    """Galaxy cluster for Tidal Cyber campaigns.

    Converts entries returned by the Tidal API ``/campaigns`` endpoint
    into MISP galaxy cluster values. Campaigns currently carry no
    relations.
    """

    def __init__(
        self,
        authors: str,
        category: str,
        description: str,
        name: str,
        source: str,
        type: str,
        uuid: str,
        version: int,
    ):
        super().__init__(authors, category, description, name, source, type, uuid, version)

    def add_values(self, data):
        """Populate ``self.values`` from an API response dict.

        ``data["data"]`` is expected to be a list of campaign entries.
        """
        for entry in data["data"]:
            meta = CampaignsMeta(
                source=entry.get("source"),
                campaign_attack_id=entry.get("campaign_attack_id"),
                first_seen=entry.get("first_seen"),
                last_seen=entry.get("last_seen"),
                # Guard against a missing/null "tags" field in the payload;
                # iterating None would raise TypeError.
                tags=[x.get("tag") for x in entry.get("tags") or []],
                owner=entry.get("owner_name"),
            )
            value = ClusterValue(
                description=entry.get("description"),
                meta=meta,
                related=[],
                uuid=entry.get("id"),
                value=entry.get("name"),
            )
            self.values.append(value.return_value())
|
17
tools/tidal-api/models/galaxy.py
Normal file
17
tools/tidal-api/models/galaxy.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
import json
|
||||
from dataclasses import dataclass, asdict
|
||||
|
||||
|
||||
@dataclass
class Galaxy:
    """Top-level MISP galaxy descriptor.

    Holds the metadata that describes a galaxy file (as opposed to its
    cluster values) and knows how to serialize itself to disk.
    """

    description: str
    icon: str
    name: str
    namespace: str
    type: str
    uuid: str
    version: str

    def save_to_file(self, path: str):
        """Write this galaxy to *path* as pretty-printed (indent=4) JSON.

        Fix: open the file with an explicit UTF-8 encoding so the output
        does not depend on the platform default encoding (e.g. cp1252 on
        Windows), which would fail on non-ASCII description text.
        """
        with open(path, "w", encoding="utf-8") as file:
            file.write(json.dumps(asdict(self), indent=4))
|
Loading…
Reference in a new issue