Mirror of https://github.com/MISP/misp-galaxy.git (synced 2024-11-25 16:27:19 +00:00; 4841 lines, 215 KiB, JSON)
{
"authors": [
"DISARM Project"
],
"category": "disarm",
"description": "DISARM is a framework designed for describing and understanding disinformation incidents.",
"name": "Techniques",
"source": "https://github.com/DISARMFoundation/DISARMframeworks",
"type": "disarm-techniques",
"uuid": "fb0e6978-2647-5a95-8324-7ebfd2f88b67",
"values": [
{
"description": "Organise citizens around pro-state messaging. Coordinate paid or volunteer groups to push state propaganda.",
|
||
"meta": {
|
||
"external_id": "T0002",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0002.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "559c0ffb-5098-55bd-a173-fbd335bb4ba7",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "eededb33-c7f5-5c1b-bacd-924af3b6e332",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "e6e01a8f-bb6f-5b16-b77d-61f0abbc84a5",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "5f6e9165-2d53-52af-883a-13fff2749308",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "4fc5ef29-5af2-5630-b57a-4df9844932c1",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "782afafa-e997-571a-9b25-d04bb322480c",
|
||
"value": "Facilitate State Propaganda"
|
||
},
|
||
{
|
||
"description": "Use or adapt existing narrative themes, where narratives are the baseline stories of a target audience. Narratives form the bedrock of our worldviews. New information is understood through a process firmly grounded in this bedrock. If new information is not consitent with the prevailing narratives of an audience, it will be ignored. Effective campaigns will frame their misinformation in the context of these narratives. Highly effective campaigns will make extensive use of audience-appropriate archetypes and meta-narratives throughout their content creation and amplifiction practices.",
|
||
"meta": {
|
||
"external_id": "T0003",
|
||
"kill_chain": [
|
||
"tactics:Develop Narratives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0003.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "a706cf86-2170-58ea-a6d4-79cc47f23e54",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "c14e8f7d-3c10-5cb5-a654-9d23315d631c",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "689e65f1-d834-581a-adf2-4e8a96d32464",
|
||
"value": "Leverage Existing Narratives"
|
||
},
|
||
{
|
||
"description": "Advance competing narratives connected to same issue ie: on one hand deny incident while at same time expresses dismiss. Suppressing or discouraging narratives already spreading requires an alternative. The most simple set of narrative techniques in response would be the construction and promotion of contradictory alternatives centred on denial, deflection, dismissal, counter-charges, excessive standards of proof, bias in prohibition or enforcement, and so on. These competing narratives allow loyalists cover, but are less compelling to opponents and fence-sitters than campaigns built around existing narratives or highly explanatory master narratives. Competing narratives, as such, are especially useful in the \"firehose of misinformation\" approach.",
|
||
"meta": {
|
||
"external_id": "T0004",
|
||
"kill_chain": [
|
||
"tactics:Develop Narratives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0004.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "d14b6626-052a-5d30-a0da-598e1490bdaa",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "73bfaf89-d10a-5515-83fb-bc5ba11f5a2a",
|
||
"value": "Develop Competing Narratives"
|
||
},
|
||
{
|
||
"description": "Create key social engineering assets needed to amplify content, manipulate algorithms, fool public and/or specific incident/campaign targets. Computational propaganda depends substantially on false perceptions of credibility and acceptance. By creating fake users and groups with a variety of interests and commitments, attackers can ensure that their messages both come from trusted sources and appear more widely adopted than they actually are.",
|
||
"meta": {
|
||
"external_id": "T0007",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0007.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "0aa00b22-361f-5e5b-ac46-901cf6d2dfcc",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "a374c7de-23fd-5cae-805c-83817cb66d66",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "42827d89-3a37-568e-9de3-8ebd379c3d8f",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "e39234ab-979c-51c8-8f34-5a9337bd030e",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "6fc10d9d-96a5-5ae1-a0f7-0136a9819a6e",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "345ea7b9-1504-57cf-9c8f-7b01613d89e6",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "e3cbbc7a-da73-50fb-9893-4ce88edb211f",
|
||
"value": "Create Inauthentic Social Media Pages and Groups"
|
||
},
|
||
{
|
||
"description": "Stories planted or promoted in computational propaganda operations often make use of experts fabricated from whole cloth, sometimes specifically for the story itself.",
|
||
"meta": {
|
||
"external_id": "T0009",
|
||
"kill_chain": [
|
||
"tactics:Establish Legitimacy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0009.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "486790ce-6083-5403-b971-d4c0b291b6a4",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "e9cf452f-3ebc-5de8-9f21-dde3133c92c0",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "fea19aab-9522-55bf-9608-addf7d6aaf8d",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "2fe43d88-db8f-5156-98fb-4b9db0e5fff3",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "25ca7eff-d789-5c36-a49d-34194b7246d4",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "bd602fee-4354-5b31-99f1-832053c1bba0",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "29768133-b941-5974-ab10-c15bbb86e387",
|
||
"value": "Create Fake Experts"
|
||
},
|
||
{
|
||
"description": "Utilise Academic/Pseudoscientific Justifications",
|
||
"meta": {
|
||
"external_id": "T0009.001",
|
||
"kill_chain": [
|
||
"tactics:Establish Legitimacy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0009.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "4c721f5a-101e-5b5e-b260-7b08b92eac83",
|
||
"value": "Utilise Academic/Pseudoscientific Justifications"
|
||
},
|
||
{
|
||
"description": "Cultivate propagandists for a cause, the goals of which are not fully comprehended, and who are used cynically by the leaders of the cause. Independent actors use social media and specialised web sites to strategically reinforce and spread messages compatible with their own. Their networks are infiltrated and used by state media disinformation organisations to amplify the state’s own disinformation strategies against target populations. Many are traffickers in conspiracy theories or hoaxes, unified by a suspicion of Western governments and mainstream media. Their narratives, which appeal to leftists hostile to globalism and military intervention and nationalists against immigration, are frequently infiltrated and shaped by state-controlled trolls and altered news items from agencies such as RT and Sputnik. Also know as \"useful idiots\" or \"unwitting agents\".",
|
||
"meta": {
|
||
"external_id": "T0010",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0010.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "60e783f2-4e22-5495-abdf-cb73e1a5a4c1",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "e4a0a68e-dbf8-56b6-8029-2d670aad813c",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "70fb13e3-1d1f-5064-bf27-c3644f0a2045",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "943ccc85-a339-5e32-ade9-09bc4bf6b4fd",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "686ccd43-c358-5d5d-bd42-3e2279151670",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "f99b442a-7c34-5c66-b699-64c3da69374c",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "efd2cf88-94ba-5fdc-8611-660baf44b2e9",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "babb37b7-5b28-5ba4-8f08-5c7c271937c7",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "d12e5fda-c5d3-5cc5-806b-69edb56aac61",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "a479d596-6f66-53eb-ae24-d3a67536464f",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "3055e156-f234-5293-9ab2-d9761a620060",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "8c281e28-298e-5c1b-8e44-f768006d6c26",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "39baec3d-f2ce-5fee-ba7d-3db7d6469946",
|
||
"value": "Cultivate Ignorant Agents"
|
||
},
|
||
{
|
||
"description": "Create media assets to support inauthentic organisations (e.g. think tank), people (e.g. experts) and/or serve as sites to distribute malware/launch phishing operations.",
|
||
"meta": {
|
||
"external_id": "T0013",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0013.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "594ec374-28b9-5191-8bb7-edd9196daf4e",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "f2adbe9e-7c80-504d-adc5-624e04eab4f1",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "534951bc-8d1e-58be-b051-c9243eac96fb",
|
||
"value": "Create Inauthentic Websites"
|
||
},
|
||
{
|
||
"description": "Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns (see: Develop Information Pathways) to promote operation messaging while raising money to support its activities.",
|
||
"meta": {
|
||
"external_id": "T0014",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0014.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "c112eafc-0f31-5bd7-9083-20ef706c8d57",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "5b5c3e04-acf2-50dd-9861-c44bcc8f2cc3",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "bbb8b174-44b6-5f59-bcf0-eab169bc7be1",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "f0bb5056-fedb-5507-8554-c958ec8d9fdc",
|
||
"value": "Prepare Fundraising Campaigns"
|
||
},
|
||
{
|
||
"description": "Raising funds from malign actors may include contributions from foreign agents, cutouts or proxies, shell companies, dark money groups, etc.",
|
||
"meta": {
|
||
"external_id": "T0014.001",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0014.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d23f9cc0-058e-5354-b2c6-90e7b6737922",
|
||
"value": "Raise Funds from Malign Actors"
|
||
},
|
||
{
|
||
"description": "Raising funds from ignorant agents may include scams, donations intended for one stated purpose but then used for another, etc.",
|
||
"meta": {
|
||
"external_id": "T0014.002",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0014.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "dc89eee0-bf5e-51f0-957d-0e9e8a2cceff",
|
||
"value": "Raise Funds from Ignorant Agents"
|
||
},
|
||
{
|
||
"description": "Create one or more hashtags and/or hashtag groups. Many incident-based campaigns will create hashtags to promote their fabricated event. Creating a hashtag for an incident can have two important effects: 1. Create a perception of reality around an event. Certainly only \"real\" events would be discussed in a hashtag. After all, the event has a name!, and 2. Publicise the story more widely through trending lists and search behaviour. Asset needed to direct/control/manage \"conversation\" connected to launching new incident/campaign with new hashtag for applicable social media sites).",
|
||
"meta": {
|
||
"external_id": "T0015",
|
||
"kill_chain": [
|
||
"tactics:Develop Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0015.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "677c9e22-42b6-5b78-985c-e72936ae1b3f",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "6d3c1c71-746e-5e9d-9960-4845d712c899",
|
||
"value": "Create Hashtags and Search Artefacts"
|
||
},
|
||
{
|
||
"description": "Create attention grabbing headlines (outrage, doubt, humour) required to drive traffic & engagement. This is a key asset.",
|
||
"meta": {
|
||
"external_id": "T0016",
|
||
"kill_chain": [
|
||
"tactics:Microtarget"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0016.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "c52274ce-09fe-5b50-b2f2-741be794da6e",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "d8b22aa2-cbb6-5a97-b9ad-32eaf3514768",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "4880efa6-1123-5703-9c44-9f0600670dd9",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "d6182931-a76a-59e6-901e-f63b9f6d2301",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "e9380fb8-fc55-5ac7-94ab-2af4c13b6361",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "9570ebf8-f69b-5064-a627-a19cb429d0f5",
|
||
"value": "Create Clickbait"
|
||
},
|
||
{
|
||
"description": "Fundraising campaigns refer to an influence operation’s systematic effort to seek financial support for a charity, cause, or other enterprise using online activities that further promote operation information pathways while raising a profit. Many influence operations have engaged in crowdfunding services166 on platforms including Tipee, Patreon, and GoFundMe. An operation may use its previously prepared fundraising campaigns to promote operation messaging while raising money to support its activities.",
|
||
"meta": {
|
||
"external_id": "T0017",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0017.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "99575145-b81a-5c7d-8107-e2ad419b3e20",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "f3cd91e7-b21a-529f-82bd-dd3aa3c3106b",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "7f21fe4b-d314-5511-a9b1-0b9fcfee8b5e",
|
||
"value": "Conduct Fundraising"
|
||
},
|
||
{
|
||
"description": "An influence operation may Conduct Crowdfunding Campaigns on platforms such as GoFundMe, GiveSendGo, Tipeee, Patreon, etc.",
|
||
"meta": {
|
||
"external_id": "T0017.001",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0017.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "999145bb-914b-5f7e-b47e-8756af2f5484",
|
||
"value": "Conduct Crowdfunding Campaigns"
|
||
},
|
||
{
|
||
"description": "Create or fund advertisements targeted at specific populations",
|
||
"meta": {
|
||
"external_id": "T0018",
|
||
"kill_chain": [
|
||
"tactics:Microtarget"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0018.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "14dad601-4ddd-5cfd-a48d-9b53212769ce",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "87208979-6982-53d5-ad0f-49cef659555c",
|
||
"value": "Purchase Targeted Advertisements"
|
||
},
|
||
{
|
||
"description": "Iteratively test incident performance (messages, content etc), e.g. A/B test headline/content enagagement metrics; website and/or funding campaign conversion rates",
|
||
"meta": {
|
||
"external_id": "T0020",
|
||
"kill_chain": [
|
||
"tactics:Conduct Pump Priming"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0020.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "fd9e1e82-0c60-5bf6-a9a2-4f29ca6d39ba",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "30be4903-350a-505c-9166-fa65b8894778",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "635f5592-0e2a-5f06-b164-c5af2ec9ef5e",
|
||
"value": "Trial Content"
|
||
},
|
||
{
|
||
"description": "\"Conspiracy narratives\" appeal to the human desire for explanatory order, by invoking the participation of poweful (often sinister) actors in pursuit of their own political goals. These narratives are especially appealing when an audience is low-information, marginalised or otherwise inclined to reject the prevailing explanation. Conspiracy narratives are an important component of the \"firehose of falsehoods\" model.",
|
||
"meta": {
|
||
"external_id": "T0022",
|
||
"kill_chain": [
|
||
"tactics:Develop Narratives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0022.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "90ec53d1-ed62-516d-b530-49cb3e9e851c",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "cbba0c79-9287-5ead-a70a-34f740b4325f",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "877c29b5-38ae-570a-93b3-9e4e70ec27ef",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "047898ad-129f-5d18-9dea-985e7f68becd",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "af04983d-8531-54df-a7c7-39e5ae6bb523",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "f1d52ce1-f431-5732-a071-215cb3306f3e",
|
||
"value": "Leverage Conspiracy Theory Narratives"
|
||
},
|
||
{
|
||
"description": "An influence operation may amplify an existing conspiracy theory narrative that aligns with its incident or campaign goals. By amplifying existing conspiracy theory narratives, operators can leverage the power of the existing communities that support and propagate those theories without needing to expend resources creating new narratives or building momentum and buy in around new narratives.",
|
||
"meta": {
|
||
"external_id": "T0022.001",
|
||
"kill_chain": [
|
||
"tactics:Develop Narratives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0022.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "f3c7a9c8-9196-5b2f-8d10-46ca31380987",
|
||
"value": "Amplify Existing Conspiracy Theory Narratives"
|
||
},
|
||
{
|
||
"description": "While this requires more resources than amplifying existing conspiracy theory narratives, an influence operation may develop original conspiracy theory narratives in order to achieve greater control and alignment over the narrative and their campaign goals. Prominent examples include the USSR's Operation INFEKTION disinformation campaign run by the KGB in the 1980s to plant the idea that the United States had invented HIV/AIDS as part of a biological weapons research project at Fort Detrick, Maryland. More recently, Fort Detrick featured prominently in a new conspiracy theory narratives around the origins of the COVID-19 outbreak and pandemic.",
|
||
"meta": {
|
||
"external_id": "T0022.002",
|
||
"kill_chain": [
|
||
"tactics:Develop Narratives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0022.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b90838cb-7124-5f07-9fa6-94f0b5b21343",
|
||
"value": "Develop Original Conspiracy Theory Narratives"
|
||
},
|
||
{
|
||
"description": "Change, twist, or exaggerate existing facts to construct a narrative that differs from reality. Examples: images and ideas can be distorted by being placed in an improper content",
|
||
"meta": {
|
||
"external_id": "T0023",
|
||
"kill_chain": [
|
||
"tactics:Develop Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0023.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "1993a35d-d276-569b-ba66-66623f982dc4",
|
||
"value": "Distort Facts"
|
||
},
|
||
{
|
||
"description": "Reframing context refers to removing an event from its surrounding context to distort its intended meaning. Rather than deny that an event occurred, reframing context frames an event in a manner that may lead the target audience to draw a different conclusion about its intentions.",
|
||
"meta": {
|
||
"external_id": "T0023.001",
|
||
"kill_chain": [
|
||
"tactics:Develop Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0023.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "c887503d-e5f5-5f06-a92a-9e50ec908eb6",
|
||
"value": "Reframe Context"
|
||
},
|
||
{
|
||
"description": "An influence operation may edit open-source content, such as collaborative blogs or encyclopaedias, to promote its narratives on outlets with existing credibility and audiences. Editing open-source content may allow an operation to post content on platforms without dedicating resources to the creation and maintenance of its own assets.",
|
||
"meta": {
|
||
"external_id": "T0023.002",
|
||
"kill_chain": [
|
||
"tactics:Develop Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0023.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b4984f13-619b-54a7-bf2c-acc5cdc01437",
|
||
"value": "Edit Open-Source Content"
|
||
},
|
||
{
|
||
"description": "Create fake online polls, or manipulate existing online polls. Data gathering tactic to target those who engage, and potentially their networks of friends/followers as well",
|
||
"meta": {
|
||
"external_id": "T0029",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0029.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "e23dbc10-0eca-5100-bf14-cf2db9db31b8",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "01b3516b-b8b1-5a56-ae24-5300cceb70f8",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "a64a6568-d047-55b9-a3ab-f77fb3c9ada3",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "e21e17e9-3834-59de-bc31-9e43b73c8973",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "0280f954-5654-5d23-8dcf-a1f5334f3e6a",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "d3216499-77fd-528e-8b65-7c3bded9adda",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "61aa4bb6-218c-5a10-9f1c-1a494f6871e7",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "1a8c4e8c-3543-5ab1-b4d0-939de9e7875f",
|
||
"value": "Online Polls"
|
||
},
|
||
{
|
||
"description": "Influencers are people on social media platforms who have large audiences. \n\nThreat Actors can try to trick Influencers such as celebrities, journalists, or local leaders who aren’t associated with their campaign into amplifying campaign content. This gives them access to the Influencer’s audience without having to go through the effort of building it themselves, and it helps legitimise their message by associating it with the Influencer, benefitting from their audience’s trust in them.",
|
||
"meta": {
|
||
"external_id": "T0039",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0039.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "47ad777e-029c-5afa-9c37-50c3d86f3d39",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "1744386c-0d46-54a8-a5b8-cba1bd7dc369",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "a1441814-0d69-5b19-9dae-64c61d7dfdbd",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "590350b9-2614-572b-825b-b2498ebf4c17",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "f29dff54-af05-55d1-a056-899007481493",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "53e8c51b-c178-5429-8cee-022c6741cc91",
|
||
"value": "Bait Influencer"
|
||
},
|
||
{
|
||
"description": "Campaigns often leverage tactical and informational asymmetries on the threat surface, as seen in the Distort and Deny strategies, and the \"firehose of misinformation\". Specifically, conspiracy theorists can be repeatedly wrong, but advocates of the truth need to be perfect. By constantly escalating demands for proof, propagandists can effectively leverage this asymmetry while also priming its future use, often with an even greater asymmetric advantage. The conspiracist is offered freer rein for a broader range of \"questions\" while the truth teller is burdened with higher and higher standards of proof.",
|
||
"meta": {
|
||
"external_id": "T0040",
|
||
"kill_chain": [
|
||
"tactics:Develop Narratives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0040.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "b942a92e-13d5-5726-92bd-4fb5531f381b",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "70218fb2-3d85-5714-b990-2d18e345e184",
|
||
"value": "Demand Insurmountable Proof"
|
||
},
|
||
{
|
||
"description": "Wrap lies or altered context/facts around truths. Influence campaigns pursue a variety of objectives with respect to target audiences, prominent among them: 1. undermine a narrative commonly referenced in the target audience; or 2. promote a narrative less common in the target audience, but preferred by the attacker. In both cases, the attacker is presented with a heavy lift. They must change the relative importance of various narratives in the interpretation of events, despite contrary tendencies. When messaging makes use of factual reporting to promote these adjustments in the narrative space, they are less likely to be dismissed out of hand; when messaging can juxtapose a (factual) truth about current affairs with the (abstract) truth explicated in these narratives, propagandists can undermine or promote them selectively. Context matters.",
|
||
"meta": {
|
||
"external_id": "T0042",
|
||
"kill_chain": [
|
||
"tactics:Conduct Pump Priming"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0042.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "ab4b4b44-5f15-5c92-934b-30cc73f67afc",
|
||
"value": "Seed Kernel of Truth"
|
||
},
|
||
{
|
||
"description": "Direct messaging via chat app is an increasing method of delivery. These messages are often automated and new delivery and storage methods make them anonymous, viral, and ephemeral. This is a difficult space to monitor, but also a difficult space to build acclaim or notoriety.",
|
||
"meta": {
|
||
"external_id": "T0043",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0043.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "b8b49628-4c3d-528c-90f1-2fee8722e2c5",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "ebd0aab4-013c-52fa-bae5-8fb3bd7704b8",
|
||
"value": "Chat Apps"
|
||
},
|
||
{
|
||
"description": "Examples include Signal, WhatsApp, Discord, Wire, etc.",
|
||
"meta": {
|
||
"external_id": "T0043.001",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0043.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7308289b-5875-5015-bead-adf63a552c28",
|
||
"value": "Use Encrypted Chat Apps"
|
||
},
|
||
{
|
||
"description": "Examples include SMS, etc.",
|
||
"meta": {
|
||
"external_id": "T0043.002",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0043.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "211e93c2-463a-5271-9384-61a6b8ca4af6",
|
||
"value": "Use Unencrypted Chats Apps"
|
||
},
|
||
{
|
||
"description": "Try a wide variety of messages in the early hours surrounding an incident or event, to give a misleading account or impression.",
|
||
"meta": {
|
||
"external_id": "T0044",
|
||
"kill_chain": [
|
||
"tactics:Conduct Pump Priming"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0044.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "2ab07267-614d-538b-8fef-bc6ed250a483",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "3ce6759d-b144-5277-9798-2df09ce9a6c9",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "0fd25b71-ea11-51a3-bb18-545d5e818583",
|
||
"value": "Seed Distortions"
|
||
},
|
||
{
|
||
"description": "Use the fake experts that were set up during Establish Legitimacy. Pseudo-experts are disposable assets that often appear once and then disappear. Give \"credility\" to misinformation. Take advantage of credential bias",
|
||
"meta": {
|
||
"external_id": "T0045",
|
||
"kill_chain": [
|
||
"tactics:Conduct Pump Priming"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0045.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "a44e7e6c-f6ca-5ea4-9fe8-6dc798b2d729",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "2cd7227b-31be-5999-9f5f-927f9d8c3b6e",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "edc041f8-06ac-513a-a9f9-1353e38f3bcf",
|
||
"value": "Use Fake Experts"
|
||
},
|
||
{
|
||
"description": "Manipulate content engagement metrics (ie: Reddit & Twitter) to influence/impact news search results (e.g. Google), also elevates RT & Sputnik headline into Google news alert emails. aka \"Black-hat SEO\"",
|
||
"meta": {
|
||
"external_id": "T0046",
|
||
"kill_chain": [
|
||
"tactics:Conduct Pump Priming"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0046.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "5b9a74d0-bfa6-5bb9-9297-3938c067f783",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "68c3a917-fed7-539e-9cf6-091153658ef2",
|
||
"value": "Use Search Engine Optimisation"
|
||
},
|
||
{
|
||
"description": "Use political influence or the power of state to stop critical social media comments. Government requested/driven content take downs (see Google Transperancy reports).",
|
||
"meta": {
|
||
"external_id": "T0047",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0047.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "0eb2eaf4-0c53-527d-8744-70670f6a9736",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "deb56d12-fd4d-515a-9051-89a372d5d4bb",
|
||
"value": "Censor Social Media as a Political Force"
|
||
},
|
||
{
|
||
"description": "Threatening or harassing believers of opposing narratives refers to the use of intimidation techniques, including cyberbullying and doxing, to discourage opponents from voicing their dissent. An influence operation may threaten or harass believers of the opposing narratives to deter individuals from posting or proliferating conflicting content.",
|
||
"meta": {
|
||
"external_id": "T0048",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0048.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "cb33d6fe-0327-58c1-93ad-10684fe9e099",
|
||
"value": "Harass"
|
||
},
|
||
{
|
||
"description": "Cancel culture refers to the phenomenon in which individuals collectively refrain from supporting an individual, organisation, business, or other entity, usually following a real or falsified controversy. An influence operation may exploit cancel culture by emphasising an adversary’s problematic or disputed behaviour and presenting its own content as an alternative.",
|
||
"meta": {
|
||
"external_id": "T0048.001",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0048.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "65c98713-cae5-5ae4-ae17-5902d7d1cfc4",
|
||
"value": "Boycott/\"Cancel\" Opponents"
|
||
},
|
||
{
|
||
"description": "Examples include social identities like gender, sexuality, race, ethnicity, religion, ability, nationality, etc. as well as roles and occupations like journalist or activist.",
|
||
"meta": {
|
||
"external_id": "T0048.002",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0048.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "a40e4177-42f2-5be2-89cf-1dd4eadaad13",
|
||
"value": "Harass People Based on Identities"
|
||
},
|
||
{
|
||
"description": "Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content.",
|
||
"meta": {
|
||
"external_id": "T0048.003",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0048.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "c1df0074-7e66-5b71-85cb-784b1be15c48",
|
||
"value": "Threaten to Dox"
|
||
},
|
||
{
|
||
"description": "Doxing refers to online harassment in which individuals publicly release private information about another individual, including names, addresses, employment information, pictures, family members, and other sensitive information. An influence operation may dox its opposition to encourage individuals aligned with operation narratives to harass the doxed individuals themselves or otherwise discourage the doxed individuals from posting or proliferating conflicting content.",
|
||
"meta": {
|
||
"external_id": "T0048.004",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0048.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "18e83c9c-8e16-55e2-a013-63e583e79e8e",
|
||
"value": "Dox"
|
||
},
|
||
{
|
||
"description": "Flooding sources of information (e.g. Social Media feeds) with a high volume of inauthentic content.\n\nThis can be done to control/shape online conversations, drown out opposing points of view, or make it harder to find legitimate information. \n\nBots and/or patriotic trolls are effective tools to achieve this effect.\n\nThis Technique previously used the name Flooding the Information Space.",
|
||
"meta": {
|
||
"external_id": "T0049",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "ae4b53ba-9dd6-53af-a624-d5929944117c",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "1d6622ba-a713-5133-9017-8eef36469936",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "ea1d787b-61f7-5fd6-8c52-54a64006e260",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "8b20ca17-c2d9-5879-bbf1-26de876c8e02",
|
||
"type": "detected-by"
|
||
},
|
||
{
|
||
"dest-uuid": "568f9e72-ca8c-54dd-976f-f9469bf026c1",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "ee7bc41a-9eb0-5732-924a-3885e1c3bee9",
|
||
"value": "Flood Information Space"
|
||
},
|
||
{
|
||
"description": "Use trolls to amplify narratives and/or manipulate narratives. Fake profiles/sockpuppets operating to support individuals/narratives from the entire political spectrum (left/right binary). Operating with increased emphasis on promoting local content and promoting real Twitter users generating their own, often divisive political content, as it's easier to amplify existing content than create new/original content. Trolls operate where ever there's a socially divisive issue (issues that can/are be politicized).",
|
||
"meta": {
|
||
"external_id": "T0049.001",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b126047b-eafa-50aa-891a-31250d13f50e",
|
||
"value": "Trolls Amplify and Manipulate"
|
||
},
|
||
{
|
||
"description": "Hashtags can be used by communities to collate information they post about particular topics (such as their interests, or current events) and users can find communities to join by exploring hashtags they’re interested in. \n\nThreat actors can flood an existing hashtag to try to ruin hashtag functionality, posting content unrelated to the hashtag alongside it, making it a less reliable source of relevant information. They may also try to flood existing hashtags with campaign content, with the intent of maximising exposure to users.\n\nThis Technique covers cases where threat actors flood existing hashtags with campaign content.\n\nThis Technique covers behaviours previously documented by T0019.002: Hijack Hashtags, which has since been deprecated. This Technique was previously called Hijack Existing Hashtag.",
|
||
"meta": {
|
||
"external_id": "T0049.002",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "885e8687-3598-5378-b0bf-f09b67c1696e",
|
||
"value": "Flood Existing Hashtag"
|
||
},
|
||
{
|
||
"description": "Automated forwarding and reposting refer to the proliferation of operation content using automated means, such as artificial intelligence or social media bots. An influence operation may use automated activity to increase content exposure without dedicating the resources, including personnel and time, traditionally required to forward and repost content. Use bots to amplify narratives above algorithm thresholds. Bots are automated/programmed profiles designed to amplify content (ie: automatically retweet or like) and give appearance it's more \"popular\" than it is. They can operate as a network, to function in a coordinated/orchestrated manner. In some cases (more so now) they are an inexpensive/disposable assets used for minimal deployment as bot detection tools improve and platforms are more responsive.",
|
||
"meta": {
|
||
"external_id": "T0049.003",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "78bd9a95-4aa4-5595-90de-839c65ff6542",
|
||
"value": "Bots Amplify via Automated Forwarding and Reposting"
|
||
},
|
||
{
|
||
"description": "Spamoflauge refers to the practice of disguising spam messages as legitimate. Spam refers to the use of electronic messaging systems to send out unrequested or unwanted messages in bulk. Simple methods of spamoflauge include replacing letters with numbers to fool keyword-based email spam filters, for example, \"you've w0n our jackp0t!\". Spamoflauge may extend to more complex techniques such as modifying the grammar or word choice of the language, casting messages as images which spam detectors cannot automatically read, or encapsulating messages in password protected attachments, such as .pdf or .zip files. Influence operations may use spamoflauge to avoid spam filtering systems and increase the likelihood of the target audience receiving operation messaging.",
|
||
"meta": {
|
||
"external_id": "T0049.004",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "36635199-0794-5cba-b494-5b54ebd0ca73",
|
||
"value": "Utilise Spamoflauge"
|
||
},
|
||
{
|
||
"description": "Swarming refers to the coordinated use of accounts to overwhelm the information space with operation content. Unlike information flooding, swarming centres exclusively around a specific event or actor rather than a general narrative. Swarming relies on “horizontal communication” between information assets rather than a top-down, vertical command-and-control approach.",
|
||
"meta": {
|
||
"external_id": "T0049.005",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b25835fd-4936-580f-9e40-03728f38badf",
|
||
"value": "Conduct Swarming"
|
||
},
|
||
{
|
||
"description": "Keyword squatting refers to the creation of online content, such as websites, articles, or social media accounts, around a specific search engine-optimized term to overwhelm the search results of that term. An influence may keyword squat to increase content exposure to target audience members who query the exploited term in a search engine and manipulate the narrative around the term.",
|
||
"meta": {
|
||
"external_id": "T0049.006",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.006.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "864a3b1d-6a1f-50b0-adef-e46cc4a88933",
|
||
"value": "Conduct Keyword Squatting"
|
||
},
|
||
{
|
||
"description": "Inauthentic sites circulate cross-post stories and amplify narratives. Often these sites have no masthead, bylines or attribution.",
|
||
"meta": {
|
||
"external_id": "T0049.007",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.007.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d8a87575-9e25-5e93-8bf6-8489fe70b864",
|
||
"value": "Inauthentic Sites Amplify News and Narratives"
|
||
},
|
||
{
|
||
"description": "Information Pollution occurs when threat actors attempt to ruin a source of information by flooding it with lots of inauthentic or unreliable content, intending to make it harder for legitimate users to find the information they’re looking for. \n\nThis subtechnique's objective is to reduce exposure to target information, rather than promoting exposure to campaign content, for which the parent technique T0049 can be used. \n\nAnalysts will need to infer what the motive for flooding an information space was when deciding whether to use T0049 or T0049.008 to tag a case when an information space is flooded. If such inference is not possible, default to T0049.\n\nThis Technique previously used the ID T0019.",
|
||
"meta": {
|
||
"external_id": "T0049.008",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0049.008.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "0bf3d2c3-db36-5175-99b0-6c82ad078937",
|
||
"value": "Generate Information Pollution"
|
||
},
|
||
{
|
||
"description": "Coordinate and promote real-world events across media platforms, e.g. rallies, protests, gatherings in support of incident narratives.",
|
||
"meta": {
|
||
"external_id": "T0057",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0057.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "a6b2f9bc-47fd-5303-8df8-0bb4ae4f4cbd",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "8e0b3604-c03c-5772-bccc-3a381ea6300a",
|
||
"type": "detected-by"
|
||
}
|
||
],
|
||
"uuid": "26c314bb-ed05-5dbe-b672-c16c2f0fff52",
|
||
"value": "Organise Events"
|
||
},
|
||
{
|
||
"description": "Paying for physical action occurs when an influence operation pays individuals to act in the physical realm. An influence operation may pay for physical action to create specific situations and frame them in a way that supports operation narratives, for example, paying a group of people to burn a car to later post an image of the burning car and frame it as an act of protest.",
|
||
"meta": {
|
||
"external_id": "T0057.001",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0057.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "c4f3903c-0a5f-5764-ab76-a7d3a4ee0afb",
|
||
"value": "Pay for Physical Action"
|
||
},
|
||
{
|
||
"description": "Symbolic action refers to activities specifically intended to advance an operation’s narrative by signalling something to the audience, for example, a military parade supporting a state’s narrative of military superiority. An influence operation may use symbolic action to create falsified evidence supporting operation narratives in the physical information space.",
|
||
"meta": {
|
||
"external_id": "T0057.002",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0057.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "055b66cb-0745-5f85-83c9-d9fb8e1684a2",
|
||
"value": "Conduct Symbolic Action"
|
||
},
|
||
{
|
||
"description": "Play the long game refers to two phenomena: 1. To plan messaging and allow it to grow organically without conducting your own amplification. This is methodical and slow and requires years for the message to take hold 2. To develop a series of seemingly disconnected messaging narratives that eventually combine into a new narrative.",
|
||
"meta": {
|
||
"external_id": "T0059",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0059.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "2a8e8fa2-6ac4-5e0b-b1fb-818362987687",
|
||
"value": "Play the Long Game"
|
||
},
|
||
{
|
||
"description": "continue narrative or message amplification after the main incident work has finished",
|
||
"meta": {
|
||
"external_id": "T0060",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0060.md"
|
||
]
|
||
},
|
||
"related": [
|
||
{
|
||
"dest-uuid": "80730d54-1dd1-5da6-baae-052553fbf27f",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "7ef86cff-4401-518b-92fc-a0d88c23f280",
|
||
"type": "blocked-by"
|
||
},
|
||
{
|
||
"dest-uuid": "49f92a32-bac9-56af-ac97-3b09f23b8fa6",
|
||
"type": "blocked-by"
|
||
}
|
||
],
|
||
"uuid": "6eb04152-8342-563a-9b9c-1e73aae2cc24",
|
||
"value": "Continue to Amplify"
|
||
},
|
||
{
|
||
"description": "Sell mechandise refers to getting the message or narrative into physical space in the offline world while making money",
|
||
"meta": {
|
||
"external_id": "T0061",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0061.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e9208787-0c74-5517-bdd5-add8476beb6a",
|
||
"value": "Sell Merchandise"
|
||
},
|
||
{
|
||
"description": "Create or coopt broadcast capabilities (e.g. TV, radio etc).",
|
||
"meta": {
|
||
"external_id": "T0065",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0065.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "8c763ea9-83ee-5ea6-91bb-5ab0dd981006",
|
||
"value": "Prepare Physical Broadcast Capabilities"
|
||
},
|
||
{
|
||
"description": "Plan to degrade an adversary’s image or ability to act. This could include preparation and use of harmful information about the adversary’s actions or reputation.",
|
||
"meta": {
|
||
"external_id": "T0066",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0066.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "30e32d3b-ece9-545b-b74f-82861e22c133",
|
||
"value": "Degrade Adversary"
|
||
},
|
||
{
|
||
"description": "Media attention on a story or event is heightened during a breaking news event, where unclear facts and incomplete information increase speculation, rumours, and conspiracy theories, which are all vulnerable to manipulation.",
|
||
"meta": {
|
||
"external_id": "T0068",
|
||
"kill_chain": [
|
||
"tactics:Develop Narratives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0068.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "df8d3fc5-efd3-54bf-baef-eaa6ec375f0f",
|
||
"value": "Respond to Breaking News Event or Active Crisis"
|
||
},
|
||
{
|
||
"description": "Create audience segmentations by features of interest to the influence campaign, including political affiliation, geographic location, income, demographics, and psychographics.",
|
||
"meta": {
|
||
"external_id": "T0072",
|
||
"kill_chain": [
|
||
"tactics:Target Audience Analysis"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0072.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d9bbfde8-dda3-5f20-a9ed-fbf021ecd8c1",
|
||
"value": "Segment Audiences"
|
||
},
|
||
{
|
||
"description": "An influence operation may target populations in a specific geographic location, such as a region, state, or city. An influence operation may use geographic segmentation to Create Localised Content (see: Establish Legitimacy).",
|
||
"meta": {
|
||
"external_id": "T0072.001",
|
||
"kill_chain": [
|
||
"tactics:Target Audience Analysis"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0072.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "3c3edffe-de30-5b0c-8005-8916dd92eb1e",
|
||
"value": "Geographic Segmentation"
|
||
},
|
||
{
|
||
"description": "An influence operation may target populations based on demographic segmentation, including age, gender, and income. Demographic segmentation may be useful for influence operations aiming to change state policies that affect a specific population sector. For example, an influence operation attempting to influence Medicare funding in the United States would likely target U.S. voters over 65 years of age.",
|
||
"meta": {
|
||
"external_id": "T0072.002",
|
||
"kill_chain": [
|
||
"tactics:Target Audience Analysis"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0072.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d19a9243-0fa0-5140-81c9-57442e8f7e25",
|
||
"value": "Demographic Segmentation"
|
||
},
|
||
{
|
||
"description": "An influence operation may target populations based on their income bracket, wealth, or other financial or economic division.",
|
||
"meta": {
|
||
"external_id": "T0072.003",
|
||
"kill_chain": [
|
||
"tactics:Target Audience Analysis"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0072.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "163b9226-7923-527f-802f-8865450db2f5",
|
||
"value": "Economic Segmentation"
|
||
},
|
||
{
|
||
"description": "An influence operation may target populations based on psychographic segmentation, which uses audience values and decision-making processes. An operation may individually gather psychographic data with its own surveys or collection tools or externally purchase data from social media companies or online surveys, such as personality quizzes.",
|
||
"meta": {
|
||
"external_id": "T0072.004",
|
||
"kill_chain": [
|
||
"tactics:Target Audience Analysis"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0072.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "474e292b-e866-5871-9ab6-395cc5aaa097",
|
||
"value": "Psychographic Segmentation"
|
||
},
|
||
{
|
||
"description": "An influence operation may target populations based on their political affiliations, especially when aiming to manipulate voting or change policy.",
|
||
"meta": {
|
||
"external_id": "T0072.005",
|
||
"kill_chain": [
|
||
"tactics:Target Audience Analysis"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0072.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "77ab671a-d532-50b7-ac02-2008d331164f",
|
||
"value": "Political Segmentation"
|
||
},
|
||
{
|
||
"description": "Determining the target audiences (segments of the population) who will receive campaign narratives and artefacts intended to achieve the strategic ends.",
|
||
"meta": {
|
||
"external_id": "T0073",
|
||
"kill_chain": [
|
||
"tactics:Plan Strategy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0073.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "872a110b-66ad-5854-aae5-a9725d227a5c",
|
||
"value": "Determine Target Audiences"
|
||
},
|
||
{
|
||
"description": "These are the long-term end-states the campaign aims to bring about. They typically involve an advantageous position vis-a-vis competitors in terms of power or influence. The strategic goal may be to improve or simply to hold one’s position. Competition occurs in the public sphere in the domains of war, diplomacy, politics, economics, and ideology, and can play out between armed groups, nation-states, political parties, corporations, interest groups, or individuals. ",
|
||
"meta": {
|
||
"external_id": "T0074",
|
||
"kill_chain": [
|
||
"tactics:Plan Strategy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0074.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d88805d4-273a-50fb-a24a-63df92592e20",
|
||
"value": "Determine Strategic Ends"
|
||
},
|
||
{
|
||
"description": "Favourable position on the international stage in terms of great power politics or regional rivalry. Geopolitics plays out in the realms of foreign policy, national security, diplomacy, and intelligence. It involves nation-state governments, heads of state, foreign ministers, intergovernmental organisations, and regional security alliances.",
"meta": {
"external_id": "T0074.001",
"kill_chain": [
"tactics:Plan Strategy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0074.001.md"
]
},
"related": [],
"uuid": "3c362b89-6b61-5ea9-ba32-4873594ee92d",
"value": "Geopolitical Advantage"
},
{
"description": "Favourable position vis-à-vis national or sub-national political opponents such as political parties, interest groups, politicians, candidates. ",
"meta": {
"external_id": "T0074.002",
"kill_chain": [
"tactics:Plan Strategy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0074.002.md"
]
},
"related": [],
"uuid": "a3ba0d23-3e22-5fb3-a4fd-074ab5bdc05a",
"value": "Domestic Political Advantage"
},
{
"description": "Favourable position domestically or internationally in the realms of commerce, trade, finance, industry. Economics involves nation-states, corporations, banks, trade blocs, industry associations, cartels. ",
"meta": {
"external_id": "T0074.003",
"kill_chain": [
"tactics:Plan Strategy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0074.003.md"
]
},
"related": [],
"uuid": "9fc9578f-db6c-5505-ac66-dbdb6e887c6f",
"value": "Economic Advantage"
},
{
"description": "Favourable position domestically or internationally in the market for ideas, beliefs, and world views. Competition plays out among faith systems, political systems, and value systems. It can involve sub-national, national or supra-national movements. ",
"meta": {
"external_id": "T0074.004",
"kill_chain": [
"tactics:Plan Strategy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0074.004.md"
]
},
"related": [],
"uuid": "c3156aaf-caf1-5188-836c-d5742cfc89fa",
"value": "Ideological Advantage"
},
{
"description": "Push back against criticism by dismissing your critics. This might be arguing that the critics use a different standard for you than with other actors or themselves; or arguing that their criticism is biassed.",
"meta": {
"external_id": "T0075",
"kill_chain": [
"tactics:Plan Objectives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0075.md"
]
},
"related": [],
"uuid": "3c33a91e-af4c-545d-bf54-a15fab753a11",
"value": "Dismiss"
},
{
"description": "Plan to delegitimize the media landscape and degrade public trust in reporting, by discrediting credible sources. This makes it easier to promote influence operation content.",
"meta": {
"external_id": "T0075.001",
"kill_chain": [
"tactics:Plan Objectives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0075.001.md"
]
},
"related": [],
"uuid": "be2a0989-a95f-5961-ba7d-0597078dca96",
"value": "Discredit Credible Sources"
},
{
"description": "Twist the narrative. Take information, or artefacts like images, and change the framing around them.",
"meta": {
"external_id": "T0076",
"kill_chain": [
"tactics:Plan Objectives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0076.md"
]
},
"related": [],
"uuid": "bd75892f-b84d-5b36-b2d9-34832832296b",
"value": "Distort"
},
{
"description": "Shift attention to a different narrative or actor, for instance by accusing critics of the same activity that they’ve accused you of (e.g. police brutality).",
"meta": {
"external_id": "T0077",
"kill_chain": [
"tactics:Plan Objectives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0077.md"
]
},
"related": [],
"uuid": "8c807754-1267-5662-99f4-02461410cb3d",
"value": "Distract"
},
{
"description": "Threaten the critic or narrator of events. For instance, threaten journalists or news outlets reporting on a story.",
"meta": {
"external_id": "T0078",
"kill_chain": [
"tactics:Plan Objectives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0078.md"
]
},
"related": [],
"uuid": "28400a1a-58f1-51ee-9e96-2c763279b990",
"value": "Dismay"
},
{
"description": "Create conflict between subgroups, to widen divisions in a community",
"meta": {
"external_id": "T0079",
"kill_chain": [
"tactics:Plan Objectives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0079.md"
]
},
"related": [],
"uuid": "45926a30-7c89-5c14-bf7b-86f8c9597d15",
"value": "Divide"
},
{
"description": "Mapping the target audience information environment analyses the information space itself, including social media analytics, web traffic, and media surveys. Mapping the information environment may help the influence operation determine the most realistic and popular information channels to reach its target audience. Mapping the target audience information environment aids influence operations in determining the most vulnerable areas of the information space to target with messaging.",
"meta": {
"external_id": "T0080",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0080.md"
]
},
"related": [],
"uuid": "649af4be-031b-55db-ab45-d82b3cec27c2",
"value": "Map Target Audience Information Environment"
},
{
"description": "An influence operation may use social media analytics to determine which factors will increase the operation content’s exposure to its target audience on social media platforms, including views, interactions, and sentiment relating to topics and content types. The social media platform itself or a third-party tool may collect the metrics.",
"meta": {
"external_id": "T0080.001",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0080.001.md"
]
},
"related": [],
"uuid": "a20c76bd-0b45-53f6-8cc5-6bc8a17289cf",
"value": "Monitor Social Media Analytics"
},
{
"description": "An influence operation may evaluate its own or third-party media surveys to determine what type of content appeals to its target audience. Media surveys may provide insight into an audience’s political views, social class, general interests, or other indicators used to tailor operation messaging to its target audience.",
"meta": {
"external_id": "T0080.002",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0080.002.md"
]
},
"related": [],
"uuid": "af0acad2-7020-56cb-9775-56f03bad5bcf",
"value": "Evaluate Media Surveys"
},
{
"description": "An influence operation may identify trending hashtags on social media platforms for later use in boosting operation content. A hashtag40 refers to a word or phrase preceded by the hash symbol (#) on social media used to identify messages and posts relating to a specific topic. All public posts that use the same hashtag are aggregated onto a centralised page dedicated to the word or phrase and sorted either chronologically or by popularity.",
"meta": {
"external_id": "T0080.003",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0080.003.md"
]
},
"related": [],
"uuid": "269f9f9a-c8a4-5b68-8bf7-f09dd1dbd393",
"value": "Identify Trending Topics/Hashtags"
},
{
"description": "An influence operation may conduct web traffic analysis to determine which search engines, keywords, websites, and advertisements gain the most traction with its target audience.",
"meta": {
"external_id": "T0080.004",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0080.004.md"
]
},
"related": [],
"uuid": "8be163d6-9e22-5749-a11c-e1184ec64d33",
"value": "Conduct Web Traffic Analysis"
},
{
"description": "An influence operation may survey a target audience’s Internet availability and degree of media freedom to determine which target audience members will have access to operation content and on which platforms. An operation may face more difficulty targeting an information environment with heavy restrictions and media control than an environment with independent media, freedom of speech and of the press, and individual liberties.",
"meta": {
"external_id": "T0080.005",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0080.005.md"
]
},
"related": [],
"uuid": "e5b33222-ed53-5da5-9d12-778741c209e2",
"value": "Assess Degree/Type of Media Access"
},
{
"description": "Identifying social and technical vulnerabilities determines weaknesses within the target audience information environment for later exploitation. Vulnerabilities include decisive political issues, weak cybersecurity infrastructure, search engine data voids, and other technical and non technical weaknesses in the target information environment. Identifying social and technical vulnerabilities facilitates the later exploitation of the identified weaknesses to advance operation objectives.",
"meta": {
"external_id": "T0081",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.md"
]
},
"related": [],
"uuid": "6870e08f-8a82-592a-91be-71f732281a29",
"value": "Identify Social and Technical Vulnerabilities"
},
{
"description": "Find or plan to create areas (social media groups, search term groups, hashtag groups etc) where individuals only engage with people they agree with.",
"meta": {
"external_id": "T0081.001",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.001.md"
]
},
"related": [],
"uuid": "b6698222-4827-5b48-b0f4-b6d160cca97a",
"value": "Find Echo Chambers"
},
{
"description": "A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalising on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term.",
"meta": {
"external_id": "T0081.002",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.002.md"
]
},
"related": [],
"uuid": "ab5b0e25-01fa-5a41-9ad8-7445034cf952",
"value": "Identify Data Voids"
},
{
"description": "An influence operation may exploit existing racial, religious, demographic, or social prejudices to further polarise its target audience from the rest of the public.",
"meta": {
"external_id": "T0081.003",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.003.md"
]
},
"related": [],
"uuid": "0eefce18-09c4-513b-85a7-4441aa5df105",
"value": "Identify Existing Prejudices"
},
{
"description": "An influence operation may identify existing fissures to pit target populations against one another or facilitate a “divide-and-conquer\" approach to tailor operation narratives along the divides.",
"meta": {
"external_id": "T0081.004",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.004.md"
]
},
"related": [],
"uuid": "b3e586f5-98e3-556c-8d00-2d5be1482438",
"value": "Identify Existing Fissures"
},
{
"description": "An influence operation may assess preexisting conspiracy theories or suspicions in a population to identify existing narratives that support operational objectives.",
"meta": {
"external_id": "T0081.005",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.005.md"
]
},
"related": [],
"uuid": "eb4cc97e-5620-5bf9-9b8b-1d6f5e00f81d",
"value": "Identify Existing Conspiracy Narratives/Suspicions"
},
{
"description": "A wedge issue is a divisive political issue, usually concerning a social phenomenon, that divides individuals along a defined line. An influence operation may exploit wedge issues by intentionally polarising the public along the wedge issue line and encouraging opposition between factions.",
"meta": {
"external_id": "T0081.006",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.006.md"
]
},
"related": [],
"uuid": "ac3f406b-c1dc-561a-ad27-c65c22a3a321",
"value": "Identify Wedge Issues"
},
{
"description": "An influence operation may identify or create a real or imaginary adversary to centre operation narratives against. A real adversary may include certain politicians or political parties while imaginary adversaries may include falsified “deep state”62 actors that, according to conspiracies, run the state behind public view.",
"meta": {
"external_id": "T0081.007",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.007.md"
]
},
"related": [],
"uuid": "302d5e0a-375a-5fc6-a1da-0b33c9268af6",
"value": "Identify Target Audience Adversaries"
},
{
"description": "An influence operation may exploit existing weaknesses in a target’s media system. These weaknesses may include existing biases among media agencies, vulnerability to false news agencies on social media, or existing distrust of traditional media sources. An existing distrust among the public in the media system’s credibility holds high potential for exploitation by an influence operation when establishing alternative news agencies to spread operation content.",
"meta": {
"external_id": "T0081.008",
"kill_chain": [
"tactics:Target Audience Analysis"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0081.008.md"
]
},
"related": [],
"uuid": "662f0d37-b90a-559f-8685-fa06a69be1cb",
"value": "Identify Media System Vulnerabilities"
},
{
"description": "Actors may develop new narratives to further strategic or tactical goals, especially when existing narratives adequately align with the campaign goals. New narratives provide more control in terms of crafting the message to achieve specific goals. However, new narratives may require more effort to disseminate than adapting or adopting existing narratives.",
"meta": {
"external_id": "T0082",
"kill_chain": [
"tactics:Develop Narratives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0082.md"
]
},
"related": [],
"uuid": "4896a448-be51-5423-89cd-efb6444b1c75",
"value": "Develop New Narratives"
},
{
"description": "An influence operation may seek to exploit the preexisting weaknesses, fears, and enemies of the target audience for integration into the operation’s narratives and overall strategy. Integrating existing vulnerabilities into the operational approach conserves resources by exploiting already weak areas of the target information environment instead of forcing the operation to create new vulnerabilities in the environment.",
"meta": {
"external_id": "T0083",
"kill_chain": [
"tactics:Develop Narratives"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0083.md"
]
},
"related": [],
"uuid": "f78a066b-d01b-5f14-8327-4e2856a187d2",
"value": "Integrate Target Audience Vulnerabilities into Narrative"
},
{
"description": "When an operation recycles content from its own previous operations or plagiarises from external operations. An operation may launder information to conserve resources that would have otherwise been utilised to develop new content.",
"meta": {
"external_id": "T0084",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0084.md"
]
},
"related": [],
"uuid": "7828596a-f1b5-563c-bd40-4a876b5cec58",
"value": "Reuse Existing Content"
},
{
"description": "Copypasta refers to a piece of text that has been copied and pasted multiple times across various online platforms. A copypasta’s final form may differ from its original source text as users add, delete, or otherwise edit the content as they repost the text.",
"meta": {
"external_id": "T0084.001",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0084.001.md"
]
},
"related": [],
"uuid": "dba75e23-c7f8-504d-83a7-5771148e5951",
"value": "Use Copypasta"
},
{
"description": "An influence operation may take content from other sources without proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources.",
"meta": {
"external_id": "T0084.002",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0084.002.md"
]
},
"related": [],
"uuid": "33787c2e-55c8-54a4-9d2d-541a35b5932e",
"value": "Plagiarise Content"
},
{
"description": "An influence operation may take authentic content from other sources and add deceptive labels or deceptively translate the content into other langauges.",
"meta": {
"external_id": "T0084.003",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0084.003.md"
]
},
"related": [],
"uuid": "a1f69093-a97c-561e-80ec-da8c93004205",
"value": "Deceptively Labelled or Translated"
},
{
"description": "An influence operation may take content from other sources with proper attribution. This content may be either misinformation content shared by others without malicious intent but now leveraged by the campaign as disinformation or disinformation content from other sources. Examples include the appropriation of content from one inauthentic news site to another inauthentic news site or network in ways that align with the originators licencing or terms of service.",
"meta": {
"external_id": "T0084.004",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0084.004.md"
]
},
"related": [],
"uuid": "f941e002-c556-5621-a80e-c52a38c54bc9",
"value": "Appropriate Content"
},
{
"description": "Creating and editing false or misleading text-based artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign.",
"meta": {
"external_id": "T0085",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0085.md"
]
},
"related": [],
"uuid": "bff9c590-c655-5c15-ae4d-13d353a0d9a4",
"value": "Develop Text-Based Content"
},
{
"description": "AI-generated texts refers to synthetic text composed by computers using text-generating AI technology. Autonomous generation refers to content created by a bot without human input, also known as bot-created content generation. Autonomous generation represents the next step in automation after language generation and may lead to automated journalism. An influence operation may use read fakes or autonomous generation to quickly develop and distribute content to the target audience.",
"meta": {
"external_id": "T0085.001",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0085.001.md"
]
},
"related": [],
"uuid": "ed3754e6-bc15-5cf0-8a4b-8737b3814225",
"value": "Develop AI-Generated Text"
},
{
"description": "An influence operation may develop false or misleading news articles aligned to their campaign goals or narratives.",
"meta": {
"external_id": "T0085.003",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0085.003.md"
]
},
"related": [],
"uuid": "7bbdfe14-8294-54f7-9842-449f2db17a90",
"value": "Develop Inauthentic News Articles"
},
{
"description": "Produce text in the form of a document.",
"meta": {
"external_id": "T0085.004",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0085.004.md"
]
},
"related": [],
"uuid": "5f8303e9-4956-589a-a4c6-6b929143f460",
"value": "Develop Document"
},
{
"description": "Produce text content in the form of a book. \n\nThis technique covers both e-books and physical books, however, the former is more easily deployed by threat actors given the lower cost to develop.",
"meta": {
"external_id": "T0085.005",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0085.005.md"
]
},
"related": [],
"uuid": "c363e714-6b46-5f44-8446-ab88fa5974e9",
"value": "Develop Book"
},
{
"description": "Opinion articles (aka “Op-Eds” or “Editorials”) are articles or regular columns flagged as “opinion” posted to news sources, and can be contributed by people outside the organisation. \n\nFlagging articles as opinions allow news organisations to distinguish them from the typical expectations of objective news reporting while distancing the presented opinion from the organisation or its employees.\n\nThe use of this technique is not by itself an indication of malicious or inauthentic content; Op-eds are a common format in media. However, threat actors exploit op-eds to, for example, submit opinion articles to local media to promote their narratives.\n\nExamples from the perspective of a news site involve publishing op-eds from perceived prestigious voices to give legitimacy to an inauthentic publication, or supporting causes by hosting op-eds from actors aligned with the organisation’s goals.",
"meta": {
"external_id": "T0085.006",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0085.006.md"
]
},
"related": [],
"uuid": "a3c5ef63-020b-5dd9-b8b1-303d6e0d2201",
"value": "Develop Opinion Article"
},
{
"description": "Create fake academic research. Example: fake social science research is often aimed at hot-button social issues such as gender, race and sexuality. Fake science research can target Climate Science debate or pseudoscience like anti-vaxx.\n\nThis Technique previously used the ID T0019.001",
"meta": {
"external_id": "T0085.007",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0085.007.md"
]
},
"related": [],
"uuid": "130f70c4-5c39-5284-b604-b4711c6c41b8",
"value": "Create Fake Research"
},
{
"description": "Creating and editing false or misleading visual artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include photographing staged real-life situations, repurposing existing digital images, or using image creation and editing technologies.",
"meta": {
"external_id": "T0086",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0086.md"
]
},
"related": [],
"uuid": "9039269a-4975-52f8-92a8-f142978ffcef",
"value": "Develop Image-Based Content"
},
{
"description": "Memes are one of the most important single artefact types in all of computational propaganda. Memes in this framework denotes the narrow image-based definition. But that naming is no accident, as these items have most of the important properties of Dawkins' original conception as a self-replicating unit of culture. Memes pull together reference and commentary; image and narrative; emotion and message. Memes are a powerful tool and the heart of modern influence campaigns.",
"meta": {
"external_id": "T0086.001",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0086.001.md"
]
},
"related": [],
"uuid": "8c65e301-7dc0-5727-879b-288a643a992b",
"value": "Develop Memes"
},
{
"description": "Deepfakes refer to AI-generated falsified photos, videos, or soundbites. An influence operation may use deepfakes to depict an inauthentic situation by synthetically recreating an individual’s face, body, voice, and physical gestures.",
"meta": {
"external_id": "T0086.002",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0086.002.md"
]
},
"related": [],
"uuid": "0fa4f572-63c0-5a60-9e5e-2234e94f0ee6",
"value": "Develop AI-Generated Images (Deepfakes)"
},
{
"description": "Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event.",
"meta": {
"external_id": "T0086.003",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0086.003.md"
]
},
"related": [],
"uuid": "69161c7b-a90f-5d96-a429-24a0d40d9973",
"value": "Deceptively Edit Images (Cheap Fakes)"
},
{
"description": "Image files that aggregate positive evidence (Joan Donovan)",
"meta": {
"external_id": "T0086.004",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0086.004.md"
]
},
"related": [],
"uuid": "b8a00aa5-9527-5128-a447-210d43bf11e2",
"value": "Aggregate Information into Evidence Collages"
},
{
"description": "Creating and editing false or misleading video artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include staging videos of purportedly real situations, repurposing existing video artefacts, or using AI-generated video creation and editing technologies (including deepfakes).",
"meta": {
"external_id": "T0087",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0087.md"
]
},
"related": [],
"uuid": "97ef881f-9056-5390-8968-2b3d34d2cff8",
"value": "Develop Video-Based Content"
},
{
"description": "Deepfakes refer to AI-generated falsified photos, videos, or soundbites. An influence operation may use deepfakes to depict an inauthentic situation by synthetically recreating an individual’s face, body, voice, and physical gestures.",
"meta": {
"external_id": "T0087.001",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0087.001.md"
]
},
"related": [],
"uuid": "7a3328b8-0998-5bcd-9646-1e0f593802eb",
"value": "Develop AI-Generated Videos (Deepfakes)"
},
{
"description": "Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event.",
"meta": {
"external_id": "T0087.002",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0087.002.md"
]
},
"related": [],
"uuid": "044465ed-375a-59b8-aece-347c73974cfb",
"value": "Deceptively Edit Video (Cheap Fakes)"
},
{
"description": "Creating and editing false or misleading audio artefacts, often aligned with one or more specific narratives, for use in a disinformation campaign. This may include creating completely new audio content, repurposing existing audio artefacts (including cheap fakes), or using AI-generated audio creation and editing technologies (including deepfakes).",
"meta": {
"external_id": "T0088",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0088.md"
]
},
"related": [],
"uuid": "32f31f65-b210-57f8-a4e6-396d6f9676f0",
"value": "Develop Audio-Based Content"
},
{
"description": "Deepfakes refer to AI-generated falsified photos, videos, or soundbites. An influence operation may use deepfakes to depict an inauthentic situation by synthetically recreating an individual’s face, body, voice, and physical gestures.",
"meta": {
"external_id": "T0088.001",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0088.001.md"
]
},
"related": [],
"uuid": "96c96c0a-1e24-5b80-a7c2-2f31767c5fc3",
"value": "Develop AI-Generated Audio (Deepfakes)"
},
{
"description": "Cheap fakes utilise less sophisticated measures of altering an image, video, or audio for example, slowing, speeding, or cutting footage to create a false context surrounding an image or event.",
"meta": {
"external_id": "T0088.002",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0088.002.md"
]
},
"related": [],
"uuid": "482af0a0-50e3-57d6-99af-b8de290d1d00",
"value": "Deceptively Edit Audio (Cheap Fakes)"
},
{
"description": "Procuring documents that are not publicly available, by whatever means -- whether legal or illegal, highly-resourced or less so. These documents can include authentic non-public documents, authentic non-public documents have been altered, or inauthentic documents intended to appear as if they are authentic non-public documents. All of these types of documents can be \"leaked\" during later stages in the operation.",
"meta": {
"external_id": "T0089",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0089.md"
]
},
"related": [],
"uuid": "31254ebe-90c8-5dc6-8ee2-2f27ceb732c3",
"value": "Obtain Private Documents"
},
{
"description": "Procure authentic documents that are not publicly available, by whatever means -- whether legal or illegal, highly-resourced or less so. These documents can be \"leaked\" during later stages in the operation.",
"meta": {
"external_id": "T0089.001",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0089.001.md"
]
},
"related": [],
"uuid": "0ac164e0-f9ea-55a6-ab2b-8d8710f30b1c",
"value": "Obtain Authentic Documents"
},
{
"description": "Alter authentic documents (public or non-public) to achieve campaign goals. The altered documents are intended to appear as if they are authentic and can be \"leaked\" during later stages in the operation.",
"meta": {
"external_id": "T0089.003",
"kill_chain": [
"tactics:Develop Content"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0089.003.md"
]
},
"related": [],
"uuid": "8214610e-69c5-509d-9b04-a393cdc586ec",
"value": "Alter Authentic Documents"
},
{
"description": "Inauthentic accounts include bot accounts, cyborg accounts, sockpuppet accounts, and anonymous accounts.",
"meta": {
"external_id": "T0090",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0090.md"
]
},
"related": [],
"uuid": "fef2cb67-00a3-5141-88df-c3e6a2ae6d56",
"value": "Create Inauthentic Accounts"
},
{
"description": "Anonymous accounts or anonymous users refer to users that access network resources without providing a username or password. An influence operation may use anonymous accounts to spread content without direct attribution to the operation.",
"meta": {
"external_id": "T0090.001",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0090.001.md"
]
},
"related": [],
"uuid": "f3927312-d6d3-5124-b831-5446c1fb5e6e",
"value": "Create Anonymous Accounts"
},
{
"description": "Cyborg accounts refer to partly manned, partly automated social media accounts. Cyborg accounts primarily act as bots, but a human operator periodically takes control of the account to engage with real social media users by responding to comments and posting original content. Influence operations may use cyborg accounts to reduce the amount of direct human input required to maintain a regular account but increase the apparent legitimacy of the cyborg account by occasionally breaking its bot-like behaviour with human interaction.",
"meta": {
"external_id": "T0090.002",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0090.002.md"
]
},
"related": [],
"uuid": "8fa7973f-e10d-5367-af06-76f9e0fc7fc7",
"value": "Create Cyborg Accounts"
},
{
"description": "Bots refer to autonomous internet users that interact with systems or other users while imitating traditional human behaviour. Bots use a variety of tools to stay active without direct human operation, including artificial intelligence and big data analytics. For example, an individual may programme a Twitter bot to retweet a tweet every time it contains a certain keyword or hashtag. An influence operation may use bots to increase its exposure and artificially promote its content across the internet without dedicating additional time or human resources. Amplifier bots promote operation content through reposts, shares, and likes to increase the content’s online popularity. Hacker bots are traditionally covert bots running on computer scripts that rarely engage with users and work primarily as agents of larger cyberattacks, such as a Distributed Denial of Service attacks. Spammer bots are programmed to post content on social media or in comment sections, usually as a supplementary tool. Impersonator bots102 pose as real people by mimicking human behaviour, complicating their detection.",
"meta": {
"external_id": "T0090.003",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0090.003.md"
]
},
"related": [],
"uuid": "16b41179-d9f3-50ea-aedb-ed9e667d6249",
"value": "Create Bot Accounts"
},
{
"description": "Sockpuppet accounts refer to falsified accounts that either promote the influence operation’s own material or attack critics of the material online. Individuals who control sockpuppet accounts also man at least one other user account.67 Sockpuppet accounts help legitimise operation narratives by providing an appearance of external support for the material and discrediting opponents of the operation.",
"meta": {
"external_id": "T0090.004",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0090.004.md"
]
},
"related": [],
"uuid": "0e5ca353-ba01-5dec-95a4-19ca45cb7717",
"value": "Create Sockpuppet Accounts"
},
{
"description": "Operators recruit bad actors paying recruiting, or exerting control over individuals includes trolls, partisans, and contractors.",
"meta": {
"external_id": "T0091",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0091.md"
]
},
"related": [],
"uuid": "981baf1f-f9ae-523b-a135-06b2b940e1ea",
"value": "Recruit Malign Actors"
},
{
"description": "Operators recruit paid contractor to support the campaign.",
"meta": {
"external_id": "T0091.001",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0091.001.md"
]
},
"related": [],
"uuid": "8278b8d9-e056-5d6d-827d-4752bb2d7833",
"value": "Recruit Contractors"
},
{
"description": "Operators recruit partisans (ideologically-aligned individuals) to support the campaign.",
"meta": {
"external_id": "T0091.002",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0091.002.md"
]
},
"related": [],
"uuid": "6c3ac844-a6fc-545d-9957-a1513949f639",
"value": "Recruit Partisans"
},
{
"description": "An influence operation may hire trolls, or human operators of fake accounts that aim to provoke others by posting and amplifying content about controversial issues. Trolls can serve to discredit an influence operation’s opposition or bring attention to the operation’s cause through debate. Classic trolls refer to regular people who troll for personal reasons, such as attention-seeking or boredom. Classic trolls may advance operation narratives by coincidence but are not directly affiliated with any larger operation. Conversely, hybrid trolls act on behalf of another institution, such as a state or financial organisation, and post content with a specific ideological goal. Hybrid trolls may be highly advanced and institutionalised or less organised and work for a single individual.",
"meta": {
"external_id": "T0091.003",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0091.003.md"
]
},
"related": [],
"uuid": "0ac30e0e-434d-510a-a2f8-1b330338134d",
"value": "Enlist Troll Accounts"
},
{
"description": "Operators build their own network, creating links between accounts -- whether authentic or inauthentic -- in order amplify and promote narratives and artefacts, and encourage further growth of ther network, as well as the ongoing sharing and engagement with operational content.",
"meta": {
"external_id": "T0092",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0092.md"
]
},
"related": [],
"uuid": "ef0c7e64-7702-5624-8318-d6f2d592433b",
"value": "Build Network"
},
{
"description": "Influence operations may establish organisations with legitimate or falsified hierarchies, staff, and content to structure operation assets, provide a sense of legitimacy to the operation, or provide institutional backing to operation activities.",
"meta": {
"external_id": "T0092.001",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0092.001.md"
]
},
"related": [],
"uuid": "bc78ce0a-1a9a-56b2-9e2d-77df7d14cf82",
"value": "Create Organisations"
},
{
"description": "A follow train is a group of people who follow each other on a social media platform, often as a way for an individual or campaign to grow its social media following. Follow trains may be a violation of platform Terms of Service. They are also known as follow-for-follow groups.",
"meta": {
"external_id": "T0092.002",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0092.002.md"
]
},
"related": [],
"uuid": "3d9be546-6fd4-5171-b418-f7dc7557f347",
"value": "Use Follow Trains"
},
{
"description": "When there is not an existing community or sub-group that meets a campaign's goals, an influence operation may seek to create a community or sub-group.",
"meta": {
"external_id": "T0092.003",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0092.003.md"
]
},
"related": [],
"uuid": "0462781b-c754-5d6a-8742-91cb02d81034",
"value": "Create Community or Sub-Group"
},
{
"description": "Operators acquire an existing network by paying, recruiting, or exerting control over the leaders of the existing network.",
"meta": {
"external_id": "T0093",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0093.md"
]
},
"related": [],
"uuid": "c1512f4a-9f4a-5b67-9f20-dbc40942d136",
"value": "Acquire/Recruit Network"
},
{
"description": "An influence operation may fund proxies, or external entities that work for the operation. An operation may recruit/train users with existing sympathies towards the operation’s narratives and/or goals as proxies. Funding proxies serves various purposes including: - Diversifying operation locations to complicate attribution - Reducing the workload for direct operation assets",
"meta": {
"external_id": "T0093.001",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0093.001.md"
]
},
"related": [],
"uuid": "fb44dd38-07ef-5274-b3c9-c5e59afa1750",
"value": "Fund Proxies"
},
{
"description": "A botnet is a group of bots that can function in coordination with each other.",
"meta": {
"external_id": "T0093.002",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0093.002.md"
]
},
"related": [],
"uuid": "750ed343-1ad9-5eb3-bbb4-08d680d47f53",
"value": "Acquire Botnets"
},
{
"description": "Operators deceptively insert social assets into existing networks as group members in order to influence the members of the network and the wider information environment that the network impacts.",
"meta": {
"external_id": "T0094",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0094.md"
]
},
"related": [],
"uuid": "bb12e908-0783-53cb-9b29-de4bc8786604",
"value": "Infiltrate Existing Networks"
},
{
"description": "When seeking to infiltrate an existing network, an influence operation may identify individuals and groups that might be susceptible to being co-opted or influenced.",
"meta": {
"external_id": "T0094.001",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0094.001.md"
]
},
"related": [],
"uuid": "16aa2680-49bf-531c-a654-2e06dd852ac8",
"value": "Identify Susceptible Targets in Networks"
},
{
"description": "Butterfly attacks occur when operators pretend to be members of a certain social group, usually a group that struggles for representation. An influence operation may mimic a group to insert controversial statements into the discourse, encourage the spread of operation content, or promote harassment among group members. Unlike astroturfing, butterfly attacks aim to infiltrate and discredit existing grassroots movements, organisations, and media campaigns.",
"meta": {
"external_id": "T0094.002",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0094.002.md"
]
},
"related": [],
"uuid": "9748df5d-c55d-5f30-80c9-670bdf312ecd",
"value": "Utilise Butterfly Attacks"
},
{
"description": "An owned media asset refers to an agency or organisation through which an influence operation may create, develop, and host content and narratives. Owned media assets include websites, blogs, social media pages, forums, and other platforms that facilitate the creation and organisation of content.",
"meta": {
"external_id": "T0095",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0095.md"
]
},
"related": [],
"uuid": "9aff2d75-3898-56bc-b5ae-2d3566ab8de2",
"value": "Develop Owned Media Assets"
},
{
"description": "Using the services of large-scale content providers for creating and amplifying campaign artefacts at scale.",
"meta": {
"external_id": "T0096",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0096.md"
]
},
"related": [],
"uuid": "845f886a-80e7-587a-a8c2-1473488d290e",
"value": "Leverage Content Farms"
},
{
"description": "An influence operation may create an organisation for creating and amplifying campaign artefacts at scale.",
"meta": {
"external_id": "T0096.001",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0096.001.md"
]
},
"related": [],
"uuid": "c07d2615-36a0-52cc-8cbb-84442420df07",
"value": "Create Content Farms"
},
{
"description": "An influence operation may outsource content creation to external companies to avoid attribution, increase the rate of content creation, or improve content quality, i.e., by employing an organisation that can create content in the target audience’s native language. Employed organisations may include marketing companies for tailored advertisements or external content farms for high volumes of targeted media.",
"meta": {
"external_id": "T0096.002",
"kill_chain": [
"tactics:Establish Assets"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0096.002.md"
]
},
"related": [],
"uuid": "ccbc4898-76ec-5bc3-a0d2-39473fb20c2f",
"value": "Outsource Content Creation to External Organisations"
},
{
"description": "Creating fake people, often with accounts across multiple platforms. These personas can be as simple as a name, can contain slightly more background like location, profile pictures, backstory, or can be effectively backstopped with indicators like fake identity documents.",
"meta": {
"external_id": "T0097",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0097.md"
]
},
"related": [],
"uuid": "7f984091-41b3-5e8f-b723-1d5eb9150d1d",
"value": "Create Personas"
},
{
"description": "People may produce evidence which supports the persona they are deploying (T0097) (aka “backstopping” the persona).\n\nThis Technique covers situations where evidence is developed or produced as part of an influence operation to increase the perceived legitimacy of a persona used during IO, including creating accounts for the same persona on multiple platforms.\n\nThe use of personas (T0097), and providing evidence to improve people’s perception of one’s persona (T0097.001), are not necessarily malicious or inauthentic. However, sometimes people use personas to increase the perceived legitimacy of narratives for malicious purposes.\n\nThis Technique was previously called Backstop Personas.",
"meta": {
"external_id": "T0097.001",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0097.001.md"
]
},
"related": [],
"uuid": "2341584c-3ca5-5d2e-85f8-2b9c4da81268",
"value": "Produce Evidence for Persona"
},
{
"description": "Modern computational propaganda makes use of a cadre of imposter news sites spreading globally. These sites, sometimes motivated by concerns other than propaganda--for instance, click-based revenue--often have some superficial markers of authenticity, such as naming and site-design. But many can be quickly exposed with reference to their owenership, reporting history and adverstising details.",
"meta": {
"external_id": "T0098",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0098.md"
]
},
"related": [],
"uuid": "abaff1d4-e7b1-597b-bb22-556f54a9602c",
"value": "Establish Inauthentic News Sites"
},
{
"description": "Create Inauthentic News Sites",
"meta": {
"external_id": "T0098.001",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0098.001.md"
]
},
"related": [],
"uuid": "b9dceeab-f5d8-50ae-ad8a-365d77fc4a3d",
"value": "Create Inauthentic News Sites"
},
{
"description": "Leverage Existing Inauthentic News Sites",
"meta": {
"external_id": "T0098.002",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0098.002.md"
]
},
"related": [],
"uuid": "51648b8d-6019-5545-a67b-e2e1e4b901a2",
"value": "Leverage Existing Inauthentic News Sites"
},
{
"description": "An influence operation may prepare assets impersonating existing entities (both organisations and people) to further conceal its network identity and add a layer of legitimacy to its operation content. Existing entities may include authentic news outlets, public figures, organisations, or state entities. \n\nUsers will more likely believe and less likely fact-check news from recognisable sources rather than unknown sites. \n\nAn influence operation may use a wide variety of cyber techniques to impersonate a legitimate entity’s website or social media account. \n\nThis Technique was previously called Prepare Assets Impersonating Legitimate Entities.",
|
||
"meta": {
|
||
"external_id": "T0099",
|
||
"kill_chain": [
|
||
"tactics:Establish Legitimacy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0099.md"
]
},
"related": [],
"uuid": "9758be4b-0f4d-5438-bc2a-567bffb8cd57",
"value": "Impersonate Existing Entity"
},
{
"description": "An influence operation may prepare assets impersonating legitimate entities to further conceal its network identity and add a layer of legitimacy to its operation content. Users will more likely believe and less likely fact-check news from recognisable sources rather than unknown sites. Legitimate entities may include authentic news outlets, public figures, organisations, or state entities.",
"meta": {
"external_id": "T0099.002",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0099.002.md"
]
},
"related": [],
"uuid": "8eab0457-f145-56f7-aac6-d46ec8225570",
"value": "Spoof/Parody Account/Site"
},
{
"description": "A situation where a threat actor styles their online assets or content to mimic an existing organisation.\n\nThis can be done to take advantage of peoples’ trust in the organisation to increase narrative believability, to smear the organisation, or to make the organisation less trustworthy.",
"meta": {
"external_id": "T0099.003",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0099.003.md"
]
},
"related": [],
"uuid": "87a87abc-4860-51e5-a3cb-527d763dd7b1",
"value": "Impersonate Existing Organisation"
},
{
"description": "A situation where a threat actor styles their online assets or content to mimic an existing media outlet.\n\nThis can be done to take advantage of peoples’ trust in the outlet to increase narrative believability, to smear the outlet, or to make the outlet less trustworthy.",
"meta": {
"external_id": "T0099.004",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0099.004.md"
]
},
"related": [],
"uuid": "6d757126-920d-5bd3-8eeb-c555e9f6482e",
"value": "Impersonate Existing Media Outlet"
},
{
"description": "A situation where a threat actor styles their online assets or content to impersonate an official (including government officials, organisation officials, etc).",
"meta": {
"external_id": "T0099.005",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0099.005.md"
]
},
"related": [],
"uuid": "90a440e1-5618-5406-9ce3-2e61cf6c5e77",
"value": "Impersonate Existing Official"
},
{
"description": "A situation where a threat actor styles their online assets or content to impersonate an influencer or celebrity, typically to exploit users’ existing faith in the impersonated target.",
"meta": {
"external_id": "T0099.006",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0099.006.md"
]
},
"related": [],
"uuid": "c2714def-dd7a-5091-818a-0c219af8135f",
"value": "Impersonate Existing Influencer"
},
{
"description": "An influence operation may co-opt trusted sources by infiltrating or repurposing a source to reach a target audience through existing, previously reliable networks. Co-opted trusted sources may include: - National or local new outlets - Research or academic publications - Online blogs or websites",
"meta": {
"external_id": "T0100",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0100.md"
]
},
"related": [],
"uuid": "052ea05b-d892-5987-8017-0efad3d88a27",
"value": "Co-Opt Trusted Sources"
},
{
"description": "Co-Opt Trusted Individuals",
"meta": {
"external_id": "T0100.001",
"kill_chain": [
"tactics:Establish Legitimacy"
],
"refs": [
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0100.001.md"
]
},
"related": [],
"uuid": "8592f95a-a576-5c9f-8f62-66089345255a",
"value": "Co-Opt Trusted Individuals"
},
{
"description": "Co-Opt Grassroots Groups",
|
||
"meta": {
|
||
"external_id": "T0100.002",
|
||
"kill_chain": [
|
||
"tactics:Establish Legitimacy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0100.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "8b9308aa-c65d-5e00-bb60-f93873611283",
|
||
"value": "Co-Opt Grassroots Groups"
|
||
},
|
||
{
|
||
"description": "Co-opt Influencers",
|
||
"meta": {
|
||
"external_id": "T0100.003",
|
||
"kill_chain": [
|
||
"tactics:Establish Legitimacy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0100.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7e763150-56e9-50e0-a180-3faf14734574",
|
||
"value": "Co-Opt Influencers"
|
||
},
|
||
{
|
||
"description": "Localised content refers to content that appeals to a specific community of individuals, often in defined geographic areas. An operation may create localised content using local language and dialects to resonate with its target audience and blend in with other local news and social media. Localised content may help an operation increase legitimacy, avoid detection, and complicate external attribution.",
|
||
"meta": {
|
||
"external_id": "T0101",
|
||
"kill_chain": [
|
||
"tactics:Microtarget"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0101.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "a2355290-e41e-5210-b03c-6ef88d4b61c2",
|
||
"value": "Create Localised Content"
|
||
},
|
||
{
|
||
"description": "An echo chamber refers to an internet subgroup, often along ideological lines, where individuals only engage with “others with which they are already in agreement.” A filter bubble refers to an algorithm's placement of an individual in content that they agree with or regularly engage with, possibly entrapping the user into a bubble of their own making. An operation may create these isolated areas of the internet by match existing groups, or aggregating individuals into a single target audience based on shared interests, politics, values, demographics, and other characteristics. Echo chambers and filter bubbles help to reinforce similar biases and content to the same target audience members.",
|
||
"meta": {
|
||
"external_id": "T0102",
|
||
"kill_chain": [
|
||
"tactics:Microtarget"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0102.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d4e6d8d6-125c-58cf-924f-960e17a795bf",
|
||
"value": "Leverage Echo Chambers/Filter Bubbles"
|
||
},
|
||
{
|
||
"description": "Use existing Echo Chambers/Filter Bubbles",
|
||
"meta": {
|
||
"external_id": "T0102.001",
|
||
"kill_chain": [
|
||
"tactics:Microtarget"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0102.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "bfa744ce-4cbb-5cc3-9cb5-406783d5d5d9",
|
||
"value": "Use Existing Echo Chambers/Filter Bubbles"
|
||
},
|
||
{
|
||
"description": "Create Echo Chambers/Filter Bubbles",
|
||
"meta": {
|
||
"external_id": "T0102.002",
|
||
"kill_chain": [
|
||
"tactics:Microtarget"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0102.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "1a8c5e95-d053-5cf1-98c9-7e33b04708ab",
|
||
"value": "Create Echo Chambers/Filter Bubbles"
|
||
},
|
||
{
|
||
"description": "A data void refers to a word or phrase that results in little, manipulative, or low-quality search engine data. Data voids are hard to detect and relatively harmless until exploited by an entity aiming to quickly proliferate false or misleading information during a phenomenon that causes a high number of individuals to query the term or phrase. In the Plan phase, an influence operation may identify data voids for later exploitation in the operation. A 2019 report by Michael Golebiewski identifies five types of data voids. (1) “Breaking news” data voids occur when a keyword gains popularity during a short period of time, allowing an influence operation to publish false content before legitimate news outlets have an opportunity to publish relevant information. (2) An influence operation may create a “strategic new terms” data void by creating their own terms and publishing information online before promoting their keyword to the target audience. (3) An influence operation may publish content on “outdated terms” that have decreased in popularity, capitalising on most search engines’ preferences for recency. (4) “Fragmented concepts” data voids separate connections between similar ideas, isolating segment queries to distinct search engine results. (5) An influence operation may use “problematic queries” that previously resulted in disturbing or inappropriate content to promote messaging until mainstream media recontextualizes the term.",
|
||
"meta": {
|
||
"external_id": "T0102.003",
|
||
"kill_chain": [
|
||
"tactics:Microtarget"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0102.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "62a656a7-9e5f-58e3-b563-9396006fadc3",
|
||
"value": "Exploit Data Voids"
|
||
},
|
||
{
|
||
"description": "A livestream refers to an online broadcast capability that allows for real-time communication to closed or open networks.",
|
||
"meta": {
|
||
"external_id": "T0103",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0103.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "aead2978-a869-5fc7-96f6-f9c55baf2e09",
|
||
"value": "Livestream"
|
||
},
|
||
{
|
||
"description": "A video livestream refers to an online video broadcast capability that allows for real-time communication to closed or open networks.",
|
||
"meta": {
|
||
"external_id": "T0103.001",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0103.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b8200b83-54c4-5448-86a8-08fa1223b470",
|
||
"value": "Video Livestream"
|
||
},
|
||
{
|
||
"description": "An audio livestream refers to an online audio broadcast capability that allows for real-time communication to closed or open networks.",
|
||
"meta": {
|
||
"external_id": "T0103.002",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0103.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "880869e4-2576-5a33-bea0-f35bb71fcdc0",
|
||
"value": "Audio Livestream"
|
||
},
|
||
{
|
||
"description": "Social media are interactive digital channels that facilitate the creation and sharing of information, ideas, interests, and other forms of expression through virtual communities and networks.",
|
||
"meta": {
|
||
"external_id": "T0104",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0104.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "012be2cf-7aed-5ac4-8fb5-ad7ffff73ea0",
|
||
"value": "Social Networks"
|
||
},
|
||
{
|
||
"description": "Examples include Facebook, Twitter, LinkedIn, etc.",
|
||
"meta": {
|
||
"external_id": "T0104.001",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0104.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "79364323-1d9e-5e29-8bd8-d0bc7bf32f30",
|
||
"value": "Mainstream Social Networks"
|
||
},
|
||
{
|
||
"description": "“Dating App” refers to any platform (or platform feature) in which the ostensive purpose is for users to develop a physical/romantic relationship with other users.\n\nThreat Actors can exploit users’ quest for love to trick them into doing things like revealing sensitive information or giving them money.\n\nExamples include Tinder, Bumble, Grindr, Facebook Dating, Tantan, Badoo, Plenty of Fish, hinge, LOVOO, OkCupid, happn, and Mamba.",
|
||
"meta": {
|
||
"external_id": "T0104.002",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0104.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "96b1a88b-ea2d-51ad-a473-1669e956d387",
|
||
"value": "Dating App"
|
||
},
|
||
{
|
||
"description": "Social networks that are not open to people outside of family, friends, neighbours, or co-workers. Non-work-related examples include Couple, FamilyWall, 23snaps, and Nextdoor. Some of the larger social network platforms enable closed communities: examples are Instagram Close Friends and Twitter (X) Circle. Work-related examples of private social networks include LinkedIn, Facebook Workplace, and enterprise communication platforms such as Slack or Microsoft Teams.",
|
||
"meta": {
|
||
"external_id": "T0104.003",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0104.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "ebcad87c-1217-5d90-8f6f-43d078a3d461",
|
||
"value": "Private/Closed Social Networks"
|
||
},
|
||
{
|
||
"description": "Examples include smaller and niche networks including Gettr, Truth Social, Parler, etc.",
|
||
"meta": {
|
||
"external_id": "T0104.004",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0104.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7f80d0ec-c3d9-501f-9688-780ed4fa3720",
|
||
"value": "Interest-Based Networks"
|
||
},
|
||
{
|
||
"description": "Use a dedicated, existing hashtag for the campaign/incident.",
|
||
"meta": {
|
||
"external_id": "T0104.005",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0104.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "6e852d19-6582-5713-bdf0-18a68ee50bd8",
|
||
"value": "Use Hashtags"
|
||
},
|
||
{
|
||
"description": "Create a campaign/incident specific hashtag.",
|
||
"meta": {
|
||
"external_id": "T0104.006",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0104.006.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "732d47a6-ba6a-56d4-828c-6e6612d9c95d",
|
||
"value": "Create Dedicated Hashtag"
|
||
},
|
||
{
|
||
"description": "Media sharing networks refer to services whose primary function is the hosting and sharing of specific forms of media. Examples include Instagram, Snapchat, TikTok, Youtube, SoundCloud.",
|
||
"meta": {
|
||
"external_id": "T0105",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0105.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d201dc16-622a-5da2-b82a-9924607f2e24",
|
||
"value": "Media Sharing Networks"
|
||
},
|
||
{
|
||
"description": "Examples include Instagram, Snapchat, Flickr, etc",
|
||
"meta": {
|
||
"external_id": "T0105.001",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0105.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "727b8c48-8a62-5804-a1af-fd0b6ec71699",
|
||
"value": "Photo Sharing"
|
||
},
|
||
{
|
||
"description": "Examples include Youtube, TikTok, ShareChat, Rumble, etc",
|
||
"meta": {
|
||
"external_id": "T0105.002",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0105.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "84e96b27-ea09-5a88-9ad7-d6420cc06ee8",
|
||
"value": "Video Sharing"
|
||
},
|
||
{
|
||
"description": "Examples include podcasting apps, Soundcloud, etc.",
|
||
"meta": {
|
||
"external_id": "T0105.003",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0105.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "0f5bce10-d1d9-5270-9b54-0214e2353724",
|
||
"value": "Audio Sharing"
|
||
},
|
||
{
|
||
"description": "Platforms for finding, discussing, and sharing information and opinions. Examples include Reddit, Quora, Digg, message boards, interest-based discussion forums, etc.",
|
||
"meta": {
|
||
"external_id": "T0106",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0106.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "1f4ef9c4-e3f3-5981-a4c9-9aed559323d0",
|
||
"value": "Discussion Forums"
|
||
},
|
||
{
|
||
"description": "Examples include the Chans",
|
||
"meta": {
|
||
"external_id": "T0106.001",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0106.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "12fb075d-f148-5eab-ae24-94799f055750",
|
||
"value": "Anonymous Message Boards"
|
||
},
|
||
{
|
||
"description": "Platforms for searching, sharing, and curating content and media. Examples include Pinterest, Flipboard, etc.",
|
||
"meta": {
|
||
"external_id": "T0107",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0107.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "cc4df2aa-7a91-53a3-816f-c1d9340801ea",
|
||
"value": "Bookmarking and Content Curation"
|
||
},
|
||
{
|
||
"description": "Examples include WordPress, Blogger, Weebly, Tumblr, Medium, etc.",
|
||
"meta": {
|
||
"external_id": "T0108",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0108.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "274821cc-3f7a-5785-8712-0f46a5e2903b",
|
||
"value": "Blogging and Publishing Networks"
|
||
},
|
||
{
|
||
"description": "Platforms for finding, reviewing, and sharing information about brands, products, services, restaurants, travel destinations, etc. Examples include Yelp, TripAdvisor, etc.",
|
||
"meta": {
|
||
"external_id": "T0109",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0109.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "64d83292-f532-5aca-b76e-69e4741d4a6e",
|
||
"value": "Consumer Review Networks"
|
||
},
|
||
{
|
||
"description": "Leveraging formal, traditional, diplomatic channels to communicate with foreign governments (written documents, meetings, summits, diplomatic visits, etc). This type of diplomacy is conducted by diplomats of one nation with diplomats and other officials of another nation or international organisation.",
|
||
"meta": {
|
||
"external_id": "T0110",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0110.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "46aedae0-4850-5af6-8db4-ad5665ecd2a4",
|
||
"value": "Formal Diplomatic Channels"
|
||
},
|
||
{
|
||
"description": "Examples include TV, Newspaper, Radio, etc.",
|
||
"meta": {
|
||
"external_id": "T0111",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0111.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "5cb9a5f0-e6a6-57e8-9cc4-262c807281fa",
|
||
"value": "Traditional Media"
|
||
},
|
||
{
|
||
"description": "TV",
|
||
"meta": {
|
||
"external_id": "T0111.001",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0111.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7c5bb87d-d038-5a46-9069-6cb8d01a19e7",
|
||
"value": "TV"
|
||
},
|
||
{
|
||
"description": "Newspaper",
|
||
"meta": {
|
||
"external_id": "T0111.002",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0111.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "21fa5ba1-9782-5cad-8903-7abb955ed9b1",
|
||
"value": "Newspaper"
|
||
},
|
||
{
|
||
"description": "Radio",
|
||
"meta": {
|
||
"external_id": "T0111.003",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0111.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "6d83b061-da10-5693-837c-960285176c0b",
|
||
"value": "Radio"
|
||
},
|
||
{
|
||
"description": "Delivering content and narratives via email. This can include using list management or high-value individually targeted messaging.",
|
||
"meta": {
|
||
"external_id": "T0112",
|
||
"kill_chain": [
|
||
"tactics:Select Channels and Affordances"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0112.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "32ec2894-3a89-5b14-be34-77289f1106ca",
|
||
"value": "Email"
|
||
},
|
||
{
|
||
"description": "Commercial analytic firms collect data on target audience activities and evaluate the data to detect trends, such as content receiving high click-rates. An influence operation may employ commercial analytic firms to facilitate external collection on its target audience, complicating attribution efforts and better tailoring the content to audience preferences.",
|
||
"meta": {
|
||
"external_id": "T0113",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0113.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d6a72ed4-28f9-5736-b8a6-459679026513",
|
||
"value": "Employ Commercial Analytic Firms"
|
||
},
|
||
{
|
||
"description": "Delivering content via any form of paid media or advertising.",
|
||
"meta": {
|
||
"external_id": "T0114",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0114.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "51639828-5e65-5f32-9858-7020166d26dd",
|
||
"value": "Deliver Ads"
|
||
},
|
||
{
|
||
"description": "Social Media",
|
||
"meta": {
|
||
"external_id": "T0114.001",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0114.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "9c655aa6-1474-5ab9-8eff-519df00fe41b",
|
||
"value": "Social Media"
|
||
},
|
||
{
|
||
"description": "Delivering content by posting via owned media (assets that the operator controls).",
|
||
"meta": {
|
||
"external_id": "T0115",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0115.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e41d7f0f-d913-5973-b8a3-385b39e78ebd",
|
||
"value": "Post Content"
|
||
},
|
||
{
|
||
"description": "Memes are one of the most important single artefact types in all of computational propaganda. Memes in this framework denotes the narrow image-based definition. But that naming is no accident, as these items have most of the important properties of Dawkins' original conception as a self-replicating unit of culture. Memes pull together reference and commentary; image and narrative; emotion and message. Memes are a powerful tool and the heart of modern influence campaigns.",
|
||
"meta": {
|
||
"external_id": "T0115.001",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0115.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "986815f4-a31d-57bd-8782-9039044af3af",
|
||
"value": "Share Memes"
|
||
},
|
||
{
|
||
"description": "Post Violative Content to Provoke Takedown and Backlash.",
|
||
"meta": {
|
||
"external_id": "T0115.002",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0115.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "70a3dd8d-c492-5b80-a77c-21f05a72a8c4",
|
||
"value": "Post Violative Content to Provoke Takedown and Backlash"
|
||
},
|
||
{
|
||
"description": "Direct posting refers to a method of posting content via a one-way messaging service, where the recipient cannot directly respond to the poster’s messaging. An influence operation may post directly to promote operation narratives to the target audience without allowing opportunities for fact-checking or disagreement, creating a false sense of support for the narrative.",
|
||
"meta": {
|
||
"external_id": "T0115.003",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0115.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "344ef4f6-8020-5493-871e-b7015d53bfae",
|
||
"value": "One-Way Direct Posting"
|
||
},
|
||
{
|
||
"description": "Delivering content by replying or commenting via owned media (assets that the operator controls).",
|
||
"meta": {
|
||
"external_id": "T0116",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0116.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "df724dcc-0d26-5c3b-aec1-b3c82f509f07",
|
||
"value": "Comment or Reply on Content"
|
||
},
|
||
{
|
||
"description": "Use government-paid social media commenters, astroturfers, chat bots (programmed to reply to specific key words/hashtags) influence online conversations, product reviews, web-site comment forums.",
|
||
"meta": {
|
||
"external_id": "T0116.001",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0116.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "c5d17eaa-9f30-5b38-a54a-ddc853981e53",
|
||
"value": "Post Inauthentic Social Media Comment"
|
||
},
|
||
{
|
||
"description": "Deliver content by attracting the attention of traditional media (earned media).",
|
||
"meta": {
|
||
"external_id": "T0117",
|
||
"kill_chain": [
|
||
"tactics:Deliver Content"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0117.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "40c341c1-873c-5cbe-bac6-eaeed322d74e",
|
||
"value": "Attract Traditional Media"
|
||
},
|
||
{
|
||
"description": "An influence operation may amplify existing narratives that align with its narratives to support operation objectives.",
|
||
"meta": {
|
||
"external_id": "T0118",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0118.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "69fe11a4-89b8-5c78-8872-7f7bc7a870f1",
|
||
"value": "Amplify Existing Narrative"
|
||
},
|
||
{
|
||
"description": "Cross-posting refers to posting the same message to multiple internet discussions, social media platforms or accounts, or news groups at one time. An influence operation may post content online in multiple communities and platforms to increase the chances of content exposure to the target audience.",
|
||
"meta": {
|
||
"external_id": "T0119",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0119.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "324248a7-3a0c-5689-8f0e-770d6d6f2dd7",
|
||
"value": "Cross-Posting"
|
||
},
|
||
{
|
||
"description": "An influence operation may post content across groups to spread narratives and content to new communities within the target audiences or to new target audiences.",
|
||
"meta": {
|
||
"external_id": "T0119.001",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0119.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d6cb6d4e-f75a-50af-b629-bea934659403",
|
||
"value": "Post across Groups"
|
||
},
|
||
{
|
||
"description": "An influence operation may post content across platforms to spread narratives and content to new communities within the target audiences or to new target audiences. Posting across platforms can also remove opposition and context, helping the narrative spread with less opposition on the cross-posted platform.",
|
||
"meta": {
|
||
"external_id": "T0119.002",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0119.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7dfb83d1-507f-517e-912f-6deefee4ce3f",
|
||
"value": "Post across Platform"
|
||
},
|
||
{
|
||
"description": "Post Across Disciplines",
|
||
"meta": {
|
||
"external_id": "T0119.003",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0119.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "32ad368e-ac64-59bb-921a-80fdff8eed09",
|
||
"value": "Post across Disciplines"
|
||
},
|
||
{
|
||
"description": "Incentivizing content sharing refers to actions that encourage users to share content themselves, reducing the need for the operation itself to post and promote its own content.",
|
||
"meta": {
|
||
"external_id": "T0120",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0120.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e8a91999-4d28-5d96-a427-d67c23a9c661",
|
||
"value": "Incentivize Sharing"
|
||
},
|
||
{
|
||
"description": "Use Affiliate Marketing Programmes",
|
||
"meta": {
|
||
"external_id": "T0120.001",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0120.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "cd41b90c-5c59-5c1f-9824-515e9394d546",
|
||
"value": "Use Affiliate Marketing Programmes"
|
||
},
|
||
{
|
||
"description": "Use Contests and Prizes",
|
||
"meta": {
|
||
"external_id": "T0120.002",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0120.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7fcb8b90-f534-5a4e-8321-d1610916eaa0",
|
||
"value": "Use Contests and Prizes"
|
||
},
|
||
{
|
||
"description": "Manipulating a platform algorithm refers to conducting activity on a platform in a way that intentionally targets its underlying algorithm. After analysing a platform’s algorithm (see: Select Platforms), an influence operation may use a platform in a way that increases its content exposure, avoids content removal, or otherwise benefits the operation’s strategy. For example, an influence operation may use bots to amplify its posts so that the platform’s algorithm recognises engagement with operation content and further promotes the content on user timelines.",
|
||
"meta": {
|
||
"external_id": "T0121",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0121.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "0f36a79a-aa9a-5792-9a5e-5587fd626ee3",
|
||
"value": "Manipulate Platform Algorithm"
|
||
},
|
||
{
|
||
"description": "Bypassing content blocking refers to actions taken to circumvent network security measures that prevent users from accessing certain servers, resources, or other online spheres. An influence operation may bypass content blocking to proliferate its content on restricted areas of the internet. Common strategies for bypassing content blocking include: - Altering IP addresses to avoid IP filtering - Using a Virtual Private Network (VPN) to avoid IP filtering - Using a Content Delivery Network (CDN) to avoid IP filtering - Enabling encryption to bypass packet inspection blocking - Manipulating text to avoid filtering by keywords - Posting content on multiple platforms to avoid platform-specific removals - Using local facilities or modified DNS servers to avoid DNS filtering",
|
||
"meta": {
|
||
"external_id": "T0121.001",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0121.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "df60a404-a336-5fe0-8194-4c7605b0504c",
|
||
"value": "Bypass Content Blocking"
|
||
},
|
||
{
|
||
"description": "Direct users to alternative platforms refers to encouraging users to move from the platform on which they initially viewed operation content and engage with content on alternate information channels, including separate social media channels and inauthentic websites. An operation may drive users to alternative platforms to diversify its information channels and ensure the target audience knows where to access operation content if the initial platform suspends, flags, or otherwise removes original operation assets and content.",
|
||
"meta": {
|
||
"external_id": "T0122",
|
||
"kill_chain": [
|
||
"tactics:Maximise Exposure"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0122.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "18930995-fc3c-530b-8e6c-ae8fef68a4df",
|
||
"value": "Direct Users to Alternative Platforms"
|
||
},
|
||
{
|
||
"description": "Controlling the information environment through offensive cyberspace operations uses cyber tools and techniques to alter the trajectory of content in the information space to either prioritise operation messaging or block opposition messaging.",
|
||
"meta": {
|
||
"external_id": "T0123",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0123.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "8264209e-287a-535e-b502-a0c59483a667",
|
||
"value": "Control Information Environment through Offensive Cyberspace Operations"
|
||
},
|
||
{
|
||
"description": "Deleting opposing content refers to the removal of content that conflicts with operational narratives from selected platforms. An influence operation may delete opposing content to censor contradictory information from the target audience, allowing operation narratives to take priority in the information space.",
|
||
"meta": {
|
||
"external_id": "T0123.001",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0123.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e65250eb-08b4-5bc5-b3b5-d0f426470755",
|
||
"value": "Delete Opposing Content"
|
||
},
|
||
{
|
||
"description": "Content blocking refers to actions taken to restrict internet access or render certain areas of the internet inaccessible. An influence operation may restrict content based on both network and content attributes.",
|
||
"meta": {
|
||
"external_id": "T0123.002",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0123.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "8afe697e-f8f5-5b71-81e3-1d81d89b754b",
|
||
"value": "Block Content"
|
||
},
|
||
{
|
||
"description": "Destroying information generation capabilities refers to actions taken to limit, degrade, or otherwise incapacitate an actor’s ability to generate conflicting information. An influence operation may destroy an actor’s information generation capabilities by physically dismantling the information infrastructure, disconnecting resources needed for information generation, or redirecting information generation personnel. An operation may destroy an adversary’s information generation capabilities to limit conflicting content exposure to the target audience and crowd the information space with its own narratives.",
|
||
"meta": {
|
||
"external_id": "T0123.003",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0123.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "55d0c38e-4e38-56c9-b864-962c976b2a62",
|
||
"value": "Destroy Information Generation Capabilities"
|
||
},
|
||
{
|
||
"description": "A server redirect, also known as a URL redirect, occurs when a server automatically forwards a user from one URL to another using server-side or client-side scripting languages. An influence operation may conduct a server redirect to divert target audience members from one website to another without their knowledge. The redirected website may pose as a legitimate source, host malware, or otherwise aid operation objectives.",
|
||
"meta": {
|
||
"external_id": "T0123.004",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0123.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "27fe7183-604f-5b93-a55f-0e9b6a10dd8c",
|
||
"value": "Conduct Server Redirect"
|
||
},
|
||
{
|
||
"description": "Operators can suppress the opposition by exploiting platform content moderation tools and processes like reporting non-violative content to platforms for takedown and goading opposition actors into taking actions that result in platform action or target audience disapproval.",
|
||
"meta": {
|
||
"external_id": "T0124",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0124.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "57788034-088b-5c4d-b0b3-25dcea8f2973",
|
||
"value": "Suppress Opposition"
|
||
},
|
||
{
|
||
"description": "Reporting opposing content refers to notifying and providing an instance of a violation of a platform’s guidelines and policies for conduct on the platform. In addition to simply reporting the content, an operation may leverage copyright regulations to trick social media and web platforms into removing opposing content by manipulating the content to appear in violation of copyright laws. Reporting opposing content facilitates the suppression of contradictory information and allows operation narratives to take priority in the information space.",
|
||
"meta": {
|
||
"external_id": "T0124.001",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0124.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "de589f8f-a86c-5cc4-bd1b-fb522555b718",
|
||
"value": "Report Non-Violative Opposing Content"
|
||
},
|
||
{
|
||
"description": "Goad people into actions that violate terms of service or will lead to having their content or accounts taken down.",
|
||
"meta": {
|
||
"external_id": "T0124.002",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0124.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "5ebcb2f6-22b0-5c8a-9b40-d764b736210f",
|
||
"value": "Goad People into Harmful Action (Stop Hitting Yourself)"
|
||
},
|
||
{
|
||
"description": "Exploit Platform TOS/Content Moderation",
|
||
"meta": {
|
||
"external_id": "T0124.003",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0124.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "393644ea-39c6-59c4-976f-7c2088167f14",
|
||
"value": "Exploit Platform TOS/Content Moderation"
|
||
},
|
||
{
|
||
"description": "Platform filtering refers to the decontextualization of information as claims cross platforms (from Joan Donovan https://www.hks.harvard.edu/publications/disinformation-design-use-evidence-collages-and-platform-filtering-media-manipulation)",
|
||
"meta": {
|
||
"external_id": "T0125",
|
||
"kill_chain": [
|
||
"tactics:Drive Online Harms"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0125.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "c56168d8-5f79-57d4-8cf2-a3575bd7e598",
|
||
"value": "Platform Filtering"
|
||
},
|
||
{
|
||
"description": "Operation encourages attendance at existing real world event.",
|
||
"meta": {
|
||
"external_id": "T0126",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0126.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "cf67a0f0-ae79-59bb-afe2-1eda9f99e8e4",
|
||
"value": "Encourage Attendance at Events"
|
||
},
|
||
{
|
||
"description": "Call to action to attend an event",
|
||
"meta": {
|
||
"external_id": "T0126.001",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0126.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e52a27b8-48f8-527d-9859-84b198d61864",
|
||
"value": "Call to Action to Attend"
|
||
},
|
||
{
|
||
"description": "Facilitate logistics or support for travel, food, housing, etc.",
|
||
"meta": {
|
||
"external_id": "T0126.002",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0126.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "829b1f45-d835-53c8-94e5-4ff3c87fc39c",
|
||
"value": "Facilitate Logistics or Support for Attendance"
|
||
},
|
||
{
|
||
"description": "Physical violence refers to the use of force to injure, abuse, damage, or destroy. An influence operation may conduct or encourage physical violence to discourage opponents from promoting conflicting content or draw attention to operation narratives using shock value.",
|
||
"meta": {
|
||
"external_id": "T0127",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0127.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "db32bcd3-a2ee-58ac-bc71-33f1af810a98",
|
||
"value": "Physical Violence"
|
||
},
|
||
{
|
||
"description": "An influence operation may directly Conduct Physical Violence to achieve campaign goals.",
|
||
"meta": {
|
||
"external_id": "T0127.001",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0127.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "4c7437f5-1759-527a-b7e1-53de1a65abb2",
|
||
"value": "Conduct Physical Violence"
|
||
},
|
||
{
|
||
"description": "An influence operation may Encourage others to engage in Physical Violence to achieve campaign goals.",
|
||
"meta": {
|
||
"external_id": "T0127.002",
|
||
"kill_chain": [
|
||
"tactics:Drive Offline Activity"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0127.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7dc74bbe-4d75-55f7-951c-bdd766e2efa6",
|
||
"value": "Encourage Physical Violence"
|
||
},
|
||
{
|
||
"description": "Conceal the identity or provenance of campaign information assets such as accounts, channels, pages etc. to avoid takedown and attribution.",
|
||
"meta": {
|
||
"external_id": "T0128",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0128.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e9efb6c7-93bf-5bce-a6c7-f01bb8d8a3f8",
|
||
"value": "Conceal Information Assets"
|
||
},
|
||
{
|
||
"description": "An operation may use pseudonyms, or fake names, to mask the identity of operational accounts, channels, pages etc., publish anonymous content, or otherwise use falsified personas to conceal the identity of the operation. An operation may coordinate pseudonyms across multiple platforms, for example, by writing an article under a pseudonym and then posting a link to the article on social media on an account, channel, or page with the same falsified name.",
|
||
"meta": {
|
||
"external_id": "T0128.001",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0128.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "4e7db4e0-23e4-5931-bf81-2c60081bb44f",
|
||
"value": "Use Pseudonyms"
|
||
},
|
||
{
|
||
"description": "Concealing network identity aims to hide the existence an influence operation’s network completely. Unlike concealing sponsorship, concealing network identity denies the existence of any sort of organisation.",
|
||
"meta": {
|
||
"external_id": "T0128.002",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0128.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "caa69e11-fc2b-580d-a6cb-a9bf28308b71",
|
||
"value": "Conceal Network Identity"
|
||
},
|
||
{
|
||
"description": "Distancing reputable individuals from the operation occurs when enlisted individuals, such as celebrities or subject matter experts, actively disengage themselves from operation activities and messaging. Individuals may distance themselves from the operation by deleting old posts or statements, unfollowing operation information assets, or otherwise detaching themselves from the operation’s timeline. An influence operation may want reputable individuals to distance themselves from the operation to reduce operation exposure, particularly if the operation aims to remove all evidence.",
|
||
"meta": {
|
||
"external_id": "T0128.003",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0128.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "ef1633ed-1970-54e9-9fcc-60693beb0500",
|
||
"value": "Distance Reputable Individuals from Operation"
|
||
},
|
||
{
|
||
"description": "Laundering occurs when an influence operation acquires control of previously legitimate information assets such as accounts, channels, pages etc. from third parties through sale or exchange and often in contravention of terms of use. Influence operations use laundered assets to reach target audience members from within an existing information community and to complicate attribution.",
|
||
"meta": {
|
||
"external_id": "T0128.004",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0128.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "8f9b7ca8-e697-520e-a477-f0ba0509bfcd",
|
||
"value": "Launder Information Assets"
|
||
},
|
||
{
|
||
"description": "Changing names or brand names of information assets such as accounts, channels, pages etc. An operation may change the names or brand names of its assets throughout an operation to avoid detection or alter the names of newly acquired or repurposed assets to fit operational narratives.",
|
||
"meta": {
|
||
"external_id": "T0128.005",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0128.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "234c3805-31b1-585b-8c39-94c35315860d",
|
||
"value": "Change Names of Information Assets"
|
||
},
|
||
{
|
||
"description": "Conceal the campaign's operational activity to avoid takedown and attribution.",
|
||
"meta": {
|
||
"external_id": "T0129",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7c57a7c5-28eb-550d-bdf5-12be2396acb7",
|
||
"value": "Conceal Operational Activity"
|
||
},
|
||
{
|
||
"description": "An influence operation may mix its own operation content with legitimate news or external unrelated content to disguise operational objectives, narratives, or existence. For example, an operation may generate \"lifestyle\" or \"cuisine\" content alongside regular operation content.",
|
||
"meta": {
|
||
"external_id": "T0129.002",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b7751384-967b-5260-89c8-0301868810f5",
|
||
"value": "Generate Content Unrelated to Narrative"
|
||
},
|
||
{
|
||
"description": "Breaking association with content occurs when an influence operation actively separates itself from its own content. An influence operation may break association with content by unfollowing, unliking, or unsharing its content, removing attribution from its content, or otherwise taking actions that distance the operation from its messaging. An influence operation may break association with its content to complicate attribution or regain credibility for a new operation.",
|
||
"meta": {
|
||
"external_id": "T0129.003",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "3cf39d60-3b40-5739-b7e7-c6cd3474a9ee",
|
||
"value": "Break Association with Content"
|
||
},
|
||
{
|
||
"description": "URL deletion occurs when an influence operation completely removes its website registration, rendering the URL inaccessible. An influence operation may delete its URLs to complicate attribution or remove online documentation that the operation ever occurred.",
|
||
"meta": {
|
||
"external_id": "T0129.004",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "4f4ae59d-332d-52d5-8c18-cfd6bfc9da97",
|
||
"value": "Delete URLs"
|
||
},
|
||
{
|
||
"description": "Coordinate on encrypted/ closed networks",
|
||
"meta": {
|
||
"external_id": "T0129.005",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "6f546799-5edd-5356-a976-a1df70f5ca32",
|
||
"value": "Coordinate on Encrypted/Closed Networks"
|
||
},
|
||
{
|
||
"description": "Without \"smoking gun\" proof (and even with proof), incident creator can or will deny involvement. This technique also leverages the attacker advantages outlined in \"Demand insurmountable proof\", specifically the asymmetric disadvantage for truth-tellers in a \"firehose of misinformation\" environment.",
|
||
"meta": {
|
||
"external_id": "T0129.006",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.006.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "1646a166-55f0-54c8-a5cc-9e0ca4779974",
|
||
"value": "Deny Involvement"
|
||
},
|
||
{
|
||
"description": "Deleting accounts and account activity occurs when an influence operation removes its online social media assets, including social media accounts, posts, likes, comments, and other online artefacts. An influence operation may delete its accounts and account activity to complicate attribution or remove online documentation that the operation ever occurred.",
|
||
"meta": {
|
||
"external_id": "T0129.007",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.007.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "abf940cd-1f31-5ca7-a2ef-2714c54a3c2a",
|
||
"value": "Delete Accounts/Account Activity"
|
||
},
|
||
{
|
||
"description": "An influence operation may redirect its falsified or typosquatted URLs to legitimate websites to increase the operation's appearance of legitimacy, complicate attribution, and avoid detection.",
|
||
"meta": {
|
||
"external_id": "T0129.008",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.008.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "4c7aca7d-c1d2-5262-b374-d28675ddd402",
|
||
"value": "Redirect URLs"
|
||
},
|
||
{
|
||
"description": "Removing post origins refers to the elimination of evidence that indicates the initial source of operation content, often to complicate attribution. An influence operation may remove post origins by deleting watermarks, renaming files, or removing embedded links in its content.",
|
||
"meta": {
|
||
"external_id": "T0129.009",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.009.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "1192d06d-4766-599f-987f-f6eb292f1b5c",
|
||
"value": "Remove Post Origins"
|
||
},
|
||
{
|
||
"description": "Misattributed activity refers to incorrectly attributed operation activity. For example, a state sponsored influence operation may conduct operation activity in a way that mimics another state so that external entities misattribute activity to the incorrect state. An operation may misattribute their activities to complicate attribution, avoid detection, or frame an adversary for negative behaviour.",
|
||
"meta": {
|
||
"external_id": "T0129.010",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0129.010.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "5b9fee14-a5d4-56e3-a8b1-7031ef414e78",
|
||
"value": "Misattribute Activity"
|
||
},
|
||
{
|
||
"description": "Conceal the campaign's infrastructure to avoid takedown and attribution.",
|
||
"meta": {
|
||
"external_id": "T0130",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0130.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e19140c7-5296-574a-8350-5b1d5be04630",
|
||
"value": "Conceal Infrastructure"
|
||
},
|
||
{
|
||
"description": "Concealing sponsorship aims to mislead or obscure the identity of the hidden sponsor behind an operation rather than entity publicly running the operation. Operations that conceal sponsorship may maintain visible falsified groups, news outlets, non-profits, or other organisations, but seek to mislead or obscure the identity sponsoring, funding, or otherwise supporting these entities. Influence operations may use a variety of techniques to mask the location of their social media accounts to complicate attribution and conceal evidence of foreign interference. Operation accounts may set their location to a false place, often the location of the operation’s target audience, and post in the region’s language",
|
||
"meta": {
|
||
"external_id": "T0130.001",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0130.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "bd222921-2ce7-5198-aebe-794cbc81b5db",
|
||
"value": "Conceal Sponsorship"
|
||
},
|
||
{
|
||
"description": "Hosting refers to services through which storage and computing resources are provided to an individual or organisation for the accommodation and maintenance of one or more websites and related services. Services may include web hosting, file sharing, and email distribution. Bulletproof hosting refers to services provided by an entity, such as a domain hosting or web hosting firm, that allows its customer considerable leniency in use of the service. An influence operation may utilise bulletproof hosting to maintain continuity of service for suspicious, illegal, or disruptive operation activities that stricter hosting services would limit, report, or suspend.",
|
||
"meta": {
|
||
"external_id": "T0130.002",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0130.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "64cfd678-c279-59af-89ef-fce2be1f6b26",
|
||
"value": "Utilise Bulletproof Hosting"
|
||
},
|
||
{
|
||
"description": "Use Shell Organisations to conceal sponsorship.",
|
||
"meta": {
|
||
"external_id": "T0130.003",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0130.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e33a8453-d3c1-53a7-9568-8fb65ffe8a47",
|
||
"value": "Use Shell Organisations"
|
||
},
|
||
{
|
||
"description": "Use Cryptocurrency to conceal sponsorship. Examples include Bitcoin, Monero, and Etherium.",
|
||
"meta": {
|
||
"external_id": "T0130.004",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0130.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "6d422b33-be0a-5d5e-8556-f6db54f506d9",
|
||
"value": "Use Cryptocurrency"
|
||
},
|
||
{
|
||
"description": "Obfuscate Payment",
|
||
"meta": {
|
||
"external_id": "T0130.005",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0130.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "97c3035f-9c01-51a9-8f00-0b28b12d89bd",
|
||
"value": "Obfuscate Payment"
|
||
},
|
||
{
|
||
"description": "Exploiting weaknesses in platforms' terms of service and content moderation policies to avoid takedowns and platform actions.",
|
||
"meta": {
|
||
"external_id": "T0131",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0131.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "636c3c7c-c98a-50dd-9b98-607d163a3a94",
|
||
"value": "Exploit TOS/Content Moderation"
|
||
},
|
||
{
|
||
"description": "Make incident content visible for a long time, e.g. by exploiting platform terms of service, or placing it where it's hard to remove or unlikely to be removed.",
|
||
"meta": {
|
||
"external_id": "T0131.001",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0131.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7897332f-fb75-509f-8cf5-005da7bd14cf",
|
||
"value": "Legacy Web Content"
|
||
},
|
||
{
|
||
"description": "Post Borderline Content",
|
||
"meta": {
|
||
"external_id": "T0131.002",
|
||
"kill_chain": [
|
||
"tactics:Persist in the Information Environment"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0131.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "98cdfd25-6d66-5dfe-8303-a97d2f6d44dd",
|
||
"value": "Post Borderline Content"
|
||
},
|
||
{
|
||
"description": "A metric used to determine the accomplishment of actions. “Are the actions being executed as planned?”",
|
||
"meta": {
|
||
"external_id": "T0132",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0132.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "68f1e82e-f3ae-5975-aec8-a396c204ed39",
|
||
"value": "Measure Performance"
|
||
},
|
||
{
|
||
"description": "Measure the performance individuals in achieving campaign goals",
|
||
"meta": {
|
||
"external_id": "T0132.001",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0132.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7e712446-36ee-584f-a832-c98f8fa6d912",
|
||
"value": "People Focused"
|
||
},
|
||
{
|
||
"description": "Measure the performance of campaign content",
|
||
"meta": {
|
||
"external_id": "T0132.002",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0132.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "145dc4d2-ab1f-5128-a7bf-d7d835b0a8fa",
|
||
"value": "Content Focused"
|
||
},
|
||
{
|
||
"description": "View Focused",
|
||
"meta": {
|
||
"external_id": "T0132.003",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0132.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "79368272-a235-5d84-aeb3-70d337dcfffb",
|
||
"value": "View Focused"
|
||
},
|
||
{
|
||
"description": "A metric used to measure a current system state. “Are we on track to achieve the intended new system state within the planned timescale?”",
|
||
"meta": {
|
||
"external_id": "T0133",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0133.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "26789434-54f0-5a93-a769-4810af285679",
|
||
"value": "Measure Effectiveness"
|
||
},
|
||
{
|
||
"description": "Monitor and evaluate behaviour changes from misinformation incidents.",
|
||
"meta": {
|
||
"external_id": "T0133.001",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0133.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "3cf4d2ba-2ba4-58c0-915d-c9781f4b4979",
|
||
"value": "Behaviour Changes"
|
||
},
|
||
{
|
||
"description": "Measure current system state with respect to the effectiveness of campaign content.",
|
||
"meta": {
|
||
"external_id": "T0133.002",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0133.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "90ed2d0b-1260-50ed-8a3d-8a71fbda4c8e",
|
||
"value": "Content"
|
||
},
|
||
{
|
||
"description": "Measure current system state with respect to the effectiveness of influencing awareness.",
|
||
"meta": {
|
||
"external_id": "T0133.003",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0133.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "2a5f3d2c-9b1e-5aa5-a817-f9af6adf454d",
|
||
"value": "Awareness"
|
||
},
|
||
{
|
||
"description": "Measure current system state with respect to the effectiveness of influencing knowledge.",
|
||
"meta": {
|
||
"external_id": "T0133.004",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0133.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "df8b6793-cb28-5445-bbdb-c72bf5ff73fa",
|
||
"value": "Knowledge"
|
||
},
|
||
{
|
||
"description": "Measure current system state with respect to the effectiveness of influencing action/attitude.",
|
||
"meta": {
|
||
"external_id": "T0133.005",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0133.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "f9ae2f58-1c32-5e54-9bfd-27b3618a60e6",
|
||
"value": "Action/Attitude"
|
||
},
|
||
{
|
||
"description": "Ensuring that Key Performance Indicators are identified and tracked, so that the performance and effectiveness of campaigns, and elements of campaigns, can be measured, during and after their execution.",
|
||
"meta": {
|
||
"external_id": "T0134",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0134.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e13d8a29-e9ef-5bf5-bcbc-372edc418d5d",
|
||
"value": "Measure Effectiveness Indicators (or KPIs)"
|
||
},
|
||
{
|
||
"description": "Monitor and evaluate message reach in misinformation incidents.",
|
||
"meta": {
|
||
"external_id": "T0134.001",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0134.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "22e518b6-db32-50db-bf96-5a19b6604b8c",
|
||
"value": "Message Reach"
|
||
},
|
||
{
|
||
"description": "Monitor and evaluate social media engagement in misinformation incidents.",
|
||
"meta": {
|
||
"external_id": "T0134.002",
|
||
"kill_chain": [
|
||
"tactics:Assess Effectiveness"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0134.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "e9ff0ba4-19ba-5ae7-9fd4-49ac50a8a7b2",
|
||
"value": "Social Media Engagement"
|
||
},
|
||
{
|
||
"description": "Weaken, debilitate, or subvert a target or their actions. An influence operation may be designed to disparage an opponent; sabotage an opponent’s systems or processes; compromise an opponent’s relationships or support system; impair an opponent’s capability; or thwart an opponent’s initiative. ",
|
||
"meta": {
|
||
"external_id": "T0135",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0135.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "0141e703-9b91-55b5-b262-506eb215f6e8",
|
||
"value": "Undermine"
|
||
},
|
||
{
|
||
"description": "Denigrate, disparage, or discredit an opponent. This is a common tactical objective in political campaigns with a larger strategic goal. It differs from efforts to harm a target through defamation. If there is no ulterior motive and the sole aim is to cause harm to the target, then choose sub-technique “Defame” of technique “Cause Harm” instead.",
|
||
"meta": {
|
||
"external_id": "T0135.001",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0135.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "a1a4b880-fd5a-5f6e-a649-3caf0e1395fc",
|
||
"value": "Smear"
|
||
},
|
||
{
|
||
"description": "Prevent the successful outcome of a policy, operation, or initiative. Actors conduct influence operations to stymie or foil proposals, plans, or courses of action which are not in their interest. ",
|
||
"meta": {
|
||
"external_id": "T0135.002",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0135.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "13212ee6-9714-5a65-a1e2-6fa5e30b5f73",
|
||
"value": "Thwart"
|
||
},
|
||
{
|
||
"description": "Sabotage, destroy, or damage a system, process, or relationship. The classic example is the Soviet strategy of “active measures” involving deniable covert activities such as political influence, the use of front organisations, the orchestration of domestic unrest, and the spread of disinformation. ",
|
||
"meta": {
|
||
"external_id": "T0135.003",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0135.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "0b45e223-773a-533f-83f0-fbc928fe8e77",
|
||
"value": "Subvert"
|
||
},
|
||
{
|
||
"description": "To cause a target audience to divide into two completely opposing groups. This is a special case of subversion. To divide and conquer is an age-old approach to subverting and overcoming an enemy.",
|
||
"meta": {
|
||
"external_id": "T0135.004",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0135.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "674d2dbc-d75f-5c3e-964a-e4fd3010dd4f",
|
||
"value": "Polarise"
|
||
},
|
||
{
|
||
"description": "Grow or maintain the base of support for the actor, ally, or action. This includes hard core recruitment, managing alliances, and generating or maintaining sympathy among a wider audience, including reputation management and public relations. Sub-techniques assume support for actor (self) unless otherwise specified. ",
|
||
"meta": {
|
||
"external_id": "T0136",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "92f8589a-028b-5504-8b71-bb847c45155b",
|
||
"value": "Cultivate Support"
|
||
},
|
||
{
|
||
"description": "Preserve a positive perception in the public’s mind following an accusation or adverse event. When accused of a wrongful act, an actor may engage in denial, counter accusations, whataboutism, or conspiracy theories to distract public attention and attempt to maintain a positive image. ",
|
||
"meta": {
|
||
"external_id": "T0136.001",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "0846475e-2669-52e3-b1a0-9da43455379e",
|
||
"value": "Defend Reputaton"
|
||
},
|
||
{
|
||
"description": "To convince others to exonerate you of a perceived wrongdoing. When an actor finds it untenable to deny doing something, they may attempt to exonerate themselves with disinformation which claims the action was reasonable. This is a special case of “Defend Reputation”. ",
|
||
"meta": {
|
||
"external_id": "T0136.002",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "eb9eddd7-ec69-57cf-9858-7699328de606",
|
||
"value": "Justify Action"
|
||
},
|
||
{
|
||
"description": "Raise the morale of those who support the organisation or group. Invigorate constituents with zeal for the mission or activity. Terrorist groups, political movements, and cults may indoctrinate their supporters with ideologies that are based on warped versions of religion or cause harm to others. ",
|
||
"meta": {
|
||
"external_id": "T0136.003",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "367a49af-493d-5f32-af61-94ac25f12ef4",
|
||
"value": "Energise Supporters"
|
||
},
|
||
{
|
||
"description": "Elevate the estimation of the actor in the public’s mind. Improve their image or standing. Public relations professionals use persuasive overt communications to achieve this goal; manipulators use covert disinformation. ",
|
||
"meta": {
|
||
"external_id": "T0136.004",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "53f0923a-2e3d-5d42-b520-1218f962dc68",
|
||
"value": "Boost Reputation"
|
||
},
|
||
{
|
||
"description": "Elevate or fortify the public backing for a policy, operation, or idea. Domestic and foreign actors can use artificial means to fabricate or amplify public support for a proposal or action. ",
|
||
"meta": {
|
||
"external_id": "T0136.005",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7da024d9-24d2-595a-becb-4a792e885b80",
|
||
"value": "Cultvate Support for Initiative"
|
||
},
|
||
{
|
||
"description": "Elevate or fortify the public backing for a partner. Governments may interfere in other countries’ elections by covertly favouring a party or candidate aligned with their interests. They may also mount an influence operation to bolster the reputation of an ally under attack. ",
|
||
"meta": {
|
||
"external_id": "T0136.006",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.006.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "06bed5fe-853f-57ce-a6de-4174b6ab58d2",
|
||
"value": "Cultivate Support for Ally"
|
||
},
|
||
{
|
||
"description": "Motivate followers to join or subscribe as members of the team. Organisations may mount recruitment drives that use propaganda to entice sympathisers to sign up. ",
|
||
"meta": {
|
||
"external_id": "T0136.007",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.007.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "78cd1801-a560-5417-abf2-dc5c617950e2",
|
||
"value": "Recruit Members"
|
||
},
|
||
{
|
||
"description": "Improve personal standing within a community. Gain fame, approbation, or notoriety. Conspiracy theorists, those with special access, and ideologues can gain prominence in a community by propagating disinformation, leaking confidential documents, or spreading hate. ",
|
||
"meta": {
|
||
"external_id": "T0136.008",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0136.008.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7f0f4d69-8634-52b4-aad8-61d8445acdb7",
|
||
"value": "Increase Prestige"
|
||
},
|
||
{
|
||
"description": "Profit from disinformation, conspiracy theories, or online harm. In some cases, the sole objective is financial gain, in other cases the objective is both financial and political. Making money may also be a way to sustain a political campaign. ",
|
||
"meta": {
|
||
"external_id": "T0137",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0137.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "a9da70ec-419b-5fee-a66e-b55f0d5f483b",
|
||
"value": "Make Money"
|
||
},
|
||
{
|
||
"description": "Earn income from digital advertisements published alongside inauthentic content. Conspiratorial, false, or provocative content drives internet traffic. Content owners earn money from impressions of, or clicks on, or conversions of ads published on their websites, social media profiles, or streaming services, or ads published when their content appears in search engine results. Fraudsters simulate impressions, clicks, and conversions, or they spin up inauthentic sites or social media profiles just to generate ad revenue. Conspiracy theorists and political operators generate ad revenue as a byproduct of their operation or as a means of sustaining their campaign. ",
|
||
"meta": {
|
||
"external_id": "T0137.001",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0137.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "a25ebac4-85ff-5106-926b-b3c9ca1dfc86",
|
||
"value": "Generate Ad Revenue"
|
||
},
|
||
{
|
||
"description": "Defraud a target or trick a target into doing something that benefits the attacker. A typical scam is where a fraudster convinces a target to pay for something without the intention of ever delivering anything in return. Alternatively, the fraudster may promise benefits which never materialise, such as a fake cure. Criminals often exploit a fear or crisis or generate a sense of urgency. They may use deepfakes to impersonate authority figures or individuals in distress. ",
|
||
"meta": {
|
||
"external_id": "T0137.002",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0137.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "ef11bcd5-f638-55cb-a6e7-599fbbecdc80",
|
||
"value": "Scam"
|
||
},
|
||
{
|
||
"description": "Solicit donations for a cause. Popular conspiracy theorists can attract financial contributions from their followers. Fighting back against the establishment is a popular crowdfunding narrative. ",
|
||
"meta": {
|
||
"external_id": "T0137.003",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0137.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "8c512fc6-92a0-5d2f-8b9b-d5e21283f365",
|
||
"value": "Raise Funds"
|
||
},
|
||
{
|
||
"description": "Offer products for sale under false pretences. Campaigns may hijack or create causes built on disinformation to sell promotional merchandise. Or charlatans may amplify victims’ unfounded fears to sell them items of questionable utility such as supplements or survival gear. ",
|
||
"meta": {
|
||
"external_id": "T0137.004",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0137.004.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b502d30b-0ad0-5abe-bba6-04298b660e26",
|
||
"value": "Sell Items under False Pretences"
|
||
},
|
||
{
|
||
"description": "Coerce money or favours from a target by threatening to expose or corrupt information. Ransomware criminals typically demand money. Intelligence agencies demand national secrets. Sexual predators demand favours. The leverage may be critical, sensitive, or embarrassing information. ",
|
||
"meta": {
|
||
"external_id": "T0137.005",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0137.005.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "d174f433-fcf2-5ad7-be1c-098b373849c1",
|
||
"value": "Extort"
|
||
},
|
||
{
|
||
"description": "Artificially inflate or deflate the price of stocks or other financial instruments and then trade on these to make profit. The most common securities fraud schemes are called “pump and dump” and “poop and scoop”. ",
|
||
"meta": {
|
||
"external_id": "T0137.006",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0137.006.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "bc85e12f-6663-567c-a422-180252963838",
|
||
"value": "Manipulate Stocks"
|
||
},
|
||
{
|
||
"description": "Persuade, impel, or provoke the target to behave in a specific manner favourable to the attacker. Some common behaviours are joining, subscribing, voting, buying, demonstrating, fighting, retreating, resigning, boycotting.",
|
||
"meta": {
|
||
"external_id": "T0138",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0138.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "b0b363b2-8dc8-5be1-86f3-6da1b08427ae",
|
||
"value": "Motivate to Act"
|
||
},
|
||
{
|
||
"description": "Inspire, animate, or exhort a target to act. An actor can use propaganda, disinformation, or conspiracy theories to stimulate a target to act in its interest. ",
|
||
"meta": {
|
||
"external_id": "T0138.001",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0138.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "7e4979e2-a6ce-5c9c-a153-2c0cdcefee24",
|
||
"value": "Encourage"
|
||
},
|
||
{
|
||
"description": "Instigate, incite, or arouse a target to act. Social media manipulators exploit moral outrage to propel targets to spread hate, take to the streets to protest, or engage in acts of violence. ",
|
||
"meta": {
|
||
"external_id": "T0138.002",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0138.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "ce2c3d20-781c-5f85-a329-633bfd0b735d",
|
||
"value": "Provoke"
|
||
},
|
||
{
|
||
"description": "Force target to take an action or to stop taking an action it has already started. Actors can use the threat of reputational damage alongside military or economic threats to compel a target.",
|
||
"meta": {
|
||
"external_id": "T0138.003",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0138.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "df4308e1-d324-57dc-b2e5-63dd8c4f884b",
|
||
"value": "Compel"
|
||
},
|
||
{
|
||
"description": "Discourage, deter, or inhibit the target from actions which would be unfavourable to the attacker. The actor may want the target to refrain from voting, buying, fighting, or supplying. ",
|
||
"meta": {
|
||
"external_id": "T0139",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0139.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "854d0c3d-ea59-5e49-bc38-bee72958a0fb",
|
||
"value": "Dissuade from Acting"
|
||
},
|
||
{
|
||
"description": "To make a target disinclined or reluctant to act. Manipulators use disinformation to cause targets to question the utility, legality, or morality of taking an action. ",
|
||
"meta": {
|
||
"external_id": "T0139.001",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0139.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "841f2f99-397b-5834-87a0-69e1d62cc68f",
|
||
"value": "Discourage"
|
||
},
|
||
{
|
||
"description": "Intimidate or incentivise target into remaining silent or prevent target from speaking out. A threat actor may cow a target into silence as a special case of deterrence. Or they may buy the target’s silence. Or they may repress or restrict the target’s speech. ",
|
||
"meta": {
|
||
"external_id": "T0139.002",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0139.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "403d3951-1a59-5c34-b0da-08f6781b9562",
|
||
"value": "Silence"
|
||
},
|
||
{
|
||
"description": "Prevent target from taking an action for fear of the consequences. Deterrence occurs in the mind of the target, who fears they will be worse off if they take an action than if they don’t. When making threats, aggressors may bluff, feign irrationality, or engage in brinksmanship.",
|
||
"meta": {
|
||
"external_id": "T0139.003",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0139.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "29b445b6-6d90-5b67-af56-3d78a0cd1343",
|
||
"value": "Deter"
|
||
},
|
||
{
|
||
"description": "Persecute, malign, or inflict pain upon a target. The objective of a campaign may be to cause fear or emotional distress in a target. In some cases, harm is instrumental to achieving a primary objective, as in coercion, repression, or intimidation. In other cases, harm may be inflicted for the satisfaction of the perpetrator, as in revenge or sadistic cruelty. ",
|
||
"meta": {
|
||
"external_id": "T0140",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0140.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "0f9fbfeb-5b2f-5aa1-91fa-133841b458c7",
|
||
"value": "Cause Harm"
|
||
},
|
||
{
|
||
"description": "Attempt to damage the target’s personal reputation by impugning their character. This can range from subtle attempts to misrepresent or insinuate, to obvious attempts to denigrate or disparage, to blatant attempts to malign or vilify. Slander applies to oral expression. Libel applies to written or pictorial material. Defamation is often carried out by online trolls. The sole aim here is to cause harm to the target. If the threat actor uses defamation as a means of undermining the target, then choose sub-technique “Smear” of technique “Undermine” instead. ",
|
||
"meta": {
|
||
"external_id": "T0140.001",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0140.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "79dd50a8-0b49-59f1-a820-1c76656cd836",
|
||
"value": "Defame"
|
||
},
|
||
{
|
||
"description": "Coerce, bully, or frighten the target. An influence operation may use intimidation to compel the target to act against their will. Or the goal may be to frighten or even terrify the target into silence or submission. In some cases, the goal is simply to make the victim suffer. ",
|
||
"meta": {
|
||
"external_id": "T0140.002",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0140.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "3d2bdd06-fdcc-5c08-b71e-6aec4315cc2b",
|
||
"value": "Intimidate"
|
||
},
|
||
{
|
||
"description": "Publish and/or propagate demeaning, derisive, or humiliating content targeting an individual or group of individuals with the intent to cause emotional, psychological, or physical distress. Hate speech can cause harm directly or incite others to harm the target. It often aims to stigmatise the target by singling out immutable characteristics such as colour, race, religion, national or ethnic origin, gender, gender identity, sexual orientation, age, disease, or mental or physical disability. Thus, promoting hatred online may involve racism, antisemitism, Islamophobia, xenophobia, sexism, misogyny, homophobia, transphobia, ageism, ableism, or any combination thereof. Motivations for hate speech range from group preservation to ideological superiority to the unbridled infliction of suffering. ",
|
||
"meta": {
|
||
"external_id": "T0140.003",
|
||
"kill_chain": [
|
||
"tactics:Plan Objectives"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0140.003.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "823c3b54-8eac-5772-8e1c-b7fd55bbe518",
|
||
"value": "Spread Hate"
|
||
},
|
||
{
|
||
"description": "Threat Actors may take over existing assets not owned by them through nefarious means, such as using technical exploits, hacking, purchasing compromised accounts from the dark web, or social engineering.",
|
||
"meta": {
|
||
"external_id": "T0141",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0141.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "c863835c-366c-58c1-b405-68f632632540",
|
||
"value": "Acquire Compromised Asset"
|
||
},
|
||
{
|
||
"description": "Threat Actors can take over existing users’ accounts to distribute campaign content. \n\nThe actor may maintain the asset’s previous identity to capitalise on the perceived legitimacy its previous owner had cultivated.\n\nThe actor may completely rebrand the account to exploit its existing reach, or relying on the account’s history to avoid more stringent automated content moderation rules applied to new accounts.\n\nSee also [Mitre ATT&CK’s T1586 Compromise Accounts](https://attack.mitre.org/techniques/T1586/) for more technical information on how threat actors may achieve this objective.\n\nThis Technique was previously called Compromise Legitimate Accounts, and used the ID T0011.",
|
||
"meta": {
|
||
"external_id": "T0141.001",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0141.001.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "6c78a4cc-99ff-5dda-9fd2-0ed060b478ad",
|
||
"value": "Acquire Compromised Account"
|
||
},
|
||
{
|
||
"description": "Threat Actors may take over existing websites to publish or amplify inauthentic narratives. This includes the defacement of websites, and cases where websites’ personas are maintained to add credence to threat actors’ narratives.\n\nSee also [Mitre ATT&CK’s T1584 Compromise Infrastructure](https://attack.mitre.org/techniques/T1584/) for more technical information on how threat actors may achieve this objective.",
|
||
"meta": {
|
||
"external_id": "T0141.002",
|
||
"kill_chain": [
|
||
"tactics:Establish Assets"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0141.002.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "66c253b1-d644-5dca-9954-805693489ed4",
|
||
"value": "Acquire Compromised Website"
|
||
},
|
||
{
|
||
"description": "This technique, sometimes known as \"astroturfing\", occurs when an influence operation disguises itself as a grassroots movement or organisation that supports operation narratives. \n\nAstroturfing aims to increase the appearance of popular support for an evolving grassroots movement in contrast to \"Utilise Butterfly Attacks\", which aims to discredit an existing grassroots movement. \n\nThis Technique was previously called Astroturfing, and used the ID T0099.001",
|
||
"meta": {
|
||
"external_id": "T0142",
|
||
"kill_chain": [
|
||
"tactics:Establish Legitimacy"
|
||
],
|
||
"refs": [
|
||
"https://github.com/DISARMFoundation/DISARMframeworks/blob/main/generated_pages/techniques/T0142.md"
|
||
]
|
||
},
|
||
"related": [],
|
||
"uuid": "c52f5e7a-5a13-5859-9bb0-1620dec4dde2",
|
||
"value": "Fabricate Grassroots Movement"
|
||
}
|
||
],
|
||
"version": 2
|
||
}