mirror of https://github.com/MISP/misp-galaxy.git

commit 0f3ad79069 (parent 917a01920a): update
4 changed files with 441 additions and 262 deletions

tools/mkdocs/main.py (new file, 154 lines)
@@ -0,0 +1,154 @@
from modules.universe import Universe

import multiprocessing
from multiprocessing import Pool

import json
import os
import time
import sys

sys.setrecursionlimit(10000)

FILES_TO_IGNORE = []
CLUSTER_PATH = "../../clusters"
SITE_PATH = "./site/docs"
GALAXY_PATH = "../../galaxies"


def save_cluster_relationships(cluster_data):
    # Unpack cluster data
    galaxy_name, cluster_name, cluster = cluster_data

    # Assuming `universe.get_relationships_with_levels` and `cluster.save_relationships`
    # are methods that can be called like this.
    relationships = universe.get_relationships_with_levels(cluster)
    cluster.save_relationships(relationships)
    print(f"Processed {galaxy_name}, {cluster_name}")
    # Return the result so the parent process can attach it to its own copy of
    # the cluster (Pool workers only ever mutate pickled copies).
    return galaxy_name, cluster_name, relationships


def get_deprecated_galaxy_files():
    deprecated_galaxy_files = []
    for f in os.listdir(GALAXY_PATH):
        with open(os.path.join(GALAXY_PATH, f)) as fr:
            galaxy_json = json.load(fr)
            if "namespace" in galaxy_json and galaxy_json["namespace"] == "deprecated":
                deprecated_galaxy_files.append(f)

    return deprecated_galaxy_files


def cluster_transform_to_link(cluster):
    placeholder = "__TMP__"
    section = (
        cluster
        .value.lower()
        .replace(" - ", placeholder)  # Replace " - " first
        .replace(" ", "-")
        .replace("/", "")
        .replace(":", "")
        .replace(placeholder, "-")
    )
    return f"[{cluster.value} ({cluster.uuid})](../../{cluster.galaxy.json_file_name}/index.md#{section})"


def galaxy_transform_to_link(galaxy):
    return f"[{galaxy.galaxy_name}](../../{galaxy.json_file_name}/index.md)"


def generate_relations_table(relationships):
    markdown = "|Cluster A | Galaxy A | Cluster B | Galaxy B | Level { .graph } |\n"
    markdown += "|---|---|---|---|---|\n"  # five columns, matching the header row
    for from_cluster, to_cluster, level in relationships:
        # galaxy_transform_to_link expects the Galaxy objects, not their names
        from_galaxy = from_cluster.galaxy
        to_galaxy = to_cluster.galaxy
        markdown += f"{cluster_transform_to_link(from_cluster)} | {galaxy_transform_to_link(from_galaxy)} | {cluster_transform_to_link(to_cluster)} | {galaxy_transform_to_link(to_galaxy)} | {level}\n"
    return markdown
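

# --- Illustration only (not part of the build script) ------------------------
# A self-contained sketch of the anchor-slug transform performed by
# cluster_transform_to_link() above; the sample value is hypothetical.
def _slug_example(value):
    placeholder = "__TMP__"
    return (
        value.lower()
        .replace(" - ", placeholder)  # protect " - " so it collapses to one dash
        .replace(" ", "-")
        .replace("/", "")
        .replace(":", "")
        .replace(placeholder, "-")
    )


assert _slug_example("APT 29 - Cozy Bear") == "apt-29-cozy-bear"
# The generated link then points at ../../<galaxy json file name>/index.md#apt-29-cozy-bear
# ------------------------------------------------------------------------------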


def generate_index_page(galaxies):
    index_output = "# MISP Galaxy\n\nThe MISP galaxy offers a streamlined approach for representing large entities, known as clusters, which can be linked to MISP events or attributes. Each cluster consists of one or more elements, represented as key-value pairs. MISP galaxy comes with a default knowledge base, encompassing areas like Threat Actors, Tools, Ransomware, and ATT&CK matrices. However, users have the flexibility to modify, update, replace, or share these elements according to their needs.\n\nClusters and vocabularies within MISP galaxy can be utilized in their original form or as a foundational knowledge base. The distribution settings for each cluster can be adjusted, allowing for either restricted or wide dissemination.\n\nAdditionally, MISP galaxies enable the representation of existing standards like the MITRE ATT&CK™ framework, as well as custom matrices.\n\nThe aim is to provide a core set of clusters for organizations embarking on analysis, which can be further tailored to include localized, private information or additional, shareable data.\n\nClusters serve as an open and freely accessible knowledge base, which can be utilized and expanded within [MISP](https://www.misp-project.org/) or other threat intelligence platforms.\n\n![Overview of the integration of MISP galaxy in the MISP Threat Intelligence Sharing Platform](https://raw.githubusercontent.com/MISP/misp-galaxy/aa41337fd78946a60aef3783f58f337d2342430a/doc/images/galaxy.png)\n\n## Publicly available clusters\n"
    for galaxy in galaxies:
        index_output += f"- [{galaxy.galaxy_name}](./{galaxy.json_file_name}/index.md)\n"
    index_output += "## Statistics\n\nYou can find some statistics about MISP galaxies [here](./statistics.md).\n"
    index_output += "# Contributing\n\nIn the dynamic realm of threat intelligence, a variety of models and approaches exist to systematically organize, categorize, and delineate threat actors, hazards, or activity groups. We embrace innovative methodologies for articulating threat intelligence. The galaxy model is particularly versatile, enabling you to leverage and integrate methodologies that you trust and are already utilizing within your organization or community.\n\nWe encourage collaboration and contributions to the [MISP Galaxy JSON files](https://github.com/MISP/misp-galaxy/). Feel free to fork the project, enhance existing elements or clusters, or introduce new ones. Your insights are valuable - share them with us through a pull-request.\n"
    return index_output


if __name__ == "__main__":
    start_time = time.time()
    universe = Universe()

    FILES_TO_IGNORE.extend(get_deprecated_galaxy_files())
    galaxies_fnames = []
    for f in os.listdir(CLUSTER_PATH):
        if ".json" in f and f not in FILES_TO_IGNORE:
            galaxies_fnames.append(f)
    galaxies_fnames.sort()

    # Create the universe of clusters and galaxies
    for galaxy in galaxies_fnames:
        with open(os.path.join(CLUSTER_PATH, galaxy)) as fr:
            galaxy_json = json.load(fr)
            universe.add_galaxy(galaxy_name=galaxy_json["name"], json_file_name=galaxy, authors=galaxy_json["authors"], description=galaxy_json["description"])
            for cluster in galaxy_json["values"]:
                universe.add_cluster(
                    galaxy_name=galaxy_json.get("name", None),
                    uuid=cluster.get("uuid", None),
                    description=cluster.get("description", None),
                    value=cluster.get("value", None),
                    meta=cluster.get("meta", None)
                )

    # Define the relationships between clusters
    for galaxy in galaxies_fnames:
        with open(os.path.join(CLUSTER_PATH, galaxy)) as fr:
            galaxy_json = json.load(fr)
            for cluster in galaxy_json["values"]:
                if "related" in cluster:
                    for related in cluster["related"]:
                        universe.define_relationship(cluster["uuid"], related["dest-uuid"])

    # # Save relationships to clusters
    # for galaxy in universe.galaxies.values():
    #     for cluster in galaxy.clusters.values():
    #         cluster.save_relationships(universe.get_relationships_with_levels(cluster))

    tasks = []
    for galaxy_name, galaxy in universe.galaxies.items():
        for cluster_name, cluster in galaxy.clusters.items():
            tasks.append((galaxy_name, cluster_name, cluster))

    with Pool(processes=multiprocessing.cpu_count()) as pool:
        results = pool.map(save_cluster_relationships, tasks)
    # Pool workers only mutate their own copies of the clusters, so attach the
    # computed relationships to the clusters held by this (parent) process.
    for galaxy_name, cluster_name, relationships in results:
        universe.galaxies[galaxy_name].clusters[cluster_name].save_relationships(relationships)
    print("All clusters processed.")

    print(f"Finished relations in {time.time() - start_time} seconds")

    # Write output
    if not os.path.exists(SITE_PATH):
        os.mkdir(SITE_PATH)

    with open(os.path.join(SITE_PATH, "index.md"), "w") as index:
        index.write(generate_index_page(universe.galaxies.values()))

    for galaxy in universe.galaxies.values():
        galaxy.write_entry(SITE_PATH)

    for galaxy in universe.galaxies.values():
        if not os.path.exists(GALAXY_PATH):
            os.mkdir(GALAXY_PATH)
        relation_path = os.path.join(GALAXY_PATH, "relations")
        if not os.path.exists(relation_path):
            os.mkdir(relation_path)
        with open(os.path.join(relation_path, ".pages"), "w") as index:
            index.write(f"hide: true\n")

        for cluster in galaxy.clusters.values():
            if cluster.relationships:
                with open(os.path.join(relation_path, f"{cluster.uuid}.md"), "w") as index:
                    index.write(generate_relations_table(cluster.relationships))

    print(f"Finished in {time.time() - start_time} seconds")

    # relationships = universe.get_relationships_with_levels("Banker", "f0ec2df5-2e38-4df3-970d-525352006f2e")
    # print(relationships)

    # markdown_table = generate_markdown_table(relationships)
    # print(markdown_table)
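

# --- Illustration only --------------------------------------------------------
# The shape of a cluster JSON file the __main__ loader above expects. Field
# names come from the loops ("name", "authors", "description", "values",
# "uuid", "value", "meta", "related"/"dest-uuid"); the concrete values below
# are made up, except the Banker UUID, which is the one referenced in the
# commented-out debug call just above.
_example_galaxy_json = {
    "name": "Example Galaxy",            # hypothetical galaxy name
    "authors": ["Example Author"],       # hypothetical
    "description": "Example description.",
    "values": [
        {
            "uuid": "f0ec2df5-2e38-4df3-970d-525352006f2e",
            "value": "Banker",
            "description": "Example cluster description.",
            "meta": {"synonyms": [], "refs": []},
            "related": [{"dest-uuid": "00000000-0000-0000-0000-000000000000"}],  # hypothetical
        }
    ],
}
# ------------------------------------------------------------------------------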

tools/mkdocs/modules/cluster.py

@@ -1,246 +1,109 @@
import os
import validators


class Cluster:
    def __init__(
        self, description, uuid, date, value, related_list, meta, galaxy
    ):
        self.description = description
    def __init__(self, uuid, galaxy, description=None, value=None, meta=None):
        self.uuid = uuid
        self.date = date
        self.description = description
        self.value = value
        self.related_list = related_list
        self.meta = meta
        self.galaxy = galaxy

        self.entry = ""
        self.statistics = None
        self.galaxy = galaxy  # Reference to the Galaxy object this cluster belongs to
        self.outbound_relationships = set()
        self.inbound_relationships = set()
        self.relationships = set()

    def __lt__(self, other):
        return self.uuid < other.uuid
    def add_outbound_relationship(self, cluster):
        self.outbound_relationships.add(cluster)

    def set_statistics(self, statistics):
        self.statistics = statistics
    def add_inbound_relationship(self, cluster):
        self.inbound_relationships.add(cluster)

    def save_relationships(self, relationships):
        self.relationships = relationships

    def generate_entry(self):
        entry = ""
        entry += self._create_title_entry()
        entry += self._create_description_entry()
        entry += self._create_synonyms_entry()
        entry += self._create_uuid_entry()
        entry += self._create_refs_entry()
        entry += self._create_associated_metadata_entry()
        if self.relationships:
            entry += self._create_related_entry()
        return entry

    def _create_title_entry(self):
        self.entry += f"## {self.value}\n"
        self.entry += f"\n"
        entry = ""
        entry += f"## {self.value}\n"
        entry += f"\n"
        return entry

    def _create_description_entry(self):
        entry = ""
        if self.description:
            self.entry += f"{self.description}\n"
            entry += f"{self.description}\n"
        return entry

    def _create_synonyms_entry(self):
        entry = ""
        if isinstance(self.meta, dict) and self.meta.get("synonyms"):
            self.entry += f"\n"
            self.entry += f'??? info "Synonyms"\n'
            self.entry += f"\n"
            self.entry += f'    "synonyms" in the meta part typically refer to alternate names or labels that are associated with a particular {self.value}.\n\n'
            self.entry += f"    | Known Synonyms |\n"
            self.entry += f"    |---------------------|\n"
            entry += f"\n"
            entry += f'??? info "Synonyms"\n'
            entry += f"\n"
            entry += f'    "synonyms" in the meta part typically refer to alternate names or labels that are associated with a particular {self.value}.\n\n'
            entry += f"    | Known Synonyms |\n"
            entry += f"    |---------------------|\n"
            synonyms_count = 0
            for synonym in sorted(self.meta["synonyms"]):
                synonyms_count += 1
                self.entry += f"    | `{synonym}` |\n"
            self.statistics.synonyms_count_dict[self.uuid] = synonyms_count
                entry += f"    | `{synonym}` |\n"
        return entry

    def _create_uuid_entry(self):
        entry = ""
        if self.uuid:
            self.entry += f"\n"
            self.entry += f'??? tip "Internal MISP references"\n'
            self.entry += f"\n"
            self.entry += f"    UUID `{self.uuid}` which can be used as unique global reference for `{self.value}` in MISP communities and other software using the MISP galaxy\n"
            self.entry += f"\n"
            entry += f"\n"
            entry += f'??? tip "Internal MISP references"\n'
            entry += f"\n"
            entry += f"    UUID `{self.uuid}` which can be used as unique global reference for `{self.value}` in MISP communities and other software using the MISP galaxy\n"
            entry += f"\n"
        return entry

    def _create_refs_entry(self):
        entry = ""
        if isinstance(self.meta, dict) and self.meta.get("refs"):
            self.entry += f"\n"
            self.entry += f'??? info "External references"\n'
            self.entry += f"\n"
            entry += f"\n"
            entry += f'??? info "External references"\n'
            entry += f"\n"

            for ref in self.meta["refs"]:
                if validators.url(ref):
                    self.entry += f"    - [{ref}]({ref}) - :material-archive: :material-arrow-right: [webarchive](https://web.archive.org/web/*/{ref})\n"
                    entry += f"    - [{ref}]({ref}) - :material-archive: :material-arrow-right: [webarchive](https://web.archive.org/web/*/{ref})\n"
                else:
                    self.entry += f"    - {ref}\n"
                    entry += f"    - {ref}\n"

            self.entry += f"\n"
            entry += f"\n"
        return entry

    def _create_associated_metadata_entry(self):
        entry = ""
        if isinstance(self.meta, dict):
            excluded_meta = ["synonyms", "refs"]
            self.entry += f"\n"
            self.entry += f'??? info "Associated metadata"\n'
            self.entry += f"\n"
            self.entry += f"    |Metadata key {{ .no-filter }} |Value|\n"
            self.entry += f"    |-----------------------------------|-----|\n"
            entry += f"\n"
            entry += f'??? info "Associated metadata"\n'
            entry += f"\n"
            entry += f"    |Metadata key {{ .no-filter }} |Value|\n"
            entry += f"    |-----------------------------------|-----|\n"
            for meta in sorted(self.meta.keys()):
                if meta not in excluded_meta:
                    self.entry += f"    | {meta} | {self.meta[meta]} |\n"

    def get_related_clusters(
        self, cluster_dict, depth=-1, visited=None, level=1, related_private_clusters={}
    ):
        empty_uuids = 0

        if visited is None:
            visited = {}

        related_clusters = []
        if depth == 0 or not self.related_list:
            return related_clusters

        if self.uuid in visited and visited[self.uuid] <= level:
            return related_clusters
        else:
            visited[self.uuid] = level

        for cluster in self.related_list:
            dest_uuid = cluster["dest-uuid"]

            # Cluster is private
            if dest_uuid not in cluster_dict:
                # Check if UUID is empty
                if not dest_uuid:
                    empty_uuids += 1
                    continue
                self.statistics.private_relations_count += 1
                if dest_uuid not in self.statistics.private_clusters:
                    self.statistics.private_clusters.append(dest_uuid)
                if dest_uuid in related_private_clusters:
                    related_clusters.append(
                        (
                            self,
                            related_private_clusters[dest_uuid],
                            level,
                        )
                    )
                else:
                    related_clusters.append(
                        (
                            self,
                            Cluster(
                                value="Private Cluster",
                                uuid=dest_uuid,
                                date=None,
                                description=None,
                                related_list=None,
                                meta=None,
                                galaxy=None,
                            ),
                            level,
                        )
                    )
                    related_private_clusters[dest_uuid] = related_clusters[-1][1]
                continue

            related_cluster = cluster_dict[dest_uuid]

            self.statistics.public_relations_count += 1

            related_clusters.append((self, related_cluster, level))

            if (depth > 1 or depth == -1) and (
                cluster["dest-uuid"] not in visited
                or visited[cluster["dest-uuid"]] > level + 1
            ):
                new_depth = depth - 1 if depth > 1 else -1
                if cluster["dest-uuid"] in cluster_dict:
                    related_clusters += cluster_dict[
                        cluster["dest-uuid"]
                    ].get_related_clusters(
                        cluster_dict,
                        new_depth,
                        visited,
                        level + 1,
                        related_private_clusters,
                    )

        if empty_uuids > 0:
            self.statistics.empty_uuids_dict[self.value] = empty_uuids

        return self._remove_duplicates(related_clusters)

    def _remove_duplicates(self, related_clusters):
        cluster_dict = {}
        for cluster in related_clusters:
            key = tuple(sorted((cluster[0], cluster[1])))

            if key in cluster_dict:
                if cluster_dict[key][2] > cluster[2]:
                    cluster_dict[key] = cluster
            else:
                cluster_dict[key] = cluster
        related_clusters = list(cluster_dict.values())

        return related_clusters
                    entry += f"    | {meta} | {self.meta[meta]} |\n"
        return entry

    def _create_related_entry(self):
        self.entry += f"\n"
        self.entry += f'??? info "Related clusters"\n'
        self.entry += f"\n"
        self.entry += f"    To see the related clusters, click [here](./relations/{self.uuid}.md).\n"

    def _get_related_entry(self, relations):
        output = ""
        output += f"## Related clusters for {self.value}\n"
        output += f"\n"
        output += f"| Cluster A | Galaxy A | Cluster B | Galaxy B | Level {{ .graph }} |\n"
        output += f"|-----------|----------|-----------|----------|-------------------|\n"
        for relation in relations:
            placeholder = "__TMP__"

            cluster_a_section = (
                relation[0]
                .value.lower()
                .replace(" - ", placeholder)  # Replace " - " first
                .replace(" ", "-")
                .replace("/", "")
                .replace(":", "")
                .replace(placeholder, "-")
            )  # Replace the placeholder with "-"

            cluster_b_section = (
                relation[1]
                .value.lower()
                .replace(" - ", placeholder)  # Replace " - " first
                .replace(" ", "-")
                .replace("/", "")
                .replace(":", "")
                .replace(placeholder, "-")
            )  # Replace the placeholder with "-"

            if cluster_b_section != "private-cluster":
                output += f"| [{relation[0].value} ({relation[0].uuid})](../../{relation[0].galaxy.json_file_name}/index.md#{cluster_a_section}) | [{relation[0].galaxy.name}](../../{relation[0].galaxy.json_file_name}/index.md) | [{relation[1].value} ({relation[1].uuid})](../../{relation[1].galaxy.json_file_name}/index.md#{cluster_b_section}) | [{relation[1].galaxy.name}](../../{relation[1].galaxy.json_file_name}/index.md) | {relation[2]} |\n"
            else:
                output += f"| [{relation[0].value} ({relation[0].uuid})](../../{relation[0].galaxy.json_file_name}/index.md#{cluster_a_section}) | [{relation[0].galaxy.name}](../../{relation[0].galaxy.json_file_name}/index.md) |{relation[1].value} ({relation[1].uuid}) | unknown | {relation[2]} |\n"
        return output

    def create_entry(self, cluster_dict, path):
        if not self.statistics:
            raise ValueError("Statistics not set")
        self._create_title_entry()
        self._create_description_entry()
        self._create_synonyms_entry()
        self._create_uuid_entry()
        self._create_refs_entry()
        self._create_associated_metadata_entry()
        if self.related_list:
            self._create_related_entry()
            self._write_relations(cluster_dict, path)
        return self.entry

    def _write_relations(self, cluster_dict, path):
        related_clusters = self.get_related_clusters(cluster_dict)
        self.statistics.relation_count_dict[self.uuid] = len(related_clusters)
        galaxy_path = os.path.join(path, self.galaxy.json_file_name)
        if not os.path.exists(galaxy_path):
            os.mkdir(galaxy_path)
        relation_path = os.path.join(galaxy_path, "relations")
        if not os.path.exists(relation_path):
            os.mkdir(relation_path)
        with open(os.path.join(relation_path, ".pages"), "w") as index:
            index.write(f"hide: true\n")
        with open(os.path.join(relation_path, f"{self.uuid}.md"), "w") as index:
            index.write(self._get_related_entry(related_clusters))
        entry = ""
        entry += f"\n"
        entry += f'??? info "Related clusters"\n'
        entry += f"\n"
        entry += f"    To see the related clusters, click [here](./relations/{self.uuid}.md).\n"
        return entry
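

# --- Illustration only (not part of the module) --------------------------------
# Stand-alone sketch of the deduplication idea in _remove_duplicates() above:
# for each unordered pair, keep the relation with the lowest level. Plain
# strings stand in for Cluster objects (hypothetical data).
def _dedup_example(relations):
    best = {}
    for a, b, level in relations:
        key = tuple(sorted((a, b)))
        if key not in best or best[key][2] > level:
            best[key] = (a, b, level)
    return list(best.values())


assert _dedup_example([("x", "y", 2), ("y", "x", 1)]) == [("y", "x", 1)]
# --------------------------------------------------------------------------------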

tools/mkdocs/modules/galaxy.py

@@ -3,72 +3,66 @@ from typing import List
import os


class Galaxy:
    def __init__(
        self, cluster_list: List[dict], authors, description, name, json_file_name
    ):
        self.cluster_list = cluster_list
    def __init__(self, galaxy_name: str, json_file_name: str, authors: List[str], description: str):
        self.galaxy_name = galaxy_name
        self.json_file_name = json_file_name
        self.authors = authors
        self.description = description
        self.name = name
        self.json_file_name = json_file_name
        self.clusters = self._create_clusters()
        self.entry = ""

        self.clusters = {}  # Maps uuid to Cluster objects

    def add_cluster(self, uuid, description, value, meta):
        if uuid not in self.clusters:
            self.clusters[uuid] = Cluster(uuid=uuid, galaxy=self, description=description, value=value, meta=meta)

    def write_entry(self, path):
        if not os.path.exists(path):
            os.mkdir(path)
        with open(os.path.join(path, f"{self.galaxy_name}.md"), "w") as index:
            index.write(self.generate_entry())

    def generate_entry(self):
        entry = ""
        entry += self._create_metadata_entry()
        entry += self._create_title_entry()
        entry += self._create_description_entry()
        entry += self._create_authors_entry()
        entry += self._create_clusters_entry()
        return entry

    def _create_metadata_entry(self):
        self.entry += "---\n"
        self.entry += f"title: {self.name}\n"
        entry = ""
        entry += "---\n"
        entry += f"title: {self.galaxy_name}\n"
        meta_description = self.description.replace('"', "-")
        self.entry += f"description: {meta_description}\n"
        self.entry += "---\n"
        entry += f"description: {meta_description}\n"
        entry += "---\n"
        return entry

    def _create_title_entry(self):
        self.entry += f"# {self.name}\n"
        entry = ""
        entry += f"# {self.galaxy_name}\n"
        return entry

    def _create_description_entry(self):
        self.entry += f"{self.description}\n"
        entry = ""
        entry += f"{self.description}\n"
        return entry

    def _create_authors_entry(self):
        entry = ""
        if self.authors:
            self.entry += f"\n"
            self.entry += f'??? info "Authors"\n'
            self.entry += f"\n"
            self.entry += f"    | Authors and/or Contributors|\n"
            self.entry += f"    |----------------------------|\n"
            entry += f"\n"
            entry += f'??? info "Authors"\n'
            entry += f"\n"
            entry += f"    | Authors and/or Contributors|\n"
            entry += f"    |----------------------------|\n"
            for author in self.authors:
                self.entry += f"    |{author}|\n"
                entry += f"    |{author}|\n"
        return entry

    def _create_clusters(self):
        clusters = []
        for cluster in self.cluster_list:
            clusters.append(
                Cluster(
                    value=cluster.get("value", None),
                    description=cluster.get("description", None),
                    uuid=cluster.get("uuid", None),
                    date=cluster.get("date", None),
                    related_list=cluster.get("related", None),
                    meta=cluster.get("meta", None),
                    galaxy=self,
                )
            )
        return clusters

    def _create_clusters_entry(self, cluster_dict, path):
        for cluster in self.clusters:
            self.entry += cluster.create_entry(cluster_dict, path)

    def create_entry(self, cluster_dict, path):
        self._create_metadata_entry()
        self._create_title_entry()
        self._create_description_entry()
        self._create_authors_entry()
        self._create_clusters_entry(cluster_dict, path)
        return self.entry

    def write_entry(self, path, cluster_dict):
        self.create_entry(cluster_dict, path)
        galaxy_path = os.path.join(path, self.json_file_name)
        if not os.path.exists(galaxy_path):
            os.mkdir(galaxy_path)
        with open(os.path.join(galaxy_path, "index.md"), "w") as index:
            index.write(self.entry)
    def _create_clusters_entry(self):
        entry = ""
        for cluster in self.clusters.values():
            entry += cluster.generate_entry()
        return entry
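

# --- Illustration only (not part of the module) --------------------------------
# What the metadata/title/description builders above emit for a hypothetical
# galaxy; note how double quotes are replaced in the front-matter description,
# mirroring _create_metadata_entry().
def _sample_galaxy_header(name, description):
    meta_description = description.replace('"', "-")
    return f"---\ntitle: {name}\ndescription: {meta_description}\n---\n# {name}\n{description}\n"


# _sample_galaxy_header("Example Galaxy", 'A galaxy with "quotes"') produces:
#   ---
#   title: Example Galaxy
#   description: A galaxy with -quotes-
#   ---
#   # Example Galaxy
#   A galaxy with "quotes"
# --------------------------------------------------------------------------------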

tools/mkdocs/modules/universe.py (new file, 168 lines)

@@ -0,0 +1,168 @@
from modules.galaxy import Galaxy
from modules.cluster import Cluster

from collections import defaultdict, deque


class Universe:
    def __init__(self):
        self.galaxies = {}  # Maps galaxy_name to Galaxy objects

    def add_galaxy(self, galaxy_name, json_file_name, authors, description):
        if galaxy_name not in self.galaxies:
            self.galaxies[galaxy_name] = Galaxy(galaxy_name=galaxy_name, json_file_name=json_file_name, authors=authors, description=description)

    def add_cluster(self, galaxy_name, uuid, description, value, meta):
        if galaxy_name in self.galaxies:
            self.galaxies[galaxy_name].add_cluster(uuid=uuid, description=description, value=value, meta=meta)

    def define_relationship(self, cluster_a_id, cluster_b_id):
        cluster_a = None
        cluster_b = None

        # Search for Cluster A and Cluster B in all galaxies
        for galaxy in self.galaxies.values():
            if cluster_a_id in galaxy.clusters:
                cluster_a = galaxy.clusters[cluster_a_id]
            if cluster_b_id in galaxy.clusters:
                cluster_b = galaxy.clusters[cluster_b_id]
            if cluster_a and cluster_b:  # Both clusters found
                break

        # If both clusters are found, define the relationship
        if cluster_a and cluster_b:
            cluster_a.add_outbound_relationship(cluster_b)
            cluster_b.add_inbound_relationship(cluster_a)
        else:
            # If Cluster B is not found, create a private cluster relationship for Cluster A
            if cluster_a:
                private_cluster = Cluster(uuid=cluster_b_id, galaxy=None)
                cluster_a.add_outbound_relationship(private_cluster)
            else:
                print("Cluster A not found in any galaxy")

    # def get_relationships_with_levels(self, galaxy, cluster):
    #     start_galaxy = self.galaxies[galaxy]
    #     start_cluster = start_galaxy.clusters[cluster]

    #     def bfs_with_inbound_outbound(start_cluster):
    #         visited = set()  # To keep track of visited clusters
    #         linked = set()  # To keep track of linked clusters
    #         queue = deque([(start_cluster, 0, 'outbound')])  # Include direction of relationship
    #         relationships = []

    #         while queue:
    #             current_cluster, level, direction = queue.popleft()
    #             if (current_cluster, direction) not in visited:  # Check visited with direction
    #                 visited.add((current_cluster, direction))

    #                 # Process outbound relationships
    #                 if direction == 'outbound':
    #                     for to_cluster in current_cluster.outbound_relationships:
    #                         if (to_cluster, 'outbound') not in visited:
    #                             # relationships.append((current_cluster, to_cluster, level + 1, 'outbound'))
    #                             queue.append((to_cluster, level + 1, 'outbound'))
    #                             relationships.append((current_cluster, to_cluster, level + 1, 'outbound'))

    #                 # Process inbound relationships
    #                 for from_cluster in current_cluster.inbound_relationships:
    #                     if (from_cluster, 'inbound') not in visited:
    #                         relationships.append((from_cluster, current_cluster, level + 1, 'inbound'))
    #                         queue.append((from_cluster, level + 1, 'inbound'))

    #         return relationships

    #     return bfs_with_inbound_outbound(start_cluster)

    # def get_relationships_with_levels(self, galaxy, cluster):
    #     start_galaxy = self.galaxies[galaxy]
    #     start_cluster = start_galaxy.clusters[cluster]

    #     def bfs_with_inbound_outbound(start_cluster):
    #         visited = set()  # To keep track of visited clusters
    #         relationships = defaultdict(lambda: (float('inf'), ''))  # Store lowest level for each link

    #         queue = deque([(start_cluster, 0, 'outbound')])  # Include direction of relationship

    #         while queue:
    #             print(f"Queue: {[c.uuid for c, l, d in queue]}")
    #             current_cluster, level, direction = queue.popleft()
    #             if (current_cluster, direction) not in visited:  # Check visited with direction
    #                 visited.add((current_cluster, direction))

    #                 if current_cluster.uuid == "a5a067c9-c4d7-4f33-8e6f-01b903f89908":
    #                     print(f"Current cluster: {current_cluster.uuid}, Level: {level}, Direction: {direction}")
    #                     print(f"outbound relationships: {[x.uuid for x in current_cluster.outbound_relationships]}")

    #                 # Process outbound relationships
    #                 if direction == 'outbound':
    #                     for to_cluster in current_cluster.outbound_relationships:
    #                         if (to_cluster, 'outbound') not in visited:
    #                             queue.append((to_cluster, level + 1, 'outbound'))

    #                             link = frozenset([current_cluster, to_cluster])
    #                             if relationships[link][0] > level + 1:
    #                                 relationships[link] = (level + 1, 'outbound')

    #                 # Process inbound relationships
    #                 for from_cluster in current_cluster.inbound_relationships:
    #                     if (from_cluster, 'inbound') not in visited:
    #                         queue.append((from_cluster, level + 1, 'inbound'))

    #                         link = frozenset([from_cluster, current_cluster])
    #                         if relationships[link][0] > level + 1:
    #                             relationships[link] = (level + 1, 'inbound')

    #         # Convert defaultdict to list of tuples for compatibility with your existing structure
    #         processed_relationships = []
    #         for link, (lvl, dir) in relationships.items():
    #             clusters = list(link)
    #             if dir == 'outbound':
    #                 processed_relationships.append((clusters[0], clusters[1], lvl, dir))
    #             else:
    #                 processed_relationships.append((clusters[1], clusters[0], lvl, dir))

    #         return processed_relationships

    #     return bfs_with_inbound_outbound(start_cluster)

    def get_relationships_with_levels(self, start_cluster):

        def bfs_with_undirected_relationships(start_cluster):
            visited = set()  # Tracks whether a cluster has been visited
            relationships = defaultdict(lambda: float('inf'))  # Tracks the lowest level for each cluster pair

            queue = deque([(start_cluster, 0)])  # Queue of (cluster, level)

            while queue:
                current_cluster, level = queue.popleft()
                if current_cluster not in visited:
                    visited.add(current_cluster)

                    # Process all relationships regardless of direction
                    neighbors = current_cluster.outbound_relationships.union(current_cluster.inbound_relationships)
                    for neighbor in neighbors:
                        link = frozenset([current_cluster, neighbor])
                        if level + 1 < relationships[link]:
                            relationships[link] = level + 1
                        if neighbor not in visited:
                            queue.append((neighbor, level + 1))

            # Convert the defaultdict to a list of tuples, ignoring direction
            processed_relationships = []
            for link, lvl in relationships.items():
                # Extract clusters from the frozenset; direction is irrelevant
                clusters = list(link)

                # Arbitrarily choose the first cluster as 'source' for consistency
                try:
                    processed_relationships.append((clusters[0], clusters[1], lvl))
                except IndexError:
                    # A self-referencing relation collapses the frozenset to a
                    # single element; pad the pair with a placeholder cluster.
                    processed_relationships.append((clusters[0], Cluster(uuid=0, galaxy=None), lvl))

            return processed_relationships

        return bfs_with_undirected_relationships(start_cluster)
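

# --- Illustration only (not part of the module) --------------------------------
# Stand-alone sketch of the undirected BFS above: every edge is traversed in
# both directions and each unordered pair keeps the smallest level at which it
# was reached. Plain strings and a dict stand in for Cluster objects
# (hypothetical graph).
from collections import deque as _deque


def _bfs_levels_example(adjacency, start):
    seen = {start}
    levels = {}
    queue = _deque([(start, 0)])
    while queue:
        node, level = queue.popleft()
        for neighbor in adjacency.get(node, ()):
            pair = frozenset((node, neighbor))
            if level + 1 < levels.get(pair, float("inf")):
                levels[pair] = level + 1
            if neighbor not in seen:
                seen.add(neighbor)
                queue.append((neighbor, level + 1))
    return levels


_graph = {"a": {"b"}, "b": {"a", "c"}, "c": {"b"}}
assert _bfs_levels_example(_graph, "a") == {
    frozenset({"a", "b"}): 1,
    frozenset({"b", "c"}): 2,
}
# --------------------------------------------------------------------------------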