#!/usr/bin/env python3
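
"""Exercise management for the MISP exercise dashboard.

Loads exercise definitions from `active_exercises/*.json`, tracks per-user task
completion and scoring, and routes incoming MISP log/query data to the
evaluators in `inject_evaluator` (data_filtering, query_mirror, query_search).
"""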

import functools
import json
import re
import time
from pathlib import Path
from typing import Union

import jq

import db
import misp_api
from appConfig import logger
from inject_evaluator import eval_data_filtering, eval_query_mirror, eval_query_search


ACTIVE_EXERCISES_DIR = "active_exercises"
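
# For orientation: a minimal sketch of an exercise file as this module reads it.
# Only the keys accessed below are listed; the values and anything else present
# in real exercise files are illustrative assumptions.
#
# {
#     "exercise": {
#         "uuid": "...", "name": "...", "description": "...",
#         "meta": {"level": "beginner", "priority": 10}
#     },
#     "injects": [
#         {
#             "uuid": "...", "name": "...", "description": "...",
#             "inject_evaluation": [
#                 {
#                     "evaluation_strategy": "data_filtering",  # or "query_mirror" / "query_search"
#                     "score_range": [0, 10],
#                     "parameters": [...],
#                     "evaluation_context": {"query_context": {"request_method": "POST", "url": "...", "payload": {}}}
#                 }
#             ]
#         }
#     ],
#     "inject_flow": [
#         {"inject_uuid": "...", "requirements": {"inject_uuid": "..."}, "sequence": {"followed_by": ["..."]}}
#     ]
# }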


def debounce_check_active_tasks(debounce_seconds: int = 1):
    """Debounce per user: drop calls arriving within `debounce_seconds` of the
    last accepted call for the same user and wrapped function."""
    func_last_execution_time = {}

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            user_id = args[0]  # decorated functions take the user id as their first argument
            now = time.time()
            key = f"{user_id}_{func.__name__}"
            if key not in func_last_execution_time or now >= func_last_execution_time[key] + debounce_seconds:
                func_last_execution_time[key] = now
                return func(*args, **kwargs)
            else:
                logger.debug(f">> Debounced for `{user_id}`")
                return None
        return wrapper
    return decorator
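
# Usage sketch: the decorator wraps a function whose first positional argument
# is the user id, as done for `check_active_tasks` at the bottom of this module:
#
#     @debounce_check_active_tasks(debounce_seconds=2)
#     async def check_active_tasks(user_id: int, data: dict, context: dict) -> bool:
#         ...
#
# Note that a debounced call returns None rather than invoking the wrapped
# coroutine, so callers awaiting the result need to tolerate that.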


def load_exercises() -> bool:
    db.ALL_EXERCISES = read_exercise_dir()
    if not is_validate_exercises(db.ALL_EXERCISES):
        logger.error('Issue while validating exercises')
        return False
    init_inject_flow()
    init_exercises_tasks()
    return True


def read_exercise_dir():
    script_dir = Path(__file__).parent
    target_dir = script_dir / ACTIVE_EXERCISES_DIR
    json_files = target_dir.glob("*.json")
    exercises = []
    for json_file in json_files:
        with open(json_file) as f:
            parsed_exercise = json.load(f)
            exercises.append(parsed_exercise)
    return exercises


def backup_exercises_progress():
    with open('backup.json', 'w') as f:
        to_backup = {
            'EXERCISES_STATUS': db.EXERCISES_STATUS,
            'SELECTED_EXERCISES': db.SELECTED_EXERCISES,
            'USER_ID_TO_EMAIL_MAPPING': db.USER_ID_TO_EMAIL_MAPPING,
            'USER_ID_TO_AUTHKEY_MAPPING': db.USER_ID_TO_AUTHKEY_MAPPING,
        }
        json.dump(to_backup, f)
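
# The resulting backup.json mirrors the in-memory state, roughly (values are illustrative):
#
#     {
#         "EXERCISES_STATUS": {"<exercise_uuid>": {"uuid": "...", "name": "...", "tasks": {...}, "max_score": 30}},
#         "SELECTED_EXERCISES": ["<exercise_uuid>"],
#         "USER_ID_TO_EMAIL_MAPPING": {"1": "user@example.com"},
#         "USER_ID_TO_AUTHKEY_MAPPING": {"1": "<authkey>"}
#     }
#
# json.dump serialises the integer user-id keys as strings, which is why
# restore_exercices_progress() converts them back with int().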


def restore_exercices_progress():
    try:
        with open('backup.json', 'r') as f:
            data = json.load(f)
            db.EXERCISES_STATUS = data['EXERCISES_STATUS']
            db.SELECTED_EXERCISES = data['SELECTED_EXERCISES']
            # JSON object keys are always strings; convert the user IDs back to int
            db.USER_ID_TO_EMAIL_MAPPING = {}
            for user_id_str, email in data['USER_ID_TO_EMAIL_MAPPING'].items():
                db.USER_ID_TO_EMAIL_MAPPING[int(user_id_str)] = email
            db.USER_ID_TO_AUTHKEY_MAPPING = {}
            for user_id_str, authkey in data['USER_ID_TO_AUTHKEY_MAPPING'].items():
                db.USER_ID_TO_AUTHKEY_MAPPING[int(user_id_str)] = authkey
    except Exception:
        logger.info('Could not restore exercise progress')
        db.EXERCISES_STATUS = {}
        db.SELECTED_EXERCISES = []
        db.USER_ID_TO_EMAIL_MAPPING = {}
        db.USER_ID_TO_AUTHKEY_MAPPING = {}


def is_validate_exercises(exercises: list) -> bool:
    exercises_uuid = set()
    tasks_uuid = set()
    exercise_by_uuid = {}
    task_by_uuid = {}
    for exercise in exercises:
        e_uuid = exercise['exercise']['uuid']
        if e_uuid in exercises_uuid:
            logger.error(f"Duplicated UUID {e_uuid}. ({exercise['exercise']['name']}, {exercise_by_uuid[e_uuid]['exercise']['name']})")
            return False
        exercises_uuid.add(e_uuid)
        exercise_by_uuid[e_uuid] = exercise

        for inject in exercise['injects']:
            t_uuid = inject['uuid']
            if t_uuid in tasks_uuid:
                logger.error(f"Duplicated UUID {t_uuid}. ({inject['name']}, {task_by_uuid[t_uuid]['name']})")
                return False
            tasks_uuid.add(t_uuid)
            task_by_uuid[t_uuid] = inject

            # Make sure every jq path used by the data_filtering strategy compiles
            for inject_evaluation in inject.get('inject_evaluation', []):
                if inject_evaluation.get('evaluation_strategy', None) == 'data_filtering':
                    for evaluation in inject_evaluation.get('parameters', []):
                        jq_path = list(evaluation.keys())[0]
                        try:
                            jq.compile(jq_path)
                        except ValueError as e:
                            logger.error(f"[{t_uuid} :: {inject['name']}] Could not compile jq path `{jq_path}`: {e}")
                            return False
    return True


def init_inject_flow():
    for exercise in db.ALL_EXERCISES:
        for inject in exercise['injects']:
            inject['exercise_uuid'] = exercise['exercise']['uuid']
            db.INJECT_BY_UUID[inject['uuid']] = inject

    for exercise in db.ALL_EXERCISES:
        for inject_flow in exercise['inject_flow']:
            db.INJECT_REQUIREMENTS_BY_INJECT_UUID[inject_flow['inject_uuid']] = inject_flow['requirements']
            db.INJECT_SEQUENCE_BY_INJECT_UUID[inject_flow['inject_uuid']] = []
            for sequence in inject_flow['sequence'].get('followed_by', []):
                db.INJECT_SEQUENCE_BY_INJECT_UUID[inject_flow['inject_uuid']].append(sequence)


def init_exercises_tasks():
    for exercise in db.ALL_EXERCISES:
        max_score = 0
        tasks = {}
        for inject in exercise['injects']:
            score = 0
            try:
                for inject_eval in inject['inject_evaluation']:
                    score += inject_eval['score_range'][1]
            except KeyError:
                pass
            max_score += score
            tasks[inject['uuid']] = {
                "name": inject['name'],
                "uuid": inject['uuid'],
                "completed_by_user": [],
                "score": score,
            }
        db.EXERCISES_STATUS[exercise['exercise']['uuid']] = {
            'uuid': exercise['exercise']['uuid'],
            'name': exercise['exercise']['name'],
            'tasks': tasks,
            'max_score': max_score,
        }


def get_exercises():
    exercises = []
    for exercise in db.ALL_EXERCISES:
        tasks = []
        for inject in exercise['injects']:
            score = db.EXERCISES_STATUS[exercise['exercise']['uuid']]['tasks'][inject['uuid']]['score']
            requirements = db.INJECT_REQUIREMENTS_BY_INJECT_UUID[inject['uuid']]
            tasks.append(
                {
                    "name": inject['name'],
                    "uuid": inject['uuid'],
                    "description": inject.get('description', ''),
                    "score": score,
                    "requirements": requirements,
                }
            )
        exercises.append(
            {
                "name": exercise['exercise']['name'],
                "uuid": exercise['exercise']['uuid'],
                "description": exercise['exercise']['description'],
                "level": exercise['exercise']['meta']['level'],
                "priority": exercise['exercise']['meta'].get('priority', 50),
                "tasks": tasks,
            }
        )
    exercises = sorted(exercises, key=lambda d: d['priority'])
    return exercises


def get_selected_exercises():
    return db.SELECTED_EXERCISES


def change_exercise_selection(exercise_uuid: str, selected: bool):
    if selected:
        if exercise_uuid not in db.SELECTED_EXERCISES:
            db.SELECTED_EXERCISES.append(exercise_uuid)
    else:
        if exercise_uuid in db.SELECTED_EXERCISES:
            db.SELECTED_EXERCISES.remove(exercise_uuid)


def resetAllExerciseProgress():
    for user_id in db.USER_ID_TO_EMAIL_MAPPING.keys():
        for exercise_status in db.EXERCISES_STATUS.values():
            for task in exercise_status['tasks'].values():
                mark_task_incomplete(user_id, exercise_status['uuid'], task['uuid'])
    backup_exercises_progress()


def get_completed_tasks_for_user(user_id: int):
    completion = get_completion_for_users().get(user_id, {})
    completed_tasks = {}
    for exec_uuid, tasks in completion.items():
        completed_tasks[exec_uuid] = [task_uuid for task_uuid, completed in tasks.items() if completed]
    return completed_tasks


def get_incomplete_tasks_for_user(user_id: int):
    completion = get_completion_for_users().get(user_id, {})
    incomplete_tasks = {}
    for exec_uuid, tasks in completion.items():
        incomplete_tasks[exec_uuid] = [task_uuid for task_uuid, completed in tasks.items() if not completed]
    return incomplete_tasks


def get_available_tasks_for_user(user_id: int) -> list[str]:
    available_tasks = []
    completed = get_completed_tasks_for_user(user_id)
    incomplete = get_incomplete_tasks_for_user(user_id)
    for exec_uuid, tasks in incomplete.items():
        for task_uuid in tasks:
            requirements = db.INJECT_REQUIREMENTS_BY_INJECT_UUID[task_uuid]
            # A task is available if it has no prerequisite inject, or its prerequisite is already completed
            requirement_met = 'inject_uuid' not in requirements or requirements['inject_uuid'] in completed[exec_uuid]
            if requirement_met:
                available_tasks.append(task_uuid)
    return available_tasks


def get_model_action(data: dict):
    if 'Log' in data:
        data = data['Log']
    if 'model' in data and 'action' in data:
        return (data['model'], data['action'],)
    return (None, None,)
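
# get_model_action() and is_accepted_query() operate on the notification data
# relayed from MISP. A rough sketch of the expected shape (only the keys are
# taken from the code; the field values are illustrative assumptions):
#
#     {
#         "Log": {"model": "Event", "action": "add", "model_id": "12", "title": "...", "change": "..."},
#         "url": "/events/restSearch",
#         "request_method": "POST",
#         "user_agent": "...",
#     }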


def is_accepted_query(data: dict) -> bool:
    model, action = get_model_action(data)
    if model in ['Event', 'Attribute', 'Object', 'Tag']:
        if action in ['add', 'edit', 'delete', 'publish', 'tag']:
            # A stricter condition was considered here, but it blocks some queries:
            # if data['Log']['change'].startswith('attribute_count'):
            #     return False
            if data['Log']['change'].startswith('Validation errors:'):
                return False
            return True

    # Ignore queries issued by the dashboard itself
    if data.get('user_agent', None) == 'misp-exercise-dashboard':
        return False
    url = data.get('url', None)
    if url is not None:
        return url in [
            '/attributes/restSearch',
            '/events/restSearch',
        ]
    return False


def get_completion_for_users():
    completion_per_user = {int(user_id): {} for user_id in db.USER_ID_TO_EMAIL_MAPPING.keys()}
    for exercise_status in db.EXERCISES_STATUS.values():
        for user_id in completion_per_user.keys():
            completion_per_user[int(user_id)][exercise_status['uuid']] = {}

        for task in exercise_status['tasks'].values():
            for user_id in completion_per_user.keys():
                completion_per_user[int(user_id)][exercise_status['uuid']][task['uuid']] = False
            for entry in task['completed_by_user']:
                user_id = int(entry['user_id'])
                if user_id in completion_per_user:  # Ensure the user_id is known in USER_ID_TO_EMAIL_MAPPING
                    completion_per_user[user_id][exercise_status['uuid']][task['uuid']] = entry
    return completion_per_user
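
# Shape of the mapping returned by get_completion_for_users() (sketch):
# user id -> exercise uuid -> task uuid -> either False (not completed) or the
# completion entry appended by mark_task_completed(), e.g.
#
#     {1: {"<exercise_uuid>": {"<task_uuid>": {"user_id": 1, "timestamp": 1720000000.0, "first_completion": True}}}}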


def get_score_for_task_completion(tasks_completion: dict) -> int:
    score = 0
    for inject_uuid, completed in tasks_completion.items():
        if not completed:
            continue
        inject = db.INJECT_BY_UUID[inject_uuid]
        try:
            for inject_eval in inject['inject_evaluation']:
                score += inject_eval['score_range'][1]
        except KeyError:
            pass
    return score


def mark_task_completed(user_id: int, exercise_uuid: str, task_uuid: str):
    is_completed = any(filter(lambda x: x['user_id'] == user_id, db.EXERCISES_STATUS[exercise_uuid]['tasks'][task_uuid]['completed_by_user']))
    if not is_completed:
        db.EXERCISES_STATUS[exercise_uuid]['tasks'][task_uuid]['completed_by_user'].append({
            'user_id': user_id,
            'timestamp': time.time(),
            'first_completion': False,
        })
    # Update who was the first to complete the task
    first_completion_index = None
    first_completion_time = time.time()
    for i, entry in enumerate(db.EXERCISES_STATUS[exercise_uuid]['tasks'][task_uuid]['completed_by_user']):
        db.EXERCISES_STATUS[exercise_uuid]['tasks'][task_uuid]['completed_by_user'][i]['first_completion'] = False
        if entry['timestamp'] < first_completion_time:
            first_completion_time = entry['timestamp']
            first_completion_index = i
    db.EXERCISES_STATUS[exercise_uuid]['tasks'][task_uuid]['completed_by_user'][first_completion_index]['first_completion'] = True


def mark_task_incomplete(user_id: int, exercise_uuid: str, task_uuid: str):
    completed_without_user = list(filter(lambda x: x['user_id'] != user_id, db.EXERCISES_STATUS[exercise_uuid]['tasks'][task_uuid]['completed_by_user']))
    db.EXERCISES_STATUS[exercise_uuid]['tasks'][task_uuid]['completed_by_user'] = completed_without_user


def get_progress():
    completion_for_users = get_completion_for_users()
    progress = {}
    for user_id in completion_for_users.keys():
        if user_id not in db.USER_ID_TO_EMAIL_MAPPING:
            logger.debug(f"Unknown user id {user_id}")
            continue
        progress[user_id] = {
            'email': db.USER_ID_TO_EMAIL_MAPPING[user_id],
            'user_id': user_id,
            'exercises': {},
        }
        for exec_uuid, tasks_completion in completion_for_users[user_id].items():
            progress[user_id]['exercises'][exec_uuid] = {
                'tasks_completion': tasks_completion,
                'score': get_score_for_task_completion(tasks_completion),
                'max_score': db.EXERCISES_STATUS[exec_uuid]['max_score'],
            }
    return progress


async def check_inject(user_id: int, inject: dict, data: dict, context: dict) -> bool:
    for inject_evaluation in inject['inject_evaluation']:
        success = await inject_checker_router(user_id, inject_evaluation, data, context)
        if not success:
            logger.info(f"Task not completed[{user_id}]: {inject['uuid']}")
            return False
    mark_task_completed(user_id, inject['exercise_uuid'], inject['uuid'])
    logger.info(f"Task success[{user_id}]: {inject['uuid']}")
    return True


def is_valid_evaluation_context(user_id: int, inject_evaluation: dict, data: dict, context: dict) -> bool:
    if 'evaluation_context' not in inject_evaluation or len(inject_evaluation['evaluation_context']) == 0:
        return True
    if 'request_is_rest' in inject_evaluation['evaluation_context']:
        if 'request_is_rest' in context:
            if inject_evaluation['evaluation_context']['request_is_rest'] == context['request_is_rest']:
                return True
            else:
                logger.debug('Request type does not match state of `request_is_rest`')
                return False
        else:
            logger.debug('Unknown request type')
            return False
    return True


async def inject_checker_router(user_id: int, inject_evaluation: dict, data: dict, context: dict) -> bool:
    if not is_valid_evaluation_context(user_id, inject_evaluation, data, context):
        return False

    if 'evaluation_strategy' not in inject_evaluation:
        return False

    data_to_validate = await get_data_to_validate(user_id, inject_evaluation, data)
    if data_to_validate is None:
        logger.debug('Could not fetch data to validate')
        return False

    if inject_evaluation['evaluation_strategy'] == 'data_filtering':
        return eval_data_filtering(user_id, inject_evaluation, data_to_validate, context)
    elif inject_evaluation['evaluation_strategy'] == 'query_mirror':
        expected_data = data_to_validate['expected_data']
        data_to_validate = data_to_validate['data_to_validate']
        return eval_query_mirror(user_id, expected_data, data_to_validate, context)
    elif inject_evaluation['evaluation_strategy'] == 'query_search':
        return eval_query_search(user_id, inject_evaluation, data_to_validate, context)
    return False


async def get_data_to_validate(user_id: int, inject_evaluation: dict, data: dict) -> Union[dict, list, str, None]:
    data_to_validate = None
    if inject_evaluation['evaluation_strategy'] == 'data_filtering':
        event_id = parse_event_id_from_log(data)
        data_to_validate = await fetch_data_for_data_filtering(event_id=event_id)
    elif inject_evaluation['evaluation_strategy'] == 'query_mirror':
        performed_query = parse_performed_query_from_log(data)
        data_to_validate = await fetch_data_for_query_mirror(user_id, inject_evaluation, performed_query)
    elif inject_evaluation['evaluation_strategy'] == 'query_search':
        data_to_validate = await fetch_data_for_query_search(user_id, inject_evaluation)
    return data_to_validate


def parse_event_id_from_log(data: dict) -> Union[int, None]:
    event_id_from_change_field_regex = r".*event_id \(.*\) => \((\d+)\).*"
    event_id_from_title_field_regex = r".*from Event \((\d+)\).*"
    if 'Log' in data:
        log = data['Log']
        if 'model' in log and 'model_id' in log and log['model'] == 'Event':
            return int(log['model_id'])
        if 'change' in log:
            event_id_search = re.search(event_id_from_change_field_regex, log['change'], re.IGNORECASE)
            if event_id_search is not None:
                event_id = event_id_search.group(1)
                return int(event_id)
        if 'title' in log:
            event_id_search = re.search(event_id_from_title_field_regex, log['title'], re.IGNORECASE)
            if event_id_search is not None:
                event_id = event_id_search.group(1)
                return int(event_id)
    return None
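
# Illustrative inputs for the two regexes above (the exact MISP log wording is
# an assumption; only the captured "(12)" group matters):
#
#     change: "event_id () => (12), info () => (...)"   -> returns 12
#     title:  "... added from Event (12) ..."           -> returns 12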


def parse_performed_query_from_log(data: dict) -> Union[dict, None]:
    performed_query = {
        'request_method': '',
        'url': '',
        'payload': {},
    }
    if 'request' in data:
        request = data['request']
        performed_query['request_method'] = data.get('request_method', None)
        performed_query['url'] = data.get('url', None)
        # The logged request body is prefixed with its content type, e.g. "application/json\n\n{...}"
        if request.startswith('application/json\n\n'):
            query_raw = request.replace('application/json\n\n', '')
            try:
                payload = json.loads(query_raw)
                performed_query['payload'] = payload
            except json.JSONDecodeError:
                pass
        if performed_query['request_method'] is not None and performed_query['url'] is not None:
            return performed_query
    return None
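
# Example of the logged request this parser expects (payload values are
# illustrative assumptions):
#
#     data["request"] = 'application/json\n\n{"returnFormat": "json", "value": "8.8.8.8"}'
#
# Combined with data["request_method"] and data["url"], this yields:
#
#     {"request_method": "POST", "url": "/attributes/restSearch",
#      "payload": {"returnFormat": "json", "value": "8.8.8.8"}}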


async def fetch_data_for_data_filtering(event_id=None) -> Union[None, dict]:
    data = None
    if event_id is not None:
        data = await misp_api.getEvent(event_id)
    return data


async def fetch_data_for_query_mirror(user_id: int, inject_evaluation: dict, performed_query: dict) -> Union[None, dict]:
    data = None
    authkey = db.USER_ID_TO_AUTHKEY_MAPPING[user_id]
    if performed_query is not None:
        if 'evaluation_context' not in inject_evaluation or 'query_context' not in inject_evaluation['evaluation_context']:
            return None
        query_context = inject_evaluation['evaluation_context']['query_context']
        expected_method = query_context['request_method']
        expected_url = query_context['url']
        expected_payload = inject_evaluation['parameters'][0]
        expected_data = await misp_api.doRestQuery(authkey, expected_method, expected_url, expected_payload)
        data_to_validate = await misp_api.doRestQuery(authkey, performed_query['request_method'], performed_query['url'], performed_query['payload'])
        data = {
            'expected_data': expected_data,
            'data_to_validate': data_to_validate,
        }
    return data


async def fetch_data_for_query_search(user_id: int, inject_evaluation: dict) -> Union[None, dict]:
    authkey = db.USER_ID_TO_AUTHKEY_MAPPING[user_id]
    if 'evaluation_context' not in inject_evaluation or 'query_context' not in inject_evaluation['evaluation_context']:
        return None
    query_context = inject_evaluation['evaluation_context']['query_context']
    search_method = query_context['request_method']
    search_url = query_context['url']
    search_payload = query_context['payload']
    search_data = await misp_api.doRestQuery(authkey, search_method, search_url, search_payload)
    return search_data


@debounce_check_active_tasks(debounce_seconds=2)
async def check_active_tasks(user_id: int, data: dict, context: dict) -> bool:
    succeeded_once = False
    available_tasks = get_available_tasks_for_user(user_id)
    for task_uuid in available_tasks:
        inject = db.INJECT_BY_UUID[task_uuid]
        if inject['exercise_uuid'] not in db.SELECTED_EXERCISES:
            continue
        logger.debug(f"[{task_uuid}] :: checking: {inject['name']}")
        completed = await check_inject(user_id, inject, data, context)
        if completed:
            succeeded_once = True
    return succeeded_once