Compare commits
No commits in common. "95998e1ff90aaf1066c08478f5906ae4a1e31043" and "cad62bd3e2016613297445bff70b0a3eb7ad4c5a" have entirely different histories.
95998e1ff9...cad62bd3e2
6 changed files with 25 additions and 47 deletions
@@ -2,5 +2,4 @@ pyzmq
python-socketio
eventlet
requests
requests-cache
jq
config.py (11 changed lines)
@@ -12,14 +12,3 @@ live_logs_accepted_scope = {
'attributes': ['add', 'edit', 'delete', 'restSearch',],
'tags': '*',
}

import logging
logger = logging.getLogger('misp-exercise-dashboard')
format = '[%(levelname)s] %(asctime)s - %(message)s'
formatter = logging.Formatter(format)
logging.basicConfig(filename='misp-exercise-dashboard.log', encoding='utf-8', level=logging.DEBUG, format=format)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(formatter)
logger.addHandler(ch)
exercise.py (24 changed lines)
@@ -1,5 +1,6 @@
#!/usr/bin/env python3


import functools
import time
from collections import defaultdict
@@ -8,13 +9,10 @@ import json
import re
from typing import Union
import jq

import db
from inject_evaluator import eval_data_filtering, eval_query_comparison
import misp_api
import config
from config import logger


ACTIVE_EXERCISES_DIR = "active_exercises"
@@ -33,7 +31,7 @@ def debounce_check_active_tasks(debounce_seconds: int = 1):
func_last_execution_time[key] = now
return func(*args, **kwargs)
else:
logger.debug(f">> Debounced for `{user_id}`")
print(f">> Debounced for `{user_id}`")
return None
return wrapper
return decorator
@@ -42,7 +40,7 @@ def debounce_check_active_tasks(debounce_seconds: int = 1):
def load_exercises() -> bool:
db.ALL_EXERCISES = read_exercise_dir()
if not is_validate_exercises(db.ALL_EXERCISES):
logger.error('Issue while validating exercises')
print('Issue while validating exercises')
return False
init_inject_flow()
init_exercises_tasks()
@@ -69,14 +67,14 @@ def is_validate_exercises(exercises: list) -> bool:
for exercise in exercises:
e_uuid = exercise['exercise']['uuid']
if e_uuid in exercises_uuid:
logger.error(f"Duplicated UUID {e_uuid}. ({exercise['exercise']['name']}, {exercise_by_uuid[e_uuid]['exercise']['name']})")
print(f"Duplicated UUID {e_uuid}. ({exercise['exercise']['name']}, {exercise_by_uuid[e_uuid]['exercise']['name']})")
return False
exercises_uuid.add(e_uuid)
exercise_by_uuid[e_uuid] = exercise
for inject in exercise['injects']:
t_uuid = inject['uuid']
if t_uuid in tasks_uuid:
logger.error(f"Duplicated UUID {t_uuid}. ({inject['name']}, {task_by_uuid[t_uuid]['name']})")
print(f"Duplicated UUID {t_uuid}. ({inject['name']}, {task_by_uuid[t_uuid]['name']})")
return False
tasks_uuid.add(t_uuid)
task_by_uuid[t_uuid] = inject
@@ -88,7 +86,7 @@ def is_validate_exercises(exercises: list) -> bool:
try:
jq.compile(jq_path)
except ValueError as e:
logger.error(f"[{t_uuid} :: {inject['name']}] Could not compile jq path `{jq_path}`\n", e)
print(f"[{t_uuid} :: {inject['name']}] Could not compile jq path `{jq_path}`\n", e)
return False

return True
@@ -300,7 +298,7 @@ def check_inject(user_id: int, inject: dict, data: dict, context: dict) -> bool:
if not success:
return False
mark_task_completed(user_id, inject['exercise_uuid'], inject['uuid'])
logger.info(f"Task success: {inject['uuid']}")
print(f"Task success: {inject['uuid']}")
return True
@@ -312,10 +310,10 @@ def is_valid_evaluation_context(user_id: int, inject_evaluation: dict, data: dic
if inject_evaluation['evaluation_context']['request_is_rest'] == context['request_is_rest']:
return True
else:
logger.debug('Request type does not match state of `request_is_rest`')
print('Request type does not match state of `request_is_rest`')
return False
else:
logger.debug('Unknown request type')
print('Unknown request type')
return False
return False
@@ -328,7 +326,7 @@ def inject_checker_router(user_id: int, inject_evaluation: dict, data: dict, con
data_to_validate = get_data_to_validate(user_id, inject_evaluation, data)
if data_to_validate is None:
logger.debug('Could not fetch data to validate')
print('Could not fetch data to validate')
return False

if inject_evaluation['evaluation_strategy'] == 'data_filtering':
@@ -427,7 +425,7 @@ def check_active_tasks(user_id: int, data: dict, context: dict) -> bool:
inject = db.INJECT_BY_UUID[task_uuid]
if inject['exercise_uuid'] not in db.SELECTED_EXERCISES:
continue
logger.debug(f"[{task_uuid}] :: checking: {inject['name']}")
print(f"[{task_uuid}] :: checking: {inject['name']}")
completed = check_inject(user_id, inject, data, context)
if completed:
succeeded_once = True
inject_evaluator.py
@@ -3,7 +3,6 @@ from typing import Union
import jq
import re
import operator
from config import logger


# .Event.Attribute[] | select(.value == "evil.exe") | .Tag
@@ -134,7 +133,7 @@ def eval_data_filtering(user_id: int, inject_evaluation: dict, data: dict) -> bo
for evaluation_path, evaluation_config in evaluation_params.items():
data_to_validate = jq_extract(evaluation_path, data, evaluation_config.get('extract_type', 'first'))
if data_to_validate is None:
logger.debug('Could not extract data')
print('Could not extract data')
return False
if not condition_satisfied(evaluation_config, data_to_validate):
return False
misp_api.py (17 changed lines)
@@ -1,21 +1,14 @@
#!/usr/bin/env python3

import json
from datetime import timedelta
from typing import Union
from urllib.parse import urljoin
import requests # type: ignore
import requests.adapters # type: ignore
from requests_cache import CachedSession
from requests.packages.urllib3.exceptions import InsecureRequestWarning # type: ignore
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

from config import misp_url, misp_apikey, misp_skipssl, logger

requestSession = CachedSession(cache_name='misp_cache', expire_after=timedelta(seconds=5))
adapterCache = requests.adapters.HTTPAdapter(pool_connections=50, pool_maxsize=50)
requestSession.mount('https://', adapterCache)
requestSession.mount('http://', adapterCache)
from config import misp_url, misp_apikey, misp_skipssl


def get(url, data={}, api_key=misp_apikey):
@@ -27,9 +20,9 @@ def get(url, data={}, api_key=misp_apikey):
}
full_url = urljoin(misp_url, url)
try:
response = requestSession.get(full_url, data=data, headers=headers, verify=not misp_skipssl)
response = requests.get(full_url, data=data, headers=headers, verify=not misp_skipssl)
except requests.exceptions.ConnectionError as e:
logger.info('Could not perform request on MISP.', e)
print('Could not perform request on MISP.', e)
return None
return response.json() if response.headers['content-type'].startswith('application/json') else response.text
|
@ -43,9 +36,9 @@ def post(url, data={}, api_key=misp_apikey):
|
|||
}
|
||||
full_url = urljoin(misp_url, url)
|
||||
try:
|
||||
response = requestSession.post(full_url, data=json.dumps(data), headers=headers, verify=not misp_skipssl)
|
||||
response = requests.post(full_url, data=json.dumps(data), headers=headers, verify=not misp_skipssl)
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
logger.info('Could not perform request on MISP.', e)
|
||||
print('Could not perform request on MISP.', e)
|
||||
return None
|
||||
return response.json() if response.headers['content-type'].startswith('application/json') else response.text
|
||||
|
||||
|
|
server.py (14 changed lines)
@@ -14,7 +14,6 @@ import exercise as exercise_model
import notification as notification_model
import db
import config
from config import logger
import misp_api
@@ -58,11 +57,11 @@ app = socketio.WSGIApp(sio, static_files={

@sio.event
def connect(sid, environ):
logger.debug("Client connected: " + sid)
print("Client connected: ", sid)

@sio.event
def disconnect(sid):
logger.debug("Client disconnected: " + sid)
print("Client disconnected: ", sid)

@sio.event
def get_exercises(sid):
@@ -110,7 +109,7 @@ def toggle_verbose_mode(sid, payload):

@sio.on('*')
def any_event(event, sid, data={}):
logger.info('>> Unhandled event', event)
print('>> Unhandled event', event)

def handleMessage(topic, s, message):
data = json.loads(message)
@@ -181,18 +180,19 @@ def forward_zmq_to_socketio():
while True:
message = zsocket.recv_string()
topic, s, m = message.partition(" ")
handleMessage(topic, s, m)
try:
ZMQ_MESSAGE_COUNT += 1
handleMessage(topic, s, m)
# handleMessage(topic, s, m)
except Exception as e:
logger.error('Error handling message', e)
print('Error handling message', e)


if __name__ == "__main__":

exercises_loaded = exercise_model.load_exercises()
if not exercises_loaded:
logger.critical('Could not load exercises')
print('Could not load exercises')
sys.exit(1)

# Start the forwarding in a separate thread