Compare commits

2 commits: cad62bd3e2 ... 95998e1ff9

| Author | SHA1 | Date |
| --- | --- | --- |
| | 95998e1ff9 | |
| | af6df877e7 | |

6 changed files with 47 additions and 25 deletions
```diff
@@ -2,4 +2,5 @@ pyzmq
 python-socketio
 eventlet
 requests
+requests-cache
 jq
```
config.py (11 changed lines)

```diff
@@ -12,3 +12,14 @@ live_logs_accepted_scope = {
     'attributes': ['add', 'edit', 'delete', 'restSearch',],
     'tags': '*',
 }
+
+import logging
+logger = logging.getLogger('misp-exercise-dashboard')
+format = '[%(levelname)s] %(asctime)s - %(message)s'
+formatter = logging.Formatter(format)
+logging.basicConfig(filename='misp-exercise-dashboard.log', encoding='utf-8', level=logging.DEBUG, format=format)
+# create console handler and set level to debug
+ch = logging.StreamHandler()
+ch.setLevel(logging.INFO)
+ch.setFormatter(formatter)
+logger.addHandler(ch)
```
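For orientation (not part of the diff): with the wiring added above, records sent through the shared `misp-exercise-dashboard` logger propagate to the root file handler created by `logging.basicConfig`, so DEBUG and above land in `misp-exercise-dashboard.log`, while the extra `StreamHandler` only echoes INFO and above to the console. A minimal usage sketch, assuming the `config.py` shown above is importable:

```python
# Sketch only: assumes the config.py from this commit is on the import path.
from config import logger

logger.debug("written to misp-exercise-dashboard.log only (console handler is set to INFO)")
logger.info("written to the log file and echoed to the console")
logger.error("Issue while validating exercises")  # same pattern the print() calls below are migrated to
```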
exercise.py (24 changed lines)

```diff
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
 
-
 import functools
 import time
 from collections import defaultdict
@@ -9,10 +8,13 @@ import json
 import re
 from typing import Union
 import jq
 
 import db
 from inject_evaluator import eval_data_filtering, eval_query_comparison
 import misp_api
 import config
+from config import logger
+
+
 ACTIVE_EXERCISES_DIR = "active_exercises"
 
@@ -31,7 +33,7 @@ def debounce_check_active_tasks(debounce_seconds: int = 1):
                 func_last_execution_time[key] = now
                 return func(*args, **kwargs)
             else:
-                print(f">> Debounced for `{user_id}`")
+                logger.debug(f">> Debounced for `{user_id}`")
                 return None
         return wrapper
     return decorator
@@ -40,7 +42,7 @@ def debounce_check_active_tasks(debounce_seconds: int = 1):
 def load_exercises() -> bool:
     db.ALL_EXERCISES = read_exercise_dir()
     if not is_validate_exercises(db.ALL_EXERCISES):
-        print('Issue while validating exercises')
+        logger.error('Issue while validating exercises')
         return False
     init_inject_flow()
     init_exercises_tasks()
@@ -67,14 +69,14 @@ def is_validate_exercises(exercises: list) -> bool:
     for exercise in exercises:
         e_uuid = exercise['exercise']['uuid']
         if e_uuid in exercises_uuid:
-            print(f"Duplicated UUID {e_uuid}. ({exercise['exercise']['name']}, {exercise_by_uuid[e_uuid]['exercise']['name']})")
+            logger.error(f"Duplicated UUID {e_uuid}. ({exercise['exercise']['name']}, {exercise_by_uuid[e_uuid]['exercise']['name']})")
             return False
         exercises_uuid.add(e_uuid)
         exercise_by_uuid[e_uuid] = exercise
         for inject in exercise['injects']:
             t_uuid = inject['uuid']
             if t_uuid in tasks_uuid:
-                print(f"Duplicated UUID {t_uuid}. ({inject['name']}, {task_by_uuid[t_uuid]['name']})")
+                logger.error(f"Duplicated UUID {t_uuid}. ({inject['name']}, {task_by_uuid[t_uuid]['name']})")
                 return False
             tasks_uuid.add(t_uuid)
             task_by_uuid[t_uuid] = inject
@@ -86,7 +88,7 @@ def is_validate_exercises(exercises: list) -> bool:
             try:
                 jq.compile(jq_path)
             except ValueError as e:
-                print(f"[{t_uuid} :: {inject['name']}] Could not compile jq path `{jq_path}`\n", e)
+                logger.error(f"[{t_uuid} :: {inject['name']}] Could not compile jq path `{jq_path}`\n", e)
                 return False
 
     return True
@@ -298,7 +300,7 @@ def check_inject(user_id: int, inject: dict, data: dict, context: dict) -> bool:
         if not success:
             return False
     mark_task_completed(user_id, inject['exercise_uuid'], inject['uuid'])
-    print(f"Task success: {inject['uuid']}")
+    logger.info(f"Task success: {inject['uuid']}")
     return True
 
 
@@ -310,10 +312,10 @@ def is_valid_evaluation_context(user_id: int, inject_evaluation: dict, data: dic
         if inject_evaluation['evaluation_context']['request_is_rest'] == context['request_is_rest']:
             return True
         else:
-            print('Request type does not match state of `request_is_rest`')
+            logger.debug('Request type does not match state of `request_is_rest`')
             return False
     else:
-        print('Unknown request type')
+        logger.debug('Unknown request type')
        return False
     return False
 
@@ -326,7 +328,7 @@ def inject_checker_router(user_id: int, inject_evaluation: dict, data: dict, con
 
     data_to_validate = get_data_to_validate(user_id, inject_evaluation, data)
     if data_to_validate is None:
-        print('Could not fetch data to validate')
+        logger.debug('Could not fetch data to validate')
         return False
 
     if inject_evaluation['evaluation_strategy'] == 'data_filtering':
@@ -425,7 +427,7 @@ def check_active_tasks(user_id: int, data: dict, context: dict) -> bool:
         inject = db.INJECT_BY_UUID[task_uuid]
         if inject['exercise_uuid'] not in db.SELECTED_EXERCISES:
             continue
-        print(f"[{task_uuid}] :: checking: {inject['name']}")
+        logger.debug(f"[{task_uuid}] :: checking: {inject['name']}")
         completed = check_inject(user_id, inject, data, context)
         if completed:
             succeeded_once = True
```
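The `@@ -31,7 +33,7 @@` hunk above only shows the tail of `debounce_check_active_tasks`. For reference, here is a minimal sketch of how a per-user debounce decorator with that shape is typically structured; this is not the committed code, and names beyond those visible in the diff are assumptions:

```python
# Illustrative sketch only; the real body in exercise.py may differ.
import functools
import logging
import time

logger = logging.getLogger('misp-exercise-dashboard')  # in the repo this is `from config import logger`


def debounce_check_active_tasks(debounce_seconds: int = 1):
    """Drop repeated calls for the same user arriving within `debounce_seconds` of the last accepted one."""
    func_last_execution_time = {}

    def decorator(func):
        @functools.wraps(func)
        def wrapper(user_id, *args, **kwargs):
            now = time.time()
            key = user_id  # assumption: calls are debounced per user id
            last = func_last_execution_time.get(key)
            if last is None or now - last >= debounce_seconds:
                func_last_execution_time[key] = now
                return func(user_id, *args, **kwargs)
            else:
                logger.debug(f">> Debounced for `{user_id}`")
                return None
        return wrapper
    return decorator
```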
inject_evaluator.py

```diff
@@ -3,6 +3,7 @@ from typing import Union
 import jq
 import re
 import operator
+from config import logger
 
 
 # .Event.Attribute[] | select(.value == "evil.exe") | .Tag
@@ -133,7 +134,7 @@ def eval_data_filtering(user_id: int, inject_evaluation: dict, data: dict) -> bo
     for evaluation_path, evaluation_config in evaluation_params.items():
         data_to_validate = jq_extract(evaluation_path, data, evaluation_config.get('extract_type', 'first'))
         if data_to_validate is None:
-            print('Could not extract data')
+            logger.debug('Could not extract data')
             return False
         if not condition_satisfied(evaluation_config, data_to_validate):
             return False
```
misp_api.py (17 changed lines)

```diff
@@ -1,14 +1,21 @@
 #!/usr/bin/env python3
 
 import json
+from datetime import timedelta
 from typing import Union
 from urllib.parse import urljoin
 import requests  # type: ignore
 import requests.adapters  # type: ignore
+from requests_cache import CachedSession
 from requests.packages.urllib3.exceptions import InsecureRequestWarning  # type: ignore
 requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
 
-from config import misp_url, misp_apikey, misp_skipssl
+from config import misp_url, misp_apikey, misp_skipssl, logger
 
+requestSession = CachedSession(cache_name='misp_cache', expire_after=timedelta(seconds=5))
+adapterCache = requests.adapters.HTTPAdapter(pool_connections=50, pool_maxsize=50)
+requestSession.mount('https://', adapterCache)
+requestSession.mount('http://', adapterCache)
+
 
 def get(url, data={}, api_key=misp_apikey):
@@ -20,9 +27,9 @@ def get(url, data={}, api_key=misp_apikey):
     }
     full_url = urljoin(misp_url, url)
     try:
-        response = requests.get(full_url, data=data, headers=headers, verify=not misp_skipssl)
+        response = requestSession.get(full_url, data=data, headers=headers, verify=not misp_skipssl)
     except requests.exceptions.ConnectionError as e:
-        print('Could not perform request on MISP.', e)
+        logger.info('Could not perform request on MISP.', e)
         return None
     return response.json() if response.headers['content-type'].startswith('application/json') else response.text
 
@@ -36,9 +43,9 @@ def post(url, data={}, api_key=misp_apikey):
     }
     full_url = urljoin(misp_url, url)
     try:
-        response = requests.post(full_url, data=json.dumps(data), headers=headers, verify=not misp_skipssl)
+        response = requestSession.post(full_url, data=json.dumps(data), headers=headers, verify=not misp_skipssl)
     except requests.exceptions.ConnectionError as e:
-        print('Could not perform request on MISP.', e)
+        logger.info('Could not perform request on MISP.', e)
        return None
     return response.json() if response.headers['content-type'].startswith('application/json') else response.text
 
```
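A hedged illustration of what the `CachedSession` wiring above changes in practice: with `expire_after=timedelta(seconds=5)`, identical requests made through `requestSession` within a five second window are answered from the local `misp_cache` backend instead of reaching MISP again. A small sketch, assuming `requests-cache` (added to the requirements above) is installed; the URL is only a placeholder and would need a reachable host:

```python
# Sketch only: demonstrates the caching behaviour, not the dashboard's actual calls.
from datetime import timedelta

from requests_cache import CachedSession

session = CachedSession(cache_name='misp_cache', expire_after=timedelta(seconds=5))

first = session.get('https://misp.example/servers/getVersion')   # placeholder endpoint, goes to the server
second = session.get('https://misp.example/servers/getVersion')  # repeated within 5s, served from the cache
print(first.from_cache, second.from_cache)  # expected: False True while the cached entry is still fresh
```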
server.py (14 changed lines)

```diff
@@ -14,6 +14,7 @@ import exercise as exercise_model
 import notification as notification_model
 import db
 import config
+from config import logger
 import misp_api
 
 
@@ -57,11 +58,11 @@ app = socketio.WSGIApp(sio, static_files={
 
 @sio.event
 def connect(sid, environ):
-    print("Client connected: ", sid)
+    logger.debug("Client connected: " + sid)
 
 @sio.event
 def disconnect(sid):
-    print("Client disconnected: ", sid)
+    logger.debug("Client disconnected: " + sid)
 
 @sio.event
 def get_exercises(sid):
@@ -109,7 +110,7 @@ def toggle_verbose_mode(sid, payload):
 
 @sio.on('*')
 def any_event(event, sid, data={}):
-    print('>> Unhandled event', event)
+    logger.info('>> Unhandled event', event)
 
 def handleMessage(topic, s, message):
     data = json.loads(message)
@@ -180,19 +181,18 @@ def forward_zmq_to_socketio():
     while True:
         message = zsocket.recv_string()
         topic, s, m = message.partition(" ")
-        handleMessage(topic, s, m)
         try:
             ZMQ_MESSAGE_COUNT += 1
-            # handleMessage(topic, s, m)
+            handleMessage(topic, s, m)
         except Exception as e:
-            print('Error handling message', e)
+            logger.error('Error handling message', e)
 
 
 if __name__ == "__main__":
 
     exercises_loaded = exercise_model.load_exercises()
     if not exercises_loaded:
-        print('Could not load exercises')
+        logger.critical('Could not load exercises')
         sys.exit(1)
 
     # Start the forwarding in a separate thread
```
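One behavioral note on the `@@ -180,19 +181,18 @@` hunk: before this change `handleMessage` was called outside any exception handling, so a single malformed ZMQ message could raise and end the forwarding loop; now the call sits inside the `try` block, failures are logged, and the loop keeps consuming messages. A reduced sketch of that pattern, with placeholder names rather than the dashboard's actual globals:

```python
# Sketch only: zsocket, handle_message and logger stand in for the module-level objects in server.py.
def forward_loop(zsocket, handle_message, logger):
    while True:
        message = zsocket.recv_string()
        topic, _, body = message.partition(" ")
        try:
            handle_message(topic, _, body)  # a bad message is logged instead of killing the loop
        except Exception as e:
            logger.error('Error handling message: %s', e)
```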