mirror of
https://github.com/ail-project/ail-framework.git
synced 2024-11-10 08:38:28 +00:00
commit
ab45ac0fef
18 changed files with 1812 additions and 331 deletions
7
.gitignore
vendored
7
.gitignore
vendored
|
@ -31,6 +31,8 @@ var/www/static/
|
|||
!var/www/static/js/trendingchart.js
|
||||
var/www/templates/header.html
|
||||
var/www/submitted
|
||||
var/www/server.crt
|
||||
var/www/server.key
|
||||
|
||||
# Local config
|
||||
bin/packages/config.cfg
|
||||
|
@ -40,6 +42,11 @@ configs/update.cfg
|
|||
update/current_version
|
||||
files
|
||||
|
||||
# Helper
|
||||
bin/helper/gen_cert/rootCA.*
|
||||
bin/helper/gen_cert/server.*
|
||||
|
||||
|
||||
# Pystemon archives
|
||||
pystemon/archives
|
||||
|
||||
|
|
27
OVERVIEW.md
27
OVERVIEW.md
|
@ -38,6 +38,21 @@ Redis and ARDB overview
|
|||
| failed_login_ip:**ip** | **nb login failed** | TTL
|
||||
| failed_login_user_id:**user_id** | **nb login failed** | TTL
|
||||
|
||||
##### Item Import:
|
||||
|
||||
| Key | Value |
|
||||
| ------ | ------ |
|
||||
| **uuid**:nb_total | **nb total** | TTL *(if imported)*
|
||||
| **uuid**:nb_end | **nb** | TTL *(if imported)*
|
||||
| **uuid**:nb_sucess | **nb success** | TTL *(if imported)*
|
||||
| **uuid**:end | **0 (in progress) or (item imported)** | TTL *(if imported)*
|
||||
| **uuid**:processing | **process status: 0 or 1** | TTL *(if imported)*
|
||||
| **uuid**:error | **error message** | TTL *(if imported)*
|
||||
|
||||
| Set Key | Value |
|
||||
| ------ | ------ |
|
||||
| **uuid**:paste_submit_link | **item_path** | TTL *(if imported)*
|
||||
|
||||
## DB0 - Core:
|
||||
|
||||
##### Update keys:
|
||||
|
@ -82,6 +97,18 @@ Redis and ARDB overview
|
|||
| ------ | ------ | ------ |
|
||||
| ail:all_role | **role** | **int, role priority (1=admin)** |
|
||||
|
||||
##### Item Import:
|
||||
| Key | Value |
|
||||
| ------ | ------ |
|
||||
| **uuid**:isfile | **boolean** |
|
||||
| **uuid**:paste_content | **item_content** |
|
||||
|
||||
| Set Key | Value |
|
||||
| ------ | ------ |
|
||||
| submitted:uuid | **uuid** |
|
||||
| **uuid**:ltags | **tag** |
|
||||
| **uuid**:ltagsgalaxies | **tag** |
|
||||
|
||||
## DB2 - TermFreq:
|
||||
|
||||
##### Set:
|
||||
|
|
|
@ -66,8 +66,8 @@ function helptext {
|
|||
"$DEFAULT"
|
||||
This script launch:
|
||||
"$CYAN"
|
||||
- All the ZMQ queuing modules.
|
||||
- All the ZMQ processing modules.
|
||||
- All the queuing modules.
|
||||
- All the processing modules.
|
||||
- All Redis in memory servers.
|
||||
- All ARDB on disk servers.
|
||||
"$DEFAULT"
|
||||
|
@ -143,7 +143,7 @@ function launching_scripts {
|
|||
|
||||
screen -dmS "Script_AIL"
|
||||
sleep 0.1
|
||||
echo -e $GREEN"\t* Launching ZMQ scripts"$DEFAULT
|
||||
echo -e $GREEN"\t* Launching scripts"$DEFAULT
|
||||
|
||||
screen -S "Script_AIL" -X screen -t "ModuleInformation" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./ModulesInformationV2.py -k 0 -c 1; read x"
|
||||
sleep 0.1
|
||||
|
|
|
@ -40,3 +40,13 @@ class Date(object):
|
|||
comp_month = str(computed_date.month).zfill(2)
|
||||
comp_day = str(computed_date.day).zfill(2)
|
||||
return comp_year + comp_month + comp_day
|
||||
|
||||
def date_add_day(date, num_day=1):
    """Return the YYYYMMDD date string *num_day* days after *date* (YYYYMMDD)."""
    base = datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8]))
    shifted = base + datetime.timedelta(num_day)
    return shifted.strftime('%Y%m%d')
|
||||
|
||||
def date_substract_day(date, num_day=1):
    """Return the YYYYMMDD date string *num_day* days before *date* (YYYYMMDD)."""
    base = datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8]))
    shifted = base - datetime.timedelta(num_day)
    return shifted.strftime('%Y%m%d')
|
||||
|
|
76
bin/packages/Import_helper.py
Executable file
76
bin/packages/Import_helper.py
Executable file
|
@ -0,0 +1,76 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import uuid
|
||||
import redis
|
||||
|
||||
import Flask_config
|
||||
|
||||
r_serv_db = Flask_config.r_serv_db
|
||||
r_serv_log_submit = Flask_config.r_serv_log_submit
|
||||
|
||||
def is_valid_uuid_v4(UUID):
    """Return True if *UUID* is a valid version-4 UUID (dashes optional).

    uuid.UUID(hex=..., version=4) forces the version/variant bits, so
    comparing the resulting hex with the input rejects non-v4 UUIDs.
    """
    UUID = UUID.replace('-', '')
    try:
        uuid_test = uuid.UUID(hex=UUID, version=4)
        return uuid_test.hex == UUID
    # BUG FIX: the original bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit; only malformed hex / non-string input should be caught.
    except (ValueError, AttributeError, TypeError):
        return False
|
||||
|
||||
def create_import_queue(tags, galaxy, paste_content, UUID, password=None, isfile = False):
    """Queue a new submission for import and return its UUID.

    Persists the submitted tags/galaxies/content on disk and initialises
    the progress counters consumed by check_import_status().
    """
    # save temp values on disk (consumed by the importer process)
    for tag in tags:
        r_serv_db.sadd(UUID + ':ltags', tag)
    for tag in galaxy:
        r_serv_db.sadd(UUID + ':ltagsgalaxies', tag)

    r_serv_db.set(UUID + ':paste_content', paste_content)
    if password:
        r_serv_db.set(UUID + ':password', password)
    r_serv_db.set(UUID + ':isfile', isfile)

    # initialise progress counters (nb_total == -1 means "not started")
    for suffix, value in ((':end', 0), (':processing', 0), (':nb_total', -1),
                          (':nb_end', 0), (':nb_sucess', 0)):
        r_serv_log_submit.set(UUID + suffix, value)

    # register the pending submission
    r_serv_db.sadd('submitted:uuid', UUID)
    return UUID
|
||||
|
||||
def check_import_status(UUID):
    """Return (status dict, HTTP code) for a submitted import UUID."""
    if not is_valid_uuid_v4(UUID):
        return ({'status': 'error', 'reason': 'Invalid uuid'}, 400)

    processing = r_serv_log_submit.get(UUID + ':processing')
    if not processing:
        # no processing flag means the UUID was never queued (or expired)
        return ({'status': 'error', 'reason': 'Unknown uuid'}, 404)

    items_id = list(r_serv_log_submit.smembers(UUID + ':paste_submit_link'))
    error = r_serv_log_submit.get(UUID + ':error')
    end = r_serv_log_submit.get(UUID + ':end')

    dict_import_status = {}
    if items_id:
        dict_import_status['items'] = items_id
    if error:
        dict_import_status['error'] = error

    # processing=='0' -> still queued; end=='0' -> importer running
    if processing == '0':
        status = 'in queue'
    elif end == '0':
        status = 'in progress'
    else:
        status = 'imported'
    dict_import_status['status'] = status

    return (dict_import_status, 200)
|
93
bin/packages/Item.py
Executable file
93
bin/packages/Item.py
Executable file
|
@ -0,0 +1,93 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import gzip
|
||||
import redis
|
||||
|
||||
import Flask_config
|
||||
import Date
|
||||
import Tag
|
||||
|
||||
PASTES_FOLDER = Flask_config.PASTES_FOLDER
|
||||
r_cache = Flask_config.r_cache
|
||||
|
||||
def exist_item(item_id):
    """Return True if *item_id* maps to an existing file under PASTES_FOLDER."""
    return os.path.isfile(os.path.join(PASTES_FOLDER, item_id))
|
||||
|
||||
def get_item_date(item_id):
    """Extract the YYYYMMDD date from an item path ending .../YYYY/MM/DD/<file>."""
    year, month, day = item_id.split('/')[-4:-1]
    return '{}{}{}'.format(year, month, day)
|
||||
|
||||
def get_item_size(item_id):
    """Return the on-disk size of the item in KiB, rounded to 2 decimals."""
    size_bytes = os.path.getsize(os.path.join(PASTES_FOLDER, item_id))
    return round(size_bytes / 1024.0, 2)
|
||||
|
||||
def get_lines_info(item_id, item_content=None):
    """Return line statistics for an item: {'nb': <count>, 'max_length': <longest>}.

    If *item_content* is falsy the content is fetched with get_item_content().
    """
    if not item_content:
        item_content = get_item_content(item_id)
    lines = item_content.splitlines()
    # removed unused `line_id` counter from the original implementation;
    # default=0 keeps the original behavior for empty content
    max_length = max((len(line) for line in lines), default=0)
    return {'nb': len(lines), 'max_length': max_length}
|
||||
|
||||
|
||||
def get_item_content(item_id):
    """Return the decompressed content of an item, caching it for 5 minutes.

    Falls back to '' when the gzip file cannot be read; always returns a str.
    """
    item_full_path = os.path.join(PASTES_FOLDER, item_id)
    # try the cache first; a decode error or a cache outage is treated as a miss
    try:
        item_content = r_cache.get(item_full_path)
    except UnicodeDecodeError:
        item_content = None
    except Exception as e:
        item_content = None
    if item_content is None:
        # cache miss: read and decompress the item, then cache it briefly
        try:
            with gzip.open(item_full_path, 'r') as f:
                item_content = f.read().decode()
                r_cache.set(item_full_path, item_content)
                r_cache.expire(item_full_path, 300)
        except:
            # unreadable or missing file: best-effort, return empty content
            item_content = ''
    return str(item_content)
|
||||
|
||||
# API
|
||||
def get_item(request_dict):
    """API: build an item dict from the requested fields.

    request_dict keys: 'id' (mandatory), and booleans 'date'/'tags'
    (default True), 'size'/'content'/'lines' (default False).
    Returns a (json-serializable dict, HTTP status code) tuple.
    """
    if not request_dict:
        # BUG FIX: the original returned Response(...), an undefined name in
        # this module; every other branch returns a plain (dict, code) tuple.
        return ({'status': 'error', 'reason': 'Malformed JSON'}, 400)

    item_id = request_dict.get('id', None)
    if not item_id:
        return ({'status': 'error', 'reason': 'Mandatory parameter(s) not provided'}, 400)
    if not exist_item(item_id):
        return ({'status': 'error', 'reason': 'Item not found'}, 404)

    dict_item = {}
    dict_item['id'] = item_id
    if request_dict.get('date', True):
        dict_item['date'] = get_item_date(item_id)
    if request_dict.get('tags', True):
        dict_item['tags'] = Tag.get_item_tags(item_id)

    if request_dict.get('size', False):
        dict_item['size'] = get_item_size(item_id)

    if request_dict.get('content', False):
        # UTF-8 output, # TODO: use base64
        dict_item['content'] = get_item_content(item_id)

    if request_dict.get('lines', False):
        # BUG FIX: the original passed the literal string 'None' when content
        # was not requested, so line stats were computed on the word "None".
        # Passing None makes get_lines_info() fetch the real content.
        dict_item['lines'] = get_lines_info(item_id, dict_item.get('content', None))

    return (dict_item, 200)
|
|
@ -115,6 +115,17 @@ class Paste(object):
|
|||
self.p_duplicate = None
|
||||
self.p_tags = None
|
||||
|
||||
def get_item_dict(self):
    """Serialize this paste as a dict: id, date, content and optional tags."""
    dict_item = {
        'id': self.p_rel_path,
        'date': str(self.p_date),
        'content': self.get_p_content(),
    }
    tags = self._get_p_tags()
    if tags:
        dict_item['tags'] = tags
    return dict_item
|
||||
|
||||
|
||||
def get_p_content(self):
|
||||
"""
|
||||
Returning the content of the Paste
|
||||
|
@ -321,8 +332,8 @@ class Paste(object):
|
|||
return self.store_metadata.scard('dup:'+self.p_path) + self.store_metadata.scard('dup:'+self.p_rel_path)
|
||||
|
||||
def _get_p_tags(self):
    """Return the item's tags as a list, or the string '[]' when absent.

    BUG FIX: removed leftover broken lines from this span — a smembers()
    call referencing the undefined names `path` and `tag`, and the invalid
    attribute access `self.self.p_tags`.
    """
    self.p_tags = self.store_metadata.smembers('tag:'+self.p_rel_path)
    # NOTE(review): smembers() returns a (possibly empty) set, never None,
    # so the '[]' fallback looks like dead code — confirm before removing.
    if self.p_tags is not None:
        return list(self.p_tags)
    else:
        return '[]'
|
||||
|
|
222
bin/packages/Tag.py
Executable file
222
bin/packages/Tag.py
Executable file
|
@ -0,0 +1,222 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import redis
|
||||
|
||||
import Flask_config
|
||||
import Date
|
||||
import Item
|
||||
|
||||
from pytaxonomies import Taxonomies
|
||||
from pymispgalaxies import Galaxies, Clusters
|
||||
|
||||
r_serv_tags = Flask_config.r_serv_tags
|
||||
r_serv_metadata = Flask_config.r_serv_metadata
|
||||
|
||||
def get_taxonomie_from_tag(tag):
    """Return the taxonomy name: everything before the first ':' in *tag*."""
    taxonomie, _, _ = tag.partition(':')
    return taxonomie
|
||||
|
||||
def get_galaxy_from_tag(tag):
    """Return the galaxy type from a tag shaped 'misp-galaxy:<type>="<value>"'."""
    after_namespace = tag.split(':')[1]
    return after_namespace.split('=')[0]
|
||||
|
||||
def get_active_taxonomies():
    """Return the set of taxonomies currently enabled in AIL."""
    active = r_serv_tags.smembers('active_taxonomies')
    return active
|
||||
|
||||
def get_active_galaxies():
    """Return the set of galaxies currently enabled in AIL."""
    active = r_serv_tags.smembers('active_galaxies')
    return active
|
||||
|
||||
def is_taxonomie_tag_enabled(taxonomie, tag):
    """Return True if *tag* is enabled for the given taxonomy."""
    enabled_tags = r_serv_tags.smembers('active_tag_' + taxonomie)
    return tag in enabled_tags
|
||||
|
||||
def is_galaxy_tag_enabled(galaxy, tag):
    """Return True if *tag* is enabled for the given galaxy."""
    enabled_tags = r_serv_tags.smembers('active_tag_galaxies_' + galaxy)
    return tag in enabled_tags
|
||||
|
||||
# Check if tags are enabled in AIL
|
||||
# Check if tags are enabled in AIL
def is_valid_tags_taxonomies_galaxy(list_tags, list_tags_galaxy):
    """Return True only if every tag belongs to an active, enabled
    taxonomy/galaxy; False at the first invalid tag."""
    if list_tags:
        active_taxonomies = get_active_taxonomies()
        for tag in list_tags:
            taxonomie = get_taxonomie_from_tag(tag)
            # `or` short-circuits, so the enabled-check only runs for
            # active taxonomies (same order as the original checks)
            if taxonomie not in active_taxonomies or not is_taxonomie_tag_enabled(taxonomie, tag):
                return False

    if list_tags_galaxy:
        active_galaxies = get_active_galaxies()
        for tag in list_tags_galaxy:
            galaxy = get_galaxy_from_tag(tag)
            if galaxy not in active_galaxies or not is_galaxy_tag_enabled(galaxy, tag):
                return False
    return True
|
||||
|
||||
def get_tag_metadata(tag):
    """Return first_seen/last_seen metadata for *tag* as a dict."""
    meta_key = 'tag_metadata:{}'.format(tag)
    return {'tag': tag,
            'first_seen': r_serv_tags.hget(meta_key, 'first_seen'),
            'last_seen': r_serv_tags.hget(meta_key, 'last_seen')}
|
||||
|
||||
def is_tag_in_all_tag(tag):
    """Return True if *tag* has ever been used in AIL."""
    return True if r_serv_tags.sismember('list_tags', tag) else False
|
||||
|
||||
def get_all_tags():
    """Return every tag ever used in AIL, as a list."""
    all_tags = r_serv_tags.smembers('list_tags')
    return list(all_tags)
|
||||
|
||||
def get_item_tags(item_id):
    """Return the item's tags as a list, or the string '[]' when it has none."""
    tags = r_serv_metadata.smembers('tag:' + item_id)
    return list(tags) if tags else '[]'
|
||||
|
||||
# TEMPLATE + API QUERY
|
||||
# TEMPLATE + API QUERY
def add_items_tag(tags=[], galaxy_tags=[], item_id=None):
    """Add taxonomy *tags* and *galaxy_tags* to an item.

    Returns ({'id':..., 'tags':[added tags]}, 200) on success, or an
    error (dict, HTTP code) tuple when the item is missing, no tags were
    supplied, or a tag is not enabled in AIL.
    """
    res_dict = {}
    # BUG FIX: identity check instead of `== None` (PEP 8; avoids __eq__ surprises)
    if item_id is None:
        return ({'status': 'error', 'reason': 'Item id not found'}, 404)
    if not tags and not galaxy_tags:
        return ({'status': 'error', 'reason': 'Tags or Galaxy not specified'}, 400)

    res_dict['tags'] = []
    for tag in tags:
        taxonomie = get_taxonomie_from_tag(tag)
        if is_taxonomie_tag_enabled(taxonomie, tag):
            add_item_tag(tag, item_id)
            res_dict['tags'].append(tag)
        else:
            # NOTE(review): tags added before this point are not rolled back
            return ({'status': 'error', 'reason': 'Tags or Galaxy not enabled'}, 400)

    for tag in galaxy_tags:
        galaxy = get_galaxy_from_tag(tag)
        if is_galaxy_tag_enabled(galaxy, tag):
            add_item_tag(tag, item_id)
            res_dict['tags'].append(tag)
        else:
            return ({'status': 'error', 'reason': 'Tags or Galaxy not enabled'}, 400)

    res_dict['id'] = item_id
    return (res_dict, 200)
|
||||
|
||||
|
||||
def add_item_tag(tag, item_path):
    """Tag an item and refresh the tag's global metadata.

    Indexes the item under <tag>:<date>, bumps the daily counter, and
    updates the tag's first_seen/last_seen dates.
    """
    item_date = int(Item.get_item_date(item_path))

    # add tag
    r_serv_metadata.sadd('tag:{}'.format(item_path), tag)
    r_serv_tags.sadd('{}:{}'.format(tag, item_date), item_path)

    r_serv_tags.hincrby('daily_tags:{}'.format(item_date), tag, 1)

    # BUG FIX: first_seen was read from the 'last_seen' hash field,
    # so first_seen was never initialised correctly for existing tags.
    tag_first_seen = r_serv_tags.hget('tag_metadata:{}'.format(tag), 'first_seen')
    if tag_first_seen is None:
        # sentinel larger than any YYYYMMDD so the first item always wins
        tag_first_seen = 99999999
    else:
        tag_first_seen = int(tag_first_seen)
    tag_last_seen = r_serv_tags.hget('tag_metadata:{}'.format(tag), 'last_seen')
    if tag_last_seen is None:
        tag_last_seen = 0
    else:
        tag_last_seen = int(tag_last_seen)

    # add new tag in list of all used tags
    r_serv_tags.sadd('list_tags', tag)

    # update first_seen
    if item_date < tag_first_seen:
        r_serv_tags.hset('tag_metadata:{}'.format(tag), 'first_seen', item_date)

    # update last_seen
    if item_date > tag_last_seen:
        r_serv_tags.hset('tag_metadata:{}'.format(tag), 'last_seen', item_date)
|
||||
|
||||
# API QUERY
|
||||
# API QUERY
def remove_item_tags(tags=[], item_id=None):
    """Remove a list of *tags* from an item, stopping at the first failure.

    Returns ({'id':..., 'tags':[removed tags]}, 200) on success, or the
    first error (dict, HTTP code) tuple from remove_item_tag().
    """
    # BUG FIX: identity check instead of `== None` (PEP 8)
    if item_id is None:
        return ({'status': 'error', 'reason': 'Item id not found'}, 404)
    if not tags:
        return ({'status': 'error', 'reason': 'No Tag(s) specified'}, 400)

    dict_res = {}
    dict_res['tags'] = []
    for tag in tags:
        res = remove_item_tag(tag, item_id)
        if res[1] != 200:
            # propagate the error; already-removed tags are not restored
            return res
        else:
            dict_res['tags'].append(tag)
    dict_res['id'] = item_id
    return (dict_res, 200)
|
||||
|
||||
# TEMPLATE + API QUERY
|
||||
# TEMPLATE + API QUERY
def remove_item_tag(tag, item_id):
    """Remove one *tag* from an item and refresh the tag's metadata.

    Returns ({'status': 'success'}, 200), or a 400 error tuple when the
    item was not actually tagged on that date.
    """
    item_date = int(Item.get_item_date(item_id))

    # remove tag
    r_serv_metadata.srem('tag:{}'.format(item_id), tag)
    res = r_serv_tags.srem('{}:{}'.format(tag, item_date), item_id)

    if res == 1:
        # no tag left for this day: drop the daily counter entry
        if int(r_serv_tags.hget('daily_tags:{}'.format(item_date), tag)) == 1:
            r_serv_tags.hdel('daily_tags:{}'.format(item_date), tag)
        else:
            r_serv_tags.hincrby('daily_tags:{}'.format(item_date), tag, -1)

        # BUG FIX: first_seen was read from the 'last_seen' hash field,
        # making the first_seen update below a no-op in most cases.
        tag_first_seen = int(r_serv_tags.hget('tag_metadata:{}'.format(tag), 'first_seen'))
        tag_last_seen = int(r_serv_tags.hget('tag_metadata:{}'.format(tag), 'last_seen'))
        # update first_seen/last_seen
        if item_date == tag_first_seen:
            update_tag_first_seen(tag, tag_first_seen, tag_last_seen)
        if item_date == tag_last_seen:
            update_tag_last_seen(tag, tag_first_seen, tag_last_seen)
        return ({'status': 'success'}, 200)
    else:
        return ({'status': 'error', 'reason': 'Item id or tag not found'}, 400)
|
||||
|
||||
def update_tag_first_seen(tag, tag_first_seen, tag_last_seen):
    """Walk the tag's first_seen date forward to the next day that still
    holds this tag, recursing day by day towards last_seen.

    Called after a removal when the removed item carried the tag's
    first_seen date. When the window collapses and no item remains,
    the tag is dropped from 'list_tags' and its metadata is deleted.
    """
    if tag_first_seen == tag_last_seen:
        if r_serv_tags.scard('{}:{}'.format(tag, tag_first_seen)) > 0:
            r_serv_tags.hset('tag_metadata:{}'.format(tag), 'first_seen', tag_first_seen)
        # no tag in db
        else:
            r_serv_tags.srem('list_tags', tag)
            r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'first_seen')
            r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'last_seen')
    else:
        if r_serv_tags.scard('{}:{}'.format(tag, tag_first_seen)) > 0:
            r_serv_tags.hset('tag_metadata:{}'.format(tag), 'first_seen', tag_first_seen)
        else:
            # NOTE(review): Date.date_add_day slices date[0:4] and expects a
            # YYYYMMDD *string*, but callers pass tag_first_seen as an int —
            # confirm this does not raise TypeError before relying on it.
            tag_first_seen = Date.date_add_day(tag_first_seen)
            update_tag_first_seen(tag, tag_first_seen, tag_last_seen)
|
||||
|
||||
def update_tag_last_seen(tag, tag_first_seen, tag_last_seen):
    """Walk the tag's last_seen date backward to the previous day that
    still holds this tag, recursing day by day towards first_seen.

    Mirror of update_tag_first_seen(); deletes the tag's metadata when
    the first_seen/last_seen window collapses with no items left.
    """
    if tag_first_seen == tag_last_seen:
        if r_serv_tags.scard('{}:{}'.format(tag, tag_last_seen)) > 0:
            r_serv_tags.hset('tag_metadata:{}'.format(tag), 'last_seen', tag_last_seen)
        # no tag in db
        else:
            r_serv_tags.srem('list_tags', tag)
            r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'first_seen')
            r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'last_seen')
    else:
        if r_serv_tags.scard('{}:{}'.format(tag, tag_last_seen)) > 0:
            r_serv_tags.hset('tag_metadata:{}'.format(tag), 'last_seen', tag_last_seen)
        else:
            # NOTE(review): Date.date_substract_day expects a YYYYMMDD
            # *string* (it slices date[0:4]), but callers pass an int here —
            # confirm before relying on the recursion.
            tag_last_seen = Date.date_substract_day(tag_last_seen)
            update_tag_last_seen(tag, tag_first_seen, tag_last_seen)
|
|
@ -47,7 +47,11 @@ def create_paste(uuid, paste_content, ltags, ltagsgalaxies, name):
|
|||
r_serv_log_submit.hincrby("mixer_cache:list_feeder", "submitted", 1)
|
||||
|
||||
# add tags
|
||||
add_tags(ltags, ltagsgalaxies, rel_item_path)
|
||||
for tag in ltags:
|
||||
add_item_tag(tag, rel_item_path)
|
||||
|
||||
for tag in ltagsgalaxies:
|
||||
add_item_tag(tag, rel_item_path)
|
||||
|
||||
r_serv_log_submit.incr(uuid + ':nb_end')
|
||||
r_serv_log_submit.incr(uuid + ':nb_sucess')
|
||||
|
@ -92,7 +96,6 @@ def remove_submit_uuid(uuid):
|
|||
r_serv_log_submit.expire(uuid + ':nb_sucess', expire_time)
|
||||
r_serv_log_submit.expire(uuid + ':nb_end', expire_time)
|
||||
r_serv_log_submit.expire(uuid + ':error', expire_time)
|
||||
r_serv_log_submit.srem(uuid + ':paste_submit_link', '')
|
||||
r_serv_log_submit.expire(uuid + ':paste_submit_link', expire_time)
|
||||
|
||||
# delete uuid
|
||||
|
@ -134,18 +137,6 @@ def add_item_tag(tag, item_path):
|
|||
if item_date > tag_last_seen:
|
||||
r_serv_tags.hset('tag_metadata:{}'.format(tag), 'last_seen', item_date)
|
||||
|
||||
def add_tags(tags, tagsgalaxies, path):
    """Apply comma-separated taxonomy and galaxy tags to an item path."""
    for tag_csv in (tags, tagsgalaxies):
        tag_list = tag_csv.split(',')
        # ''.split(',') yields [''], which means "no tags supplied"
        if tag_list != ['']:
            for tag in tag_list:
                add_item_tag(tag, path)
|
||||
|
||||
def verify_extention_filename(filename):
|
||||
if not '.' in filename:
|
||||
return True
|
||||
|
@ -218,8 +209,8 @@ if __name__ == "__main__":
|
|||
uuid = r_serv_db.srandmember('submitted:uuid')
|
||||
|
||||
# get temp value save on disk
|
||||
ltags = r_serv_db.get(uuid + ':ltags')
|
||||
ltagsgalaxies = r_serv_db.get(uuid + ':ltagsgalaxies')
|
||||
ltags = r_serv_db.smembers(uuid + ':ltags')
|
||||
ltagsgalaxies = r_serv_db.smembers(uuid + ':ltagsgalaxies')
|
||||
paste_content = r_serv_db.get(uuid + ':paste_content')
|
||||
isfile = r_serv_db.get(uuid + ':isfile')
|
||||
password = r_serv_db.get(uuid + ':password')
|
||||
|
@ -230,8 +221,6 @@ if __name__ == "__main__":
|
|||
r_serv_log_submit.set(uuid + ':nb_total', -1)
|
||||
r_serv_log_submit.set(uuid + ':nb_end', 0)
|
||||
r_serv_log_submit.set(uuid + ':nb_sucess', 0)
|
||||
r_serv_log_submit.set(uuid + ':error', 'error:')
|
||||
r_serv_log_submit.sadd(uuid + ':paste_submit_link', '')
|
||||
|
||||
|
||||
r_serv_log_submit.set(uuid + ':processing', 1)
|
||||
|
@ -275,7 +264,7 @@ if __name__ == "__main__":
|
|||
else:
|
||||
#decompress file
|
||||
try:
|
||||
if password == '':
|
||||
if password == None:
|
||||
files = unpack(file_full_path.encode())
|
||||
#print(files.children)
|
||||
else:
|
||||
|
|
806
doc/README.md
Normal file
806
doc/README.md
Normal file
|
@ -0,0 +1,806 @@
|
|||
# API DOCUMENTATION
|
||||
|
||||
## General
|
||||
|
||||
### Automation key
|
||||
|
||||
The authentication of the automation is performed via a secure key available in the AIL UI interface. Make sure you keep that key secret. It gives access to the entire database! The API key is available in the ``Server Management`` menu under ``My Profile``.
|
||||
|
||||
The authorization is performed by using the following header:
|
||||
|
||||
~~~~
|
||||
Authorization: YOUR_API_KEY
|
||||
~~~~
|
||||
### Accept and Content-Type headers
|
||||
|
||||
When submitting data in a POST, PUT or DELETE operation you need to specify in what content-type you encoded the payload. This is done by setting the below Content-Type headers:
|
||||
|
||||
~~~~
|
||||
Content-Type: application/json
|
||||
~~~~
|
||||
|
||||
Example:
|
||||
|
||||
~~~~
|
||||
curl --header "Authorization: YOUR_API_KEY" --header "Content-Type: application/json" https://AIL_URL/
|
||||
~~~~
|
||||
|
||||
## Item management
|
||||
|
||||
### Get item: `api/v1/get/item/default`<a name="get_item_default"></a>
|
||||
|
||||
#### Description
|
||||
Get item default info.
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- mandatory
|
||||
|
||||
#### JSON response
|
||||
- `content`
|
||||
- item content
|
||||
- *str*
|
||||
- `id`
|
||||
- item id
|
||||
- *str*
|
||||
- `date`
|
||||
- item date
|
||||
- *str - YYMMDD*
|
||||
- `tags`
|
||||
- item tags list
|
||||
- *list*
|
||||
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/get/item/default --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
|
||||
```json
|
||||
{
|
||||
"content": "item content test",
|
||||
"date": "20190726",
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"tags":
|
||||
[
|
||||
"misp-galaxy:backdoor=\"Rosenbridge\"",
|
||||
"infoleak:automatic-detection=\"pgp-message\"",
|
||||
"infoleak:automatic-detection=\"encrypted-private-key\"",
|
||||
"infoleak:submission=\"manual\"",
|
||||
"misp-galaxy:backdoor=\"SLUB\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
|
||||
**HTTP Status Code** : `400`
|
||||
```json
|
||||
{"status": "error", "reason": "Mandatory parameter(s) not provided"}
|
||||
```
|
||||
**HTTP Status Code** : `404`
|
||||
```json
|
||||
{"status": "error", "reason": "Item not found"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
### Get item content: `api/v1/get/item/content`<a name="get_item_content"></a>
|
||||
|
||||
#### Description
|
||||
Get a specific item content.
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- mandatory
|
||||
|
||||
#### JSON response
|
||||
- `content`
|
||||
- item content
|
||||
- *str*
|
||||
- `id`
|
||||
- item id
|
||||
- *str*
|
||||
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/get/item/content --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
|
||||
```json
|
||||
{
|
||||
"content": "item content test",
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
|
||||
**HTTP Status Code** : `400`
|
||||
```json
|
||||
{"status": "error", "reason": "Mandatory parameter(s) not provided"}
|
||||
```
|
||||
**HTTP Status Code** : `404`
|
||||
```json
|
||||
{"status": "error", "reason": "Item not found"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Get item tags: `api/v1/get/item/tag`<a name="get_item_tag"></a>
|
||||
|
||||
#### Description
|
||||
Get all tags from an item.
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- mandatory
|
||||
|
||||
#### JSON response
|
||||
- `id`
  - item id
  - *str*
|
||||
- `tags`
|
||||
- item tags list
|
||||
- *list*
|
||||
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/get/item/tag --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"tags":
|
||||
[
|
||||
"misp-galaxy:backdoor=\"Rosenbridge\"",
|
||||
"infoleak:automatic-detection=\"pgp-message\"",
|
||||
"infoleak:automatic-detection=\"encrypted-private-key\"",
|
||||
"infoleak:submission=\"manual\"",
|
||||
"misp-galaxy:backdoor=\"SLUB\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
|
||||
**HTTP Status Code** : `400`
|
||||
```json
|
||||
{"status": "error", "reason": "Mandatory parameter(s) not provided"}
|
||||
```
|
||||
**HTTP Status Code** : `404`
|
||||
```json
|
||||
{"status": "error", "reason": "Item not found"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Advanced Get item: `api/v1/get/item`<a name="get_item"></a>
|
||||
|
||||
#### Description
|
||||
Get item. Filter requested field.
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- mandatory
|
||||
- `date`
|
||||
- get item date
|
||||
- *boolean*
|
||||
- default: `true`
|
||||
- `tags`
|
||||
- get item tags
|
||||
- *boolean*
|
||||
- default: `true`
|
||||
- `content`
|
||||
- get item content
|
||||
- *boolean*
|
||||
- default: `false`
|
||||
- `size`
|
||||
- get item size
|
||||
- *boolean*
|
||||
- default: `false`
|
||||
- `lines`
|
||||
- get item lines info
|
||||
- *boolean*
|
||||
- default: `false`
|
||||
|
||||
#### JSON response
|
||||
- `content`
|
||||
- item content
|
||||
- *str*
|
||||
- `id`
|
||||
- item id
|
||||
- *str*
|
||||
- `date`
|
||||
- item date
|
||||
- *str - YYMMDD*
|
||||
- `tags`
|
||||
- item tags list
|
||||
- *list*
|
||||
- `size`
|
||||
- item size (Kb)
|
||||
- *int*
|
||||
- `lines`
|
||||
- item lines info
|
||||
- *{}*
|
||||
- `max_length`
|
||||
- maximum line length
|
||||
- *int*
|
||||
- `nb`
|
||||
- number of lines in the item
|
||||
- *int*
|
||||
|
||||
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/get/item --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"content": true,
|
||||
"lines_info": true,
|
||||
"tags": true,
|
||||
"size": true
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
```json
|
||||
{
|
||||
"content": "dsvcdsvcdsc vvvv",
|
||||
"date": "20190726",
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"lines": {
|
||||
"max_length": 19,
|
||||
"nb": 1
|
||||
},
|
||||
"size": 0.03,
|
||||
"tags": [
|
||||
"misp-galaxy:stealer=\"Vidar\"",
|
||||
"infoleak:submission=\"manual\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
**HTTP Status Code** : `400`
|
||||
```json
|
||||
{"status": "error", "reason": "Mandatory parameter(s) not provided"}
|
||||
```
|
||||
**HTTP Status Code** : `404`
|
||||
```json
|
||||
{"status": "error", "reason": "Item not found"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
### Add item tags: `api/v1/add/item/tag`<a name="add_item_tag"></a>
|
||||
|
||||
#### Description
|
||||
Add tags to an item.
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- mandatory
|
||||
- `tags`
|
||||
- list of tags
|
||||
- *list*
|
||||
- default: `[]`
|
||||
- `galaxy`
|
||||
- list of galaxy
|
||||
- *list*
|
||||
- default: `[]`
|
||||
|
||||
#### JSON response
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- `tags`
|
||||
- list of item tags added
|
||||
- *list*
|
||||
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/add/item/tag --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"tags": [
|
||||
"infoleak:analyst-detection=\"private-key\"",
|
||||
"infoleak:analyst-detection=\"api-key\""
|
||||
],
|
||||
"galaxy": [
|
||||
"misp-galaxy:stealer=\"Vidar\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"tags": [
|
||||
"infoleak:analyst-detection=\"private-key\"",
|
||||
"infoleak:analyst-detection=\"api-key\"",
|
||||
"misp-galaxy:stealer=\"Vidar\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
**HTTP Status Code** : `400`
|
||||
|
||||
```json
|
||||
{"status": "error", "reason": "Item id not found"}
|
||||
{"status": "error", "reason": "Tags or Galaxy not specified"}
|
||||
{"status": "error", "reason": "Tags or Galaxy not enabled"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
### Delete item tags: `api/v1/delete/item/tag`<a name="delete_item_tag"></a>
|
||||
|
||||
#### Description
|
||||
Delete tags from an item.
|
||||
|
||||
**Method** : `DELETE`
|
||||
|
||||
#### Parameters
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- mandatory
|
||||
- `tags`
|
||||
- list of tags
|
||||
- *list*
|
||||
- default: `[]`
|
||||
|
||||
#### JSON response
|
||||
- `id`
|
||||
- item id
|
||||
- *str - relative item path*
|
||||
- `tags`
|
||||
- list of item tags deleted
|
||||
- *list*
|
||||
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/delete/item/tag --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X DELETE
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"tags": [
|
||||
"infoleak:analyst-detection=\"private-key\"",
|
||||
"infoleak:analyst-detection=\"api-key\"",
|
||||
"misp-galaxy:stealer=\"Vidar\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "submitted/2019/07/26/3efb8a79-08e9-4776-94ab-615eb370b6d4.gz",
|
||||
"tags": [
|
||||
"infoleak:analyst-detection=\"private-key\"",
|
||||
"infoleak:analyst-detection=\"api-key\"",
|
||||
"misp-galaxy:stealer=\"Vidar\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
**HTTP Status Code** : `400`
|
||||
|
||||
```json
|
||||
{"status": "error", "reason": "Item id not found"}
|
||||
{"status": "error", "reason": "No Tag(s) specified"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
## Tag management
|
||||
|
||||
|
||||
### Get all AIL tags: `api/v1/get/tag/all`<a name="get_tag_all"></a>
|
||||
|
||||
#### Description
|
||||
Get all tags used in AIL.
|
||||
|
||||
**Method** : `GET`
|
||||
|
||||
#### JSON response
|
||||
- `tags`
|
||||
- list of tag
|
||||
- *list*
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/get/tag/all --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json"
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
```json
|
||||
{
|
||||
"tags": [
|
||||
"misp-galaxy:backdoor=\"Rosenbridge\"",
|
||||
"infoleak:automatic-detection=\"pgp-private-key\"",
|
||||
"infoleak:automatic-detection=\"pgp-signature\"",
|
||||
"infoleak:automatic-detection=\"base64\"",
|
||||
"infoleak:automatic-detection=\"encrypted-private-key\"",
|
||||
"infoleak:submission=\"crawler\"",
|
||||
"infoleak:automatic-detection=\"binary\"",
|
||||
"infoleak:automatic-detection=\"pgp-public-key-block\"",
|
||||
"infoleak:automatic-detection=\"hexadecimal\"",
|
||||
"infoleak:analyst-detection=\"private-key\"",
|
||||
"infoleak:submission=\"manual\"",
|
||||
"infoleak:automatic-detection=\"private-ssh-key\"",
|
||||
"infoleak:automatic-detection=\"iban\"",
|
||||
"infoleak:automatic-detection=\"pgp-message\"",
|
||||
"infoleak:automatic-detection=\"certificate\"",
|
||||
"infoleak:automatic-detection=\"credential\"",
|
||||
"infoleak:automatic-detection=\"cve\"",
|
||||
"infoleak:automatic-detection=\"google-api-key\"",
|
||||
"infoleak:automatic-detection=\"phone-number\"",
|
||||
"infoleak:automatic-detection=\"rsa-private-key\"",
|
||||
"misp-galaxy:backdoor=\"SLUB\"",
|
||||
"infoleak:automatic-detection=\"credit-card\"",
|
||||
"misp-galaxy:stealer=\"Vidar\"",
|
||||
"infoleak:automatic-detection=\"private-key\"",
|
||||
"infoleak:automatic-detection=\"api-key\"",
|
||||
"infoleak:automatic-detection=\"mail\""
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
### Get tag metadata: `api/v1/get/tag/metadata`<a name="get_tag_metadata"></a>
|
||||
|
||||
#### Description
|
||||
Get tag metadata.
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
- `tag`
|
||||
- tag name
|
||||
- *str*
|
||||
- mandatory
|
||||
|
||||
#### JSON response
|
||||
- `tag`
|
||||
- tag name
|
||||
- *str*
|
||||
- `first_seen`
|
||||
- date: first seen
|
||||
- *str - YYYYMMDD*
|
||||
- `last_seen`
|
||||
- date: last seen
|
||||
- *str - YYYYMMDD*
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/get/tag/metadata --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"tag": "infoleak:submission=\"manual\""
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
```json
|
||||
{
|
||||
"first_seen": "20190605",
|
||||
"last_seen": "20190726",
|
||||
"tag": "infoleak:submission=\"manual\""
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
**HTTP Status Code** : `404`
|
||||
```json
|
||||
{"status": "error", "reason": "Tag not found"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
## Import management
|
||||
|
||||
|
||||
|
||||
### Import item (currently: text only): `api/v1/import/item`<a name="import_item"></a>
|
||||
|
||||
#### Description
|
||||
Allows users to import new items. This endpoint is asynchronous: it returns an import `uuid` that must be polled via `api/v1/get/import/item`.
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
- `type`
|
||||
- import type
|
||||
- *str*
|
||||
- default: `text`
|
||||
- `text`
|
||||
- text to import
|
||||
- *str*
|
||||
- mandatory if type = text
|
||||
- `default_tags`
|
||||
- add default import tag
|
||||
- *boolean*
|
||||
- default: True
|
||||
- `tags`
|
||||
- list of tags
|
||||
- *list*
|
||||
- default: `[]`
|
||||
- `galaxy`
|
||||
- list of galaxy
|
||||
- *list*
|
||||
- default: `[]`
|
||||
|
||||
#### JSON response
|
||||
- `uuid`
|
||||
- import uuid
|
||||
- *uuid4*
|
||||
|
||||
#### Example
|
||||
```
|
||||
curl https://127.0.0.1:7000/api/v1/import/item --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"type": "text",
|
||||
"tags": [
|
||||
"infoleak:analyst-detection=\"private-key\""
|
||||
],
|
||||
"text": "text to import"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
**HTTP Status Code** : `200`
|
||||
|
||||
```json
|
||||
{
|
||||
"uuid": "0c3d7b34-936e-4f01-9cdf-2070184b6016"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
**HTTP Status Code** : `400`
|
||||
|
||||
```json
|
||||
{"status": "error", "reason": "Malformed JSON"}
|
||||
{"status": "error", "reason": "No text supplied"}
|
||||
{"status": "error", "reason": "Tags or Galaxy not enabled"}
|
||||
{"status": "error", "reason": "Size exceeds default"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
### Get import item info: `api/v1/get/import/item/`<a name="get_import_item"></a>
|
||||
|
||||
#### Description
|
||||
|
||||
Get import status and all items imported by uuid
|
||||
|
||||
**Method** : `POST`
|
||||
|
||||
#### Parameters
|
||||
|
||||
- `uuid`
|
||||
- import uuid
|
||||
- *uuid4*
|
||||
- mandatory
|
||||
|
||||
#### JSON response
|
||||
|
||||
- `status`
|
||||
- import status
|
||||
- *str*
|
||||
- values: `in queue`, `in progress`, `imported`
|
||||
- `items`
|
||||
- list of imported items id
|
||||
- *list*
|
||||
- The full list of imported items is not complete until `status` = `"imported"`
|
||||
|
||||
#### Example
|
||||
|
||||
```
|
||||
curl -k https://127.0.0.1:7000/api/v1/get/import/item --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj " -H "Content-Type: application/json" --data @input.json -X POST
|
||||
```
|
||||
|
||||
#### input.json Example
|
||||
```json
|
||||
{
|
||||
"uuid": "0c3d7b34-936e-4f01-9cdf-2070184b6016"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Success Response
|
||||
|
||||
**HTTP Status Code** : `200`
|
||||
|
||||
```json
|
||||
{
|
||||
"items": [
|
||||
"submitted/2019/07/26/b20a69f1-99ad-4cb3-b212-7ce24b763b50.gz"
|
||||
],
|
||||
"status": "imported"
|
||||
}
|
||||
```
|
||||
|
||||
#### Expected Fail Response
|
||||
|
||||
**HTTP Status Code** : `400`
|
||||
|
||||
```json
|
||||
{"status": "error", "reason": "Invalid uuid"}
|
||||
{"status": "error", "reason": "Unknown uuid"}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# FUTURE endpoints
|
||||
|
||||
### Text search by daterange
|
||||
##### ``api/search/textIndexer/item`` POST
|
||||
|
||||
### Get tagged items by daterange
|
||||
##### ``api/search/tag/item`` POST
|
||||
|
||||
### Submit a domain to crawl
|
||||
##### ``api/add/crawler/domain`` POST
|
||||
|
||||
### Create a term/set/regex tracker
|
||||
##### ``api/add/termTracker/`` POST
|
||||
|
||||
### Get tracker items list
|
||||
##### ``api/get/termTracker/item`` POST
|
||||
|
||||
-----
|
||||
|
||||
### Check if a tor/regular domain has been crawled
|
||||
##### ``api/get/crawler/domain/`` POST
|
||||
|
||||
### Check if a tor/regular domain has been crawled
|
||||
##### ``api/get/crawler/domain/metadata/ <domain><port>`` POST
|
||||
|
||||
### Get domain tags
|
||||
##### ``api/get/crawler/domain/tag/ <domain><port>`` POST
|
||||
|
||||
### Get domain history
|
||||
##### ``api/get/crawler/domain/history/ <domain><port>`` POST
|
||||
|
||||
### Get domain list of items
|
||||
##### ``api/get/crawler/domain/item/ <domain><port>`` POST
|
||||
|
||||
-----
|
||||
|
||||
### Create auto-crawlers
|
||||
##### ``api/add/crawler/autoCrawler/`` POST
|
||||
|
||||
-----
|
||||
|
||||
### Get item by MIME type / decoded type
|
||||
##### ``api/get/decoded`` POST
|
||||
|
||||
### Check if a decoded item exists (via sha1)
|
||||
##### ``api/get/decoded/exist/<sha1>`` POST
|
||||
|
||||
### Get decoded item metadata
|
||||
### Check if a decoded item exists (via sha1)
|
||||
##### ``api/get/decoded/metadata/<sha1>`` POST
|
||||
|
||||
### Get decoded item correlation (1 depth)
|
||||
##### ``api/get/decoded/metadata/<sha1>`` POST
|
||||
|
||||
-----
|
||||
|
||||
|
||||
-----
|
||||
##### ``api/get/cryptocurrency`` POST
|
||||
|
||||
### Check if a cryptocurrency address (bitcoin, ..) exists
|
||||
##### ``api/get/cryptocurrency/exist/<bitcoin_address>`` POST
|
||||
|
||||
### Get cryptocurrency address metadata
|
||||
##### ``api/get/cryptocurrency/metadata/<bitcoin_address>`` POST
|
||||
|
||||
-----
|
||||
|
||||
### Item correlation (1 depth)
|
||||
##### ``api/get/item/correlation/`` POST
|
||||
|
||||
### Create MISP event from item
|
||||
##### ``api/export/item/misp`` POST
|
||||
|
||||
### Create TheHive case from item
|
||||
##### ``api/export/item/thehive`` POST
|
|
@ -1,53 +0,0 @@
|
|||
#!/usr/bin/env python3
# -*-coding:UTF-8 -*

'''
Submit your own pastes to AIL via the /PasteSubmit/submit endpoint.

Empty values must be initialized (use '' rather than leaving a variable unset).
'''

import requests

if __name__ == '__main__':

    # AIL url
    url = 'http://localhost:7000'

    ail_url = url + '/PasteSubmit/submit'

    # MISP TAXONOMY tags, need to be initialized (tags_taxonomies = '')
    tags_taxonomies = 'CERT-XLM:malicious-code=\"ransomware\",CERT-XLM:conformity=\"standard\"'

    # MISP GALAXY tags, need to be initialized (tags_galaxies = '')
    tags_galaxies = 'misp-galaxy:cert-seu-gocsector=\"Constituency\",misp-galaxy:cert-seu-gocsector=\"EU-Centric\"'

    # user paste input, need to be initialized (paste_content = '')
    paste_content = 'paste content test'

    # file full or relative path
    file_to_submit = 'test_file.zip'

    # compressed file password, need to be initialized (password = '')
    password = ''

    '''
    submit user text
    '''
    r = requests.post(ail_url, data={ 'password': password,
                                      'paste_content': paste_content,
                                      'tags_taxonomies': tags_taxonomies,
                                      'tags_galaxies': tags_galaxies})
    print(r.status_code, r.reason)


    '''
    submit a file
    '''
    # BUG FIX: the original opened 'file_submit', which is undefined and raised
    # a NameError; the variable holding the path is 'file_to_submit'.
    with open(file_to_submit, 'rb') as f:
        r = requests.post(ail_url, data={ 'password': password,
                                          'paste_content': paste_content,
                                          'tags_taxonomies': tags_taxonomies,
                                          'tags_galaxies': tags_galaxies},
                          files={'file': (file_to_submit, f.read() )})
        print(r.status_code, r.reason)
|
Binary file not shown.
Before Width: | Height: | Size: 73 KiB |
165
tests/testApi.py
Normal file
165
tests/testApi.py
Normal file
|
@ -0,0 +1,165 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import unittest
|
||||
|
||||
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
|
||||
sys.path.append(os.environ['AIL_FLASK'])
|
||||
sys.path.append(os.path.join(os.environ['AIL_FLASK'], 'modules'))
|
||||
|
||||
import Import_helper
|
||||
import Tag
|
||||
|
||||
from Flask_server import app
|
||||
|
||||
def parse_response(obj, ail_response):
    """Return the JSON payload of *ail_response*, failing *obj* on an API error.

    An AIL API error is signalled by a body of the form
    ``{"status": "error", "reason": ...}``; in that case the enclosing test
    case *obj* is failed with the HTTP status code and the reason. Otherwise
    the decoded JSON dict is returned unchanged.
    """
    payload = ail_response.get_json()
    if 'status' in payload and payload['status'] == 'error':
        message = '{}: {}: {}'.format(ail_response.status_code, payload['status'], payload['reason'])
        return obj.fail(message)
    return payload
|
||||
|
||||
def get_api_key():
    """Read the default AIL API key from $AIL_HOME/DEFAULT_PASSWORD.

    The key is stored on the last line of the file as ``API_Key=<key>``;
    only the key itself is returned.
    """
    password_file = os.path.join(os.environ['AIL_HOME'], 'DEFAULT_PASSWORD')
    with open(password_file, 'r') as f:
        last_line = f.read().splitlines()[-1]
    # Strip the leading "API_Key=" marker (first occurrence only).
    return last_line.replace('API_Key=', '', 1)
|
||||
|
||||
# Module-level API key, read once at import time and shared by every test case.
APIKEY = get_api_key()
|
||||
|
||||
class TestApiV1(unittest.TestCase):
    """End-to-end tests for the AIL REST API v1 (import, item and tag endpoints).

    Test methods run in the order of their numeric name prefix and share state
    through class attributes: test_0001 stores the import uuid, test_0002
    resolves it to the item id used by all later item/tag tests.
    """

    # Shared across test methods (set by test_0001 / test_0002).
    import_uuid = None
    item_id = None


    def setUp(self):
        """Create a Flask test client and the fixtures reused by every test."""
        self.app = app
        self.app.config['TESTING'] = True
        self.client = self.app.test_client()
        self.apikey = APIKEY
        self.item_content = "text to import"
        self.item_tags = ["infoleak:analyst-detection=\"private-key\""]
        # 'infoleak:submission="manual"' is appended by the import code itself.
        self.expected_tags = ["infoleak:analyst-detection=\"private-key\"", 'infoleak:submission="manual"']

    # POST /api/v1/import/item
    def test_0001_api_import_item(self):
        """Import a text item and remember the returned import uuid."""
        input_json = {"type": "text","tags": self.item_tags,"text": self.item_content}
        req = self.client.post('/api/v1/import/item', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        import_uuid = req_json['uuid']
        # Stored on the class so test_0002 can poll this import.
        self.__class__.import_uuid = import_uuid
        self.assertTrue(Import_helper.is_valid_uuid_v4(import_uuid))

    # POST /api/v1/get/import/item
    def test_0002_api_get_import_item(self):
        """Poll the import until it completes and remember the item id."""
        input_json = {"uuid": self.__class__.import_uuid}
        item_not_imported = True
        import_timout = 30
        start = time.time()

        # Poll until status == 'imported' and at least one item id is listed,
        # or until the timeout elapses (self.fail raises, aborting the test
        # before the item_id assignment below is reached).
        while item_not_imported:
            req = self.client.post('/api/v1/get/import/item', json=input_json ,headers={ 'Authorization': self.apikey })
            req_json = parse_response(self, req)
            if req_json['status'] == 'imported':
                try:
                    # 'items' may still be empty right after the status flips.
                    item_id = req_json['items'][0]
                    item_not_imported = False
                except Exception as e:
                    if time.time() - start > import_timout:
                        item_not_imported = False
                        self.fail("Import error: {}".format(req_json))
            else:
                if time.time() - start > import_timout:
                    item_not_imported = False
                    self.fail("Import Timeout, import status: {}".format(req_json['status']))
        self.__class__.item_id = item_id

        # Process item
        # Give the AIL processing modules time to tag the new item.
        time.sleep(5)

    # POST /api/v1/get/item/content
    def test_0003_api_get_item_content(self):
        """The stored content of the imported item matches what was submitted."""
        input_json = {"id": self.__class__.item_id}
        req = self.client.post('/api/v1/get/item/content', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        item_content = req_json['content']
        self.assertEqual(item_content, self.item_content)

    # POST /api/v1/get/item/tag
    def test_0004_api_get_item_tag(self):
        """The imported item carries the submitted tag plus the manual-submission tag."""
        input_json = {"id": self.__class__.item_id}
        req = self.client.post('/api/v1/get/item/tag', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        item_tags = req_json['tags']
        self.assertCountEqual(item_tags, self.expected_tags)

    # POST /api/v1/get/item/default
    def test_0005_api_get_item_default(self):
        """The default item view returns both tags and content."""
        input_json = {"id": self.__class__.item_id}
        req = self.client.post('/api/v1/get/item/default', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        item_tags = req_json['tags']
        self.assertCountEqual(item_tags, self.expected_tags)
        item_content = req_json['content']
        self.assertEqual(item_content, self.item_content)

    # POST /api/v1/get/item
    # # TODO: add more test
    def test_0006_api_get_item(self):
        """Requesting an item with "content": True returns tags and content."""
        input_json = {"id": self.__class__.item_id, "content": True}
        req = self.client.post('/api/v1/get/item', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        item_tags = req_json['tags']
        self.assertCountEqual(item_tags, self.expected_tags)
        item_content = req_json['content']
        self.assertEqual(item_content, self.item_content)

    # POST api/v1/add/item/tag
    def test_0007_api_add_item_tag(self):
        """Adding a tag returns it and the item's tag set gains exactly that tag."""
        tags_to_add = ["infoleak:analyst-detection=\"api-key\""]
        current_item_tag = Tag.get_item_tags(self.__class__.item_id)
        current_item_tag.append(tags_to_add[0])

        #galaxy_to_add = ["misp-galaxy:stealer=\"Vidar\""]
        input_json = {"id": self.__class__.item_id, "tags": tags_to_add}
        req = self.client.post('/api/v1/add/item/tag', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        item_tags = req_json['tags']
        self.assertEqual(item_tags, tags_to_add)

        new_item_tag = Tag.get_item_tags(self.__class__.item_id)
        self.assertCountEqual(new_item_tag, current_item_tag)

    # DELETE api/v1/delete/item/tag
    # NOTE(review): method name says "add" but this exercises the delete
    # endpoint; the numeric prefix is what preserves the required run order.
    def test_0008_api_add_item_tag(self):
        """Deleting a tag returns it and it disappears from the item's tag set."""
        tags_to_delete = ["infoleak:analyst-detection=\"api-key\""]
        input_json = {"id": self.__class__.item_id, "tags": tags_to_delete}
        req = self.client.delete('/api/v1/delete/item/tag', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        item_tags = req_json['tags']
        self.assertCountEqual(item_tags, tags_to_delete)
        current_item_tag = Tag.get_item_tags(self.__class__.item_id)
        if tags_to_delete[0] in current_item_tag:
            self.fail('Tag no deleted')

    # POST api/v1/get/tag/metadata
    # NOTE(review): misnamed — this exercises the tag-metadata endpoint.
    def test_0009_api_add_item_tag(self):
        """Tag metadata lookup echoes back the queried tag."""
        input_json = {"tag": self.item_tags[0]}
        req = self.client.post('/api/v1/get/tag/metadata', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        self.assertEqual(req_json['tag'], self.item_tags[0])

    # GET api/v1/get/tag/all
    # NOTE(review): misnamed — this exercises the list-all-tags endpoint.
    def test_0010_api_add_item_tag(self):
        """The global tag list is non-empty after the imports above."""
        input_json = {"tag": self.item_tags[0]}
        req = self.client.get('/api/v1/get/tag/all', json=input_json ,headers={ 'Authorization': self.apikey })
        req_json = parse_response(self, req)
        self.assertTrue(req_json['tags'])
|
||||
|
||||
# Run the API test suite when executed directly (methods run in name order).
if __name__ == "__main__":
    unittest.main()
|
|
@ -5,6 +5,7 @@ import os
|
|||
import re
|
||||
import sys
|
||||
import ssl
|
||||
import json
|
||||
import time
|
||||
|
||||
import redis
|
||||
|
@ -13,7 +14,7 @@ import logging
|
|||
import logging.handlers
|
||||
import configparser
|
||||
|
||||
from flask import Flask, render_template, jsonify, request, Request, session, redirect, url_for
|
||||
from flask import Flask, render_template, jsonify, request, Request, Response, session, redirect, url_for
|
||||
from flask_login import LoginManager, current_user, login_user, logout_user, login_required
|
||||
|
||||
import bcrypt
|
||||
|
@ -37,6 +38,8 @@ import Flask_config
|
|||
from Role_Manager import create_user_db, check_password_strength, check_user_role_integrity
|
||||
from Role_Manager import login_admin, login_analyst
|
||||
|
||||
Flask_dir = os.environ['AIL_FLASK']
|
||||
|
||||
# CONFIG #
|
||||
cfg = Flask_config.cfg
|
||||
baseUrl = cfg.get("Flask", "baseurl")
|
||||
|
@ -67,21 +70,21 @@ log_dir = os.path.join(os.environ['AIL_HOME'], 'logs')
|
|||
if not os.path.isdir(log_dir):
|
||||
os.makedirs(logs_dir)
|
||||
|
||||
log_filename = os.path.join(log_dir, 'flask_server.logs')
|
||||
logger = logging.getLogger()
|
||||
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
||||
handler_log = logging.handlers.TimedRotatingFileHandler(log_filename, when="midnight", interval=1)
|
||||
handler_log.suffix = '%Y-%m-%d.log'
|
||||
handler_log.setFormatter(formatter)
|
||||
handler_log.setLevel(30)
|
||||
logger.addHandler(handler_log)
|
||||
logger.setLevel(30)
|
||||
# log_filename = os.path.join(log_dir, 'flask_server.logs')
|
||||
# logger = logging.getLogger()
|
||||
# formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
||||
# handler_log = logging.handlers.TimedRotatingFileHandler(log_filename, when="midnight", interval=1)
|
||||
# handler_log.suffix = '%Y-%m-%d.log'
|
||||
# handler_log.setFormatter(formatter)
|
||||
# handler_log.setLevel(30)
|
||||
# logger.addHandler(handler_log)
|
||||
# logger.setLevel(30)
|
||||
|
||||
# ========= =========#
|
||||
|
||||
# ========= TLS =========#
|
||||
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
|
||||
ssl_context.load_cert_chain(certfile='server.crt', keyfile='server.key')
|
||||
ssl_context.load_cert_chain(certfile=os.path.join(Flask_dir, 'server.crt'), keyfile=os.path.join(Flask_dir, 'server.key'))
|
||||
#print(ssl_context.get_ciphers())
|
||||
# ========= =========#
|
||||
|
||||
|
@ -112,13 +115,12 @@ try:
|
|||
toIgnoreModule.add(line)
|
||||
|
||||
except IOError:
|
||||
f = open('templates/ignored_modules.txt', 'w')
|
||||
f.close()
|
||||
pass
|
||||
|
||||
# Dynamically import routes and functions from modules
|
||||
# Also, prepare header.html
|
||||
to_add_to_header_dico = {}
|
||||
for root, dirs, files in os.walk('modules/'):
|
||||
for root, dirs, files in os.walk(os.path.join(Flask_dir, 'modules')):
|
||||
sys.path.append(join(root))
|
||||
|
||||
# Ignore the module
|
||||
|
@ -140,7 +142,7 @@ for root, dirs, files in os.walk('modules/'):
|
|||
|
||||
#create header.html
|
||||
complete_header = ""
|
||||
with open('templates/header_base.html', 'r') as f:
|
||||
with open(os.path.join(Flask_dir, 'templates', 'header_base.html'), 'r') as f:
|
||||
complete_header = f.read()
|
||||
modified_header = complete_header
|
||||
|
||||
|
@ -159,7 +161,7 @@ for module_name, txt in to_add_to_header_dico.items():
|
|||
modified_header = modified_header.replace('<!--insert here-->', '\n'.join(to_add_to_header))
|
||||
|
||||
#Write the header.html file
|
||||
with open('templates/header.html', 'w') as f:
|
||||
with open(os.path.join(Flask_dir, 'templates', 'header.html'), 'w') as f:
|
||||
f.write(modified_header)
|
||||
|
||||
# ========= JINJA2 FUNCTIONS ========
|
||||
|
@ -226,7 +228,7 @@ def login():
|
|||
# login failed
|
||||
else:
|
||||
# set brute force protection
|
||||
logger.warning("Login failed, ip={}, username={}".format(current_ip, username))
|
||||
#logger.warning("Login failed, ip={}, username={}".format(current_ip, username))
|
||||
r_cache.incr('failed_login_ip:{}'.format(current_ip))
|
||||
r_cache.expire('failed_login_ip:{}'.format(current_ip), 300)
|
||||
r_cache.incr('failed_login_user_id:{}'.format(username))
|
||||
|
@ -289,7 +291,26 @@ def searchbox():
|
|||
|
||||
# ========== ERROR HANDLER ============
|
||||
|
||||
@app.errorhandler(405)
def _handle_client_error(e):
    """Return a JSON 405 body, with a documentation link, for API routes.

    Non-API routes keep Flask's default 405 response (*e* is returned as-is).
    """
    if not request.path.startswith('/api/'):
        return e
    # Build the doc anchor from the endpoint path: drop the '/api/v1/' prefix
    # (8 characters) and replace '/' so it matches the README anchor names.
    anchor_id = request.path[8:].replace('/', '_')
    res_dict = {
        "status": "error",
        "reason": "Method Not Allowed: The method is not allowed for the requested URL",
        "documentation": 'https://github.com/CIRCL/AIL-framework/tree/master/doc#{}'.format(anchor_id),
    }
    return Response(json.dumps(res_dict, indent=2, sort_keys=True), mimetype='application/json'), 405
|
||||
|
||||
@app.errorhandler(404)
def error_page_not_found(e):
    """Serve a JSON 404 for API routes; delegate web routes to page_not_found."""
    if not request.path.startswith('/api/'):
        # avoid endpoint enumeration
        return page_not_found(e)
    body = json.dumps({"status": "error", "reason": "404 Not Found"}, indent=2, sort_keys=True)
    return Response(body, mimetype='application/json'), 404
|
||||
|
||||
@login_required
|
||||
def page_not_found(e):
|
||||
# avoid endpoint enumeration
|
||||
|
|
|
@ -12,7 +12,6 @@ import sys
|
|||
|
||||
# FLASK #
|
||||
app = None
|
||||
#secret_key = 'ail-super-secret_key01C'
|
||||
|
||||
# CONFIG #
|
||||
configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
|
||||
|
@ -146,7 +145,7 @@ if HiveApi != False:
|
|||
HiveApi = False
|
||||
print('The Hive not connected')
|
||||
|
||||
# VARIABLES #
|
||||
#### VARIABLES ####
|
||||
baseUrl = cfg.get("Flask", "baseurl")
|
||||
baseUrl = baseUrl.replace('/', '')
|
||||
if baseUrl != '':
|
||||
|
@ -179,6 +178,8 @@ crawler_enabled = cfg.getboolean("Crawler", "activate_crawler")
|
|||
email_regex = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}'
|
||||
email_regex = re.compile(email_regex)
|
||||
|
||||
IMPORT_MAX_TEXT_SIZE = 900000 # size in bytes
|
||||
|
||||
# VT
|
||||
try:
|
||||
from virusTotalKEYS import vt_key
|
||||
|
|
|
@ -23,6 +23,9 @@ import json
|
|||
|
||||
import Paste
|
||||
|
||||
import Import_helper
|
||||
import Tag
|
||||
|
||||
from pytaxonomies import Taxonomies
|
||||
from pymispgalaxies import Galaxies, Clusters
|
||||
|
||||
|
@ -87,65 +90,6 @@ def clean_filename(filename, whitelist=valid_filename_chars, replace=' '):
|
|||
# keep only whitelisted chars
|
||||
return ''.join(c for c in cleaned_filename if c in whitelist)
|
||||
|
||||
def launch_submit(ltags, ltagsgalaxies, paste_content, UUID, password, isfile = False):
    """Queue a submission: persist its payload and initialise its progress keys.

    The submission payload is stored in r_serv_db under ``<UUID><suffix>``
    keys, the import-progress counters in r_serv_log_submit, and finally the
    UUID is registered in the ``submitted:uuid`` set for the worker to pick up.
    """
    # save temp value on disk
    payload = {
        ':ltags': ltags,
        ':ltagsgalaxies': ltagsgalaxies,
        ':paste_content': paste_content,
        ':password': password,
        ':isfile': isfile,
    }
    for suffix, value in payload.items():
        r_serv_db.set(UUID + suffix, value)

    # initialise progress/status counters (nb_total starts at -1)
    progress = (
        (':end', 0),
        (':processing', 0),
        (':nb_total', -1),
        (':nb_end', 0),
        (':nb_sucess', 0),
        (':error', 'error:'),
    )
    for suffix, value in progress:
        r_serv_log_submit.set(UUID + suffix, value)
    r_serv_log_submit.sadd(UUID + ':paste_submit_link', '')

    # save UUID on disk
    r_serv_db.sadd('submitted:uuid', UUID)
|
||||
|
||||
|
||||
def addTagsVerification(tags, tagsgalaxies):
    """Validate comma-separated taxonomy and galaxy tags against the active sets.

    :param tags: comma-separated taxonomy tags (may be '')
    :param tagsgalaxies: comma-separated galaxy tags (may be '')
    :return: True if every supplied tag belongs to an active taxonomy/galaxy
             and is itself enabled, False otherwise
    """
    list_tag = tags.split(',')
    list_tag_galaxies = tagsgalaxies.split(',')

    # FIX: removed unused local `taxonomies = Taxonomies()` — it was never
    # referenced and instantiated the taxonomy library for nothing.
    active_taxonomies = r_serv_tags.smembers('active_taxonomies')
    active_galaxies = r_serv_tags.smembers('active_galaxies')

    # ''.split(',') == [''] : an empty input means "no tags to check"
    if list_tag != ['']:
        for tag in list_tag:
            # the taxonomy name is the part before the first ':'
            tax = tag.split(':')[0]
            if tax not in active_taxonomies:
                return False
            if tag not in r_serv_tags.smembers('active_tag_' + tax):
                return False

    if list_tag_galaxies != ['']:
        for tag in list_tag_galaxies:
            # galaxy tags look like 'misp-galaxy:<galaxy>="<value>"'
            gal = tag.split(':')[1]
            gal = gal.split('=')[0]
            if gal not in active_galaxies:
                return False
            if tag not in r_serv_tags.smembers('active_tag_galaxies_' + gal):
                return False
    return True
|
||||
|
||||
def date_to_str(date):
    """Return *date* as 'year-month-day' without zero padding (e.g. '2019-7-5')."""
    return '-'.join(str(part) for part in (date.year, date.month, date.day))
|
||||
|
||||
|
@ -279,11 +223,9 @@ def hive_create_case(hive_tlp, threat_level, hive_description, hive_case_title,
|
|||
@login_required
|
||||
@login_analyst
|
||||
def PasteSubmit_page():
|
||||
#active taxonomies
|
||||
active_taxonomies = r_serv_tags.smembers('active_taxonomies')
|
||||
|
||||
#active galaxies
|
||||
active_galaxies = r_serv_tags.smembers('active_galaxies')
|
||||
# Get all active tags/galaxy
|
||||
active_taxonomies = Tag.get_active_taxonomies()
|
||||
active_galaxies = Tag.get_active_galaxies()
|
||||
|
||||
return render_template("submit_items.html",
|
||||
active_taxonomies = active_taxonomies,
|
||||
|
@ -311,21 +253,27 @@ def submit():
|
|||
submitted_tag = 'infoleak:submission="manual"'
|
||||
|
||||
#active taxonomies
|
||||
active_taxonomies = r_serv_tags.smembers('active_taxonomies')
|
||||
active_taxonomies = Tag.get_active_taxonomies()
|
||||
#active galaxies
|
||||
active_galaxies = r_serv_tags.smembers('active_galaxies')
|
||||
active_galaxies = Tag.get_active_galaxies()
|
||||
|
||||
if ltags or ltagsgalaxies:
|
||||
if not addTagsVerification(ltags, ltagsgalaxies):
|
||||
|
||||
ltags = ltags.split(',')
|
||||
ltagsgalaxies = ltagsgalaxies.split(',')
|
||||
|
||||
print(ltags)
|
||||
print(ltagsgalaxies)
|
||||
|
||||
if not Tags.is_valid_tags_taxonomies_galaxy(ltags, ltagsgalaxies):
|
||||
content = 'INVALID TAGS'
|
||||
print(content)
|
||||
return content, 400
|
||||
|
||||
# add submitted tags
|
||||
if(ltags != ''):
|
||||
ltags = ltags + ',' + submitted_tag
|
||||
else:
|
||||
ltags = submitted_tag
|
||||
if not ltags:
|
||||
ltags = []
|
||||
ltags.append(submitted_tag)
|
||||
|
||||
if is_file:
|
||||
if file:
|
||||
|
@ -358,7 +306,7 @@ def submit():
|
|||
|
||||
paste_content = full_path
|
||||
|
||||
launch_submit(ltags, ltagsgalaxies, paste_content, UUID, password ,True)
|
||||
Import_helper.create_import_queue(ltags, ltagsgalaxies, paste_content, UUID, password ,True)
|
||||
|
||||
return render_template("submit_items.html",
|
||||
active_taxonomies = active_taxonomies,
|
||||
|
@ -376,12 +324,7 @@ def submit():
|
|||
|
||||
# get id
|
||||
UUID = str(uuid.uuid4())
|
||||
|
||||
#if paste_name:
|
||||
# clean file name
|
||||
#id = clean_filename(paste_name)
|
||||
|
||||
launch_submit(ltags, ltagsgalaxies, paste_content, UUID, password)
|
||||
Import_helper.create_import_queue(ltags, ltagsgalaxies, paste_content, UUID, password)
|
||||
|
||||
return render_template("submit_items.html",
|
||||
active_taxonomies = active_taxonomies,
|
||||
|
@ -415,7 +358,7 @@ def submit_status():
|
|||
nb_sucess = r_serv_log_submit.get(UUID + ':nb_sucess')
|
||||
paste_submit_link = list(r_serv_log_submit.smembers(UUID + ':paste_submit_link'))
|
||||
|
||||
if (end != None) and (nb_total != None) and (nb_end != None) and (error != None) and (processing != None) and (paste_submit_link != None):
|
||||
if (end != None) and (nb_total != None) and (nb_end != None) and (processing != None):
|
||||
|
||||
link = ''
|
||||
if paste_submit_link:
|
||||
|
@ -433,10 +376,10 @@ def submit_status():
|
|||
else:
|
||||
prog = 0
|
||||
|
||||
if error == 'error:':
|
||||
isError = False
|
||||
else:
|
||||
if error:
|
||||
isError = True
|
||||
else:
|
||||
isError = False
|
||||
|
||||
if end == '0':
|
||||
end = False
|
||||
|
|
|
@ -20,6 +20,7 @@ from pymispgalaxies import Galaxies, Clusters
|
|||
|
||||
# ============ VARIABLES ============
|
||||
import Flask_config
|
||||
import Tag
|
||||
|
||||
app = Flask_config.app
|
||||
cfg = Flask_config.cfg
|
||||
|
@ -59,16 +60,6 @@ for name, tags in clusters.items(): #galaxie name + tags
|
|||
def one():
|
||||
return 1
|
||||
|
||||
def date_substract_day(date, num_day=1):
|
||||
new_date = datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8])) - datetime.timedelta(num_day)
|
||||
new_date = str(new_date).replace('-', '')
|
||||
return new_date
|
||||
|
||||
def date_add_day(date, num_day=1):
|
||||
new_date = datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8])) + datetime.timedelta(num_day)
|
||||
new_date = str(new_date).replace('-', '')
|
||||
return new_date
|
||||
|
||||
def get_tags_with_synonyms(tag):
|
||||
str_synonyms = ' - synonyms: '
|
||||
synonyms = r_serv_tags.smembers('synonym_tag_' + tag)
|
||||
|
@ -131,93 +122,6 @@ def get_last_seen_from_tags_list(list_tags):
|
|||
min_last_seen = tag_last_seen
|
||||
return str(min_last_seen)
|
||||
|
||||
def add_item_tag(tag, item_path):
|
||||
item_date = int(get_item_date(item_path))
|
||||
|
||||
#add tag
|
||||
r_serv_metadata.sadd('tag:{}'.format(item_path), tag)
|
||||
r_serv_tags.sadd('{}:{}'.format(tag, item_date), item_path)
|
||||
|
||||
r_serv_tags.hincrby('daily_tags:{}'.format(item_date), tag, 1)
|
||||
|
||||
tag_first_seen = r_serv_tags.hget('tag_metadata:{}'.format(tag), 'last_seen')
|
||||
if tag_first_seen is None:
|
||||
tag_first_seen = 99999999
|
||||
else:
|
||||
tag_first_seen = int(tag_first_seen)
|
||||
tag_last_seen = r_serv_tags.hget('tag_metadata:{}'.format(tag), 'last_seen')
|
||||
if tag_last_seen is None:
|
||||
tag_last_seen = 0
|
||||
else:
|
||||
tag_last_seen = int(tag_last_seen)
|
||||
|
||||
#add new tag in list of all used tags
|
||||
r_serv_tags.sadd('list_tags', tag)
|
||||
|
||||
# update fisrt_seen/last_seen
|
||||
if item_date < tag_first_seen:
|
||||
r_serv_tags.hset('tag_metadata:{}'.format(tag), 'first_seen', item_date)
|
||||
|
||||
# update metadata last_seen
|
||||
if item_date > tag_last_seen:
|
||||
r_serv_tags.hset('tag_metadata:{}'.format(tag), 'last_seen', item_date)
|
||||
|
||||
def remove_item_tag(tag, item_path):
|
||||
item_date = int(get_item_date(item_path))
|
||||
|
||||
#remove tag
|
||||
r_serv_metadata.srem('tag:{}'.format(item_path), tag)
|
||||
res = r_serv_tags.srem('{}:{}'.format(tag, item_date), item_path)
|
||||
|
||||
if res ==1:
|
||||
# no tag for this day
|
||||
if int(r_serv_tags.hget('daily_tags:{}'.format(item_date), tag)) == 1:
|
||||
r_serv_tags.hdel('daily_tags:{}'.format(item_date), tag)
|
||||
else:
|
||||
r_serv_tags.hincrby('daily_tags:{}'.format(item_date), tag, -1)
|
||||
|
||||
tag_first_seen = int(r_serv_tags.hget('tag_metadata:{}'.format(tag), 'last_seen'))
|
||||
tag_last_seen = int(r_serv_tags.hget('tag_metadata:{}'.format(tag), 'last_seen'))
|
||||
# update fisrt_seen/last_seen
|
||||
if item_date == tag_first_seen:
|
||||
update_tag_first_seen(tag, tag_first_seen, tag_last_seen)
|
||||
if item_date == tag_last_seen:
|
||||
update_tag_last_seen(tag, tag_first_seen, tag_last_seen)
|
||||
else:
|
||||
return 'Error incorrect tag'
|
||||
|
||||
def update_tag_first_seen(tag, tag_first_seen, tag_last_seen):
|
||||
if tag_first_seen == tag_last_seen:
|
||||
if r_serv_tags.scard('{}:{}'.format(tag, tag_first_seen)) > 0:
|
||||
r_serv_tags.hset('tag_metadata:{}'.format(tag), 'first_seen', tag_first_seen)
|
||||
# no tag in db
|
||||
else:
|
||||
r_serv_tags.srem('list_tags', tag)
|
||||
r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'first_seen')
|
||||
r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'last_seen')
|
||||
else:
|
||||
if r_serv_tags.scard('{}:{}'.format(tag, tag_first_seen)) > 0:
|
||||
r_serv_tags.hset('tag_metadata:{}'.format(tag), 'first_seen', tag_first_seen)
|
||||
else:
|
||||
tag_first_seen = date_add_day(tag_first_seen)
|
||||
update_tag_first_seen(tag, tag_first_seen, tag_last_seen)
|
||||
|
||||
def update_tag_last_seen(tag, tag_first_seen, tag_last_seen):
|
||||
if tag_first_seen == tag_last_seen:
|
||||
if r_serv_tags.scard('{}:{}'.format(tag, tag_last_seen)) > 0:
|
||||
r_serv_tags.hset('tag_metadata:{}'.format(tag), 'last_seen', tag_last_seen)
|
||||
# no tag in db
|
||||
else:
|
||||
r_serv_tags.srem('list_tags', tag)
|
||||
r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'first_seen')
|
||||
r_serv_tags.hdel('tag_metadata:{}'.format(tag), 'last_seen')
|
||||
else:
|
||||
if r_serv_tags.scard('{}:{}'.format(tag, tag_last_seen)) > 0:
|
||||
r_serv_tags.hset('tag_metadata:{}'.format(tag), 'last_seen', tag_last_seen)
|
||||
else:
|
||||
tag_last_seen = date_substract_day(tag_last_seen)
|
||||
update_tag_last_seen(tag, tag_first_seen, tag_last_seen)
|
||||
|
||||
# ============= ROUTES ==============
|
||||
|
||||
@Tags.route("/tags/", methods=['GET'])
|
||||
|
@ -472,8 +376,9 @@ def remove_tag():
|
|||
path = request.args.get('paste')
|
||||
tag = request.args.get('tag')
|
||||
|
||||
remove_item_tag(tag, path)
|
||||
|
||||
res = Tag.remove_item_tag(tag, path)
|
||||
if res[1] != 200:
|
||||
str(res[0])
|
||||
return redirect(url_for('showsavedpastes.showsavedpaste', paste=path))
|
||||
|
||||
@Tags.route("/Tags/confirm_tag")
|
||||
|
@ -486,11 +391,11 @@ def confirm_tag():
|
|||
tag = request.args.get('tag')
|
||||
|
||||
if(tag[9:28] == 'automatic-detection'):
|
||||
remove_item_tag(tag, path)
|
||||
Tag.remove_item_tag(tag, path)
|
||||
|
||||
tag = tag.replace('automatic-detection','analyst-detection', 1)
|
||||
#add analyst tag
|
||||
add_item_tag(tag, path)
|
||||
Tag.add_item_tag(tag, path)
|
||||
|
||||
return redirect(url_for('showsavedpastes.showsavedpaste', paste=path))
|
||||
|
||||
|
@ -530,42 +435,12 @@ def addTags():
|
|||
list_tag = tags.split(',')
|
||||
list_tag_galaxies = tagsgalaxies.split(',')
|
||||
|
||||
taxonomies = Taxonomies()
|
||||
active_taxonomies = r_serv_tags.smembers('active_taxonomies')
|
||||
|
||||
active_galaxies = r_serv_tags.smembers('active_galaxies')
|
||||
|
||||
if not path:
|
||||
return 'INCORRECT INPUT0'
|
||||
|
||||
if list_tag != ['']:
|
||||
for tag in list_tag:
|
||||
# verify input
|
||||
tax = tag.split(':')[0]
|
||||
if tax in active_taxonomies:
|
||||
if tag in r_serv_tags.smembers('active_tag_' + tax):
|
||||
add_item_tag(tag, path)
|
||||
|
||||
else:
|
||||
return 'INCORRECT INPUT1'
|
||||
else:
|
||||
return 'INCORRECT INPUT2'
|
||||
|
||||
if list_tag_galaxies != ['']:
|
||||
for tag in list_tag_galaxies:
|
||||
# verify input
|
||||
gal = tag.split(':')[1]
|
||||
gal = gal.split('=')[0]
|
||||
|
||||
if gal in active_galaxies:
|
||||
if tag in r_serv_tags.smembers('active_tag_galaxies_' + gal):
|
||||
add_item_tag(tag, path)
|
||||
|
||||
else:
|
||||
return 'INCORRECT INPUT3'
|
||||
else:
|
||||
return 'INCORRECT INPUT4'
|
||||
|
||||
res = Tag.add_items_tag(list_tag, list_tag_galaxies, path)
|
||||
print(res)
|
||||
# error
|
||||
if res[1] != 200:
|
||||
return str(res[0])
|
||||
# success
|
||||
return redirect(url_for('showsavedpastes.showsavedpaste', paste=path))
|
||||
|
||||
|
||||
|
|
|
@ -8,10 +8,16 @@
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
import uuid
|
||||
import json
|
||||
import redis
|
||||
import datetime
|
||||
|
||||
import Import_helper
|
||||
import Item
|
||||
import Paste
|
||||
import Tag
|
||||
|
||||
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response
|
||||
from flask_login import login_required
|
||||
|
||||
|
@ -20,6 +26,7 @@ from functools import wraps
|
|||
# ============ VARIABLES ============
|
||||
import Flask_config
|
||||
|
||||
|
||||
app = Flask_config.app
|
||||
cfg = Flask_config.cfg
|
||||
baseUrl = Flask_config.baseUrl
|
||||
|
@ -28,6 +35,7 @@ r_serv_db = Flask_config.r_serv_db
|
|||
r_serv_onion = Flask_config.r_serv_onion
|
||||
r_serv_metadata = Flask_config.r_serv_metadata
|
||||
|
||||
|
||||
restApi = Blueprint('restApi', __name__, template_folder='templates')
|
||||
|
||||
# ============ AUTH FUNCTIONS ============
|
||||
|
@ -36,7 +44,7 @@ def check_token_format(strg, search=re.compile(r'[^a-zA-Z0-9_-]').search):
|
|||
return not bool(search(strg))
|
||||
|
||||
def verify_token(token):
|
||||
if len(token) != 55:
|
||||
if len(token) != 41:
|
||||
return False
|
||||
|
||||
if not check_token_format(token):
|
||||
|
@ -47,23 +55,41 @@ def verify_token(token):
|
|||
else:
|
||||
return False
|
||||
|
||||
def verify_user_role(role, token):
|
||||
user_id = r_serv_db.hget('user:tokens', token)
|
||||
if user_id:
|
||||
if is_in_role(user_id, role):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
|
||||
def is_in_role(user_id, role):
|
||||
if r_serv_db.sismember('user_role:{}'.format(role), user_id):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
# ============ DECORATOR ============
|
||||
|
||||
def token_required(funct):
|
||||
def token_required(user_role):
|
||||
def actual_decorator(funct):
|
||||
@wraps(funct)
|
||||
def api_token(*args, **kwargs):
|
||||
data = authErrors()
|
||||
data = authErrors(user_role)
|
||||
if data:
|
||||
return Response(json.dumps(data[0], indent=2, sort_keys=True), mimetype='application/json'), data[1]
|
||||
else:
|
||||
return funct(*args, **kwargs)
|
||||
return api_token
|
||||
return actual_decorator
|
||||
|
||||
def get_auth_from_header():
|
||||
token = request.headers.get('Authorization').replace(' ', '') # remove space
|
||||
return token
|
||||
|
||||
def authErrors():
|
||||
def authErrors(user_role):
|
||||
# Check auth
|
||||
if not request.headers.get('Authorization'):
|
||||
return ({'status': 'error', 'reason': 'Authentication needed'}, 401)
|
||||
|
@ -71,12 +97,27 @@ def authErrors():
|
|||
data = None
|
||||
# verify token format
|
||||
|
||||
# brute force protection
|
||||
current_ip = request.remote_addr
|
||||
login_failed_ip = r_cache.get('failed_login_ip_api:{}'.format(current_ip))
|
||||
# brute force by ip
|
||||
if login_failed_ip:
|
||||
login_failed_ip = int(login_failed_ip)
|
||||
if login_failed_ip >= 5:
|
||||
return ({'status': 'error', 'reason': 'Max Connection Attempts reached, Please wait {}s'.format(r_cache.ttl('failed_login_ip_api:{}'.format(current_ip)))}, 401)
|
||||
|
||||
try:
|
||||
authenticated = False
|
||||
if verify_token(token):
|
||||
authenticated = True
|
||||
|
||||
# check user role
|
||||
if not verify_user_role(user_role, token):
|
||||
data = ({'status': 'error', 'reason': 'Access Forbidden'}, 403)
|
||||
|
||||
if not authenticated:
|
||||
r_cache.incr('failed_login_ip_api:{}'.format(current_ip))
|
||||
r_cache.expire('failed_login_ip_api:{}'.format(current_ip), 300)
|
||||
data = ({'status': 'error', 'reason': 'Authentication failed'}, 401)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
@ -86,8 +127,18 @@ def authErrors():
|
|||
else:
|
||||
return None
|
||||
|
||||
# ============ API CORE =============
|
||||
|
||||
# ============ FUNCTIONS ============
|
||||
|
||||
def is_valid_uuid_v4(header_uuid):
|
||||
try:
|
||||
header_uuid=header_uuid.replace('-', '')
|
||||
uuid_test = uuid.UUID(hex=header_uuid, version=4)
|
||||
return uuid_test.hex == header_uuid
|
||||
except:
|
||||
return False
|
||||
|
||||
def one():
|
||||
return 1
|
||||
|
||||
|
@ -98,12 +149,249 @@ def one():
|
|||
# def api():
|
||||
# return 'api doc'
|
||||
|
||||
@restApi.route("api/items", methods=['POST'])
|
||||
@token_required
|
||||
def items():
|
||||
item = request.args.get('id')
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# POST
|
||||
#
|
||||
# {
|
||||
# "id": item_id, mandatory
|
||||
# "content": true,
|
||||
# "tags": true,
|
||||
#
|
||||
#
|
||||
# }
|
||||
#
|
||||
# response: {
|
||||
# "id": "item_id",
|
||||
# "tags": [],
|
||||
# }
|
||||
#
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
@restApi.route("api/v1/get/item", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def get_item_id():
|
||||
data = request.get_json()
|
||||
res = Item.get_item(data)
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
|
||||
return Response(json.dumps({'test': 2}), mimetype='application/json')
|
||||
@restApi.route("api/v1/get/item/default", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def get_item_id_basic():
|
||||
|
||||
data = request.get_json()
|
||||
item_id = data.get('id', None)
|
||||
req_data = {'id': item_id, 'date': True, 'content': True, 'tags': True}
|
||||
res = Item.get_item(req_data)
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# GET
|
||||
#
|
||||
# {
|
||||
# "id": item_id, mandatory
|
||||
# }
|
||||
#
|
||||
# response: {
|
||||
# "id": "item_id",
|
||||
# "tags": [],
|
||||
# }
|
||||
#
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
@restApi.route("api/v1/get/item/tag", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def get_item_tag():
|
||||
|
||||
data = request.get_json()
|
||||
item_id = data.get('id', None)
|
||||
req_data = {'id': item_id, 'date': False, 'tags': True}
|
||||
res = Item.get_item(req_data)
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# POST
|
||||
#
|
||||
# {
|
||||
# "id": item_id, mandatory
|
||||
# "tags": [tags to add],
|
||||
# "galaxy": [galaxy to add],
|
||||
# }
|
||||
#
|
||||
# response: {
|
||||
# "id": "item_id",
|
||||
# "tags": [tags added],
|
||||
# }
|
||||
#
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
@restApi.route("api/v1/add/item/tag", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def add_item_tags():
|
||||
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'Malformed JSON'}, indent=2, sort_keys=True), mimetype='application/json'), 400
|
||||
|
||||
item_id = data.get('id', None)
|
||||
tags = data.get('tags', [])
|
||||
galaxy = data.get('galaxy', [])
|
||||
|
||||
res = Tag.add_items_tag(tags, galaxy, item_id)
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# DELETE
|
||||
#
|
||||
# {
|
||||
# "id": item_id, mandatory
|
||||
# "tags": [tags to delete],
|
||||
# }
|
||||
#
|
||||
# response: {
|
||||
# "id": "item_id",
|
||||
# "tags": [tags deleted],
|
||||
# }
|
||||
#
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
@restApi.route("api/v1/delete/item/tag", methods=['DELETE'])
|
||||
@token_required('analyst')
|
||||
def delete_item_tags():
|
||||
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'Malformed JSON'}, indent=2, sort_keys=True), mimetype='application/json'), 400
|
||||
|
||||
item_id = data.get('id', None)
|
||||
tags = data.get('tags', [])
|
||||
|
||||
res = Tag.remove_item_tags(tags, item_id)
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# GET
|
||||
#
|
||||
# {
|
||||
# "id": item_id, mandatory
|
||||
# }
|
||||
#
|
||||
# response: {
|
||||
# "id": "item_id",
|
||||
# "content": "item content"
|
||||
# }
|
||||
#
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
@restApi.route("api/v1/get/item/content", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def get_item_content():
|
||||
|
||||
data = request.get_json()
|
||||
item_id = data.get('id', None)
|
||||
req_data = {'id': item_id, 'date': False, 'content': True, 'tags': False}
|
||||
res = Item.get_item(req_data)
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# # # # # # # # # # # # # # TAGS # # # # # # # # # # # # # # # # #
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
|
||||
@restApi.route("api/v1/get/tag/metadata", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def get_tag_metadata():
|
||||
data = request.get_json()
|
||||
tag = data.get('tag', None)
|
||||
if not Tag.is_tag_in_all_tag(tag):
|
||||
return Response(json.dumps({'status': 'error', 'reason':'Tag not found'}, indent=2, sort_keys=True), mimetype='application/json'), 404
|
||||
metadata = Tag.get_tag_metadata(tag)
|
||||
return Response(json.dumps(metadata, indent=2, sort_keys=True), mimetype='application/json'), 200
|
||||
|
||||
@restApi.route("api/v1/get/tag/all", methods=['GET'])
|
||||
@token_required('analyst')
|
||||
def get_all_tags():
|
||||
res = {'tags': Tag.get_all_tags()}
|
||||
return Response(json.dumps(res, indent=2, sort_keys=True), mimetype='application/json'), 200
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# # # # # # # # # # # # # IMPORT # # # # # # # # # # # # # # # # # #
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
|
||||
|
||||
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
#
|
||||
# POST JSON FORMAT
|
||||
#
|
||||
# {
|
||||
# "type": "text", (default value)
|
||||
# "tags": [], (default value)
|
||||
# "default_tags": True, (default value)
|
||||
# "galaxy" [], (default value)
|
||||
# "text": "", mandatory if type = text
|
||||
# }
|
||||
#
|
||||
# response: {"uuid": "uuid"}
|
||||
#
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
@restApi.route("api/v1/import/item", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def import_item():
|
||||
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'Malformed JSON'}, indent=2, sort_keys=True), mimetype='application/json'), 400
|
||||
|
||||
# unpack json
|
||||
text_to_import = data.get('text', None)
|
||||
if not text_to_import:
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'No text supplied'}, indent=2, sort_keys=True), mimetype='application/json'), 400
|
||||
|
||||
tags = data.get('tags', [])
|
||||
if not type(tags) is list:
|
||||
tags = []
|
||||
galaxy = data.get('galaxy', [])
|
||||
if not type(galaxy) is list:
|
||||
galaxy = []
|
||||
|
||||
if not Tag.is_valid_tags_taxonomies_galaxy(tags, galaxy):
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'Tags or Galaxy not enabled'}, indent=2, sort_keys=True), mimetype='application/json'), 400
|
||||
|
||||
default_tags = data.get('default_tags', True)
|
||||
if default_tags:
|
||||
tags.append('infoleak:submission="manual"')
|
||||
|
||||
if sys.getsizeof(text_to_import) > 900000:
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'Size exceeds default'}, indent=2, sort_keys=True), mimetype='application/json'), 413
|
||||
|
||||
UUID = str(uuid.uuid4())
|
||||
Import_helper.create_import_queue(tags, galaxy, text_to_import, UUID)
|
||||
|
||||
return Response(json.dumps({'uuid': UUID}, indent=2, sort_keys=True), mimetype='application/json')
|
||||
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
# GET
|
||||
#
|
||||
# {
|
||||
# "uuid": "uuid", mandatory
|
||||
# }
|
||||
#
|
||||
# response: {
|
||||
# "status": "in queue"/"in progress"/"imported",
|
||||
# "items": [all item id]
|
||||
# }
|
||||
#
|
||||
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
||||
@restApi.route("api/v1/get/import/item", methods=['POST'])
|
||||
@token_required('analyst')
|
||||
def import_item_uuid():
|
||||
data = request.get_json()
|
||||
UUID = data.get('uuid', None)
|
||||
|
||||
# Verify uuid
|
||||
if not is_valid_uuid_v4(UUID):
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'Invalid uuid'}), mimetype='application/json'), 400
|
||||
|
||||
data = Import_helper.check_import_status(UUID)
|
||||
if data:
|
||||
return Response(json.dumps(data[0]), mimetype='application/json'), data[1]
|
||||
|
||||
return Response(json.dumps({'status': 'error', 'reason': 'Invalid response'}), mimetype='application/json'), 400
|
||||
|
||||
# ========= REGISTRATION =========
|
||||
app.register_blueprint(restApi, url_prefix=baseUrl)
|
||||
|
|
Loading…
Reference in a new issue