chg: [Kvrocks migration] add users + updates + trackers + retro_hunts + investigation migration

This commit is contained in:
Terrtia 2022-07-08 09:47:47 +02:00
parent 591a2abdf3
commit 50f7a31820
No known key found for this signature in database
GPG key ID: 1E1B1F50D84613D0
45 changed files with 1925 additions and 529 deletions

2
.gitignore vendored
View file

@ -8,6 +8,8 @@ AILENV
redis-leveldb
redis
ardb
kvrocks
DATA_KVROCKS
faup
tlsh
Blooms

258
bin/DB_KVROCKS_MIGRATION.py Executable file
View file

@ -0,0 +1,258 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
"""
import os
import sys
import importlib.util
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib.ConfigLoader import ConfigLoader
from lib import Users
# # # # CONFIGS # # # #
config_loader = ConfigLoader()
r_kvrocks = config_loader.get_redis_conn("Kvrocks_DB")
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_tracker = config_loader.get_redis_conn("ARDB_Tracker")
config_loader = None
# # - - CONFIGS - - # #
from core import ail_2_ail
spec = importlib.util.find_spec('ail_2_ail')
old_ail_2_ail = importlib.util.module_from_spec(spec)
spec.loader.exec_module(old_ail_2_ail)
old_ail_2_ail.r_serv_sync = r_serv_db
from lib import Tracker
spec = importlib.util.find_spec('Tracker')
old_Tracker = importlib.util.module_from_spec(spec)
spec.loader.exec_module(old_Tracker)
old_Tracker.r_serv_tracker = r_serv_tracker
from lib import Investigations
spec = importlib.util.find_spec('Investigations')
old_Investigations = importlib.util.module_from_spec(spec)
spec.loader.exec_module(old_Investigations)
old_Investigations.r_tracking = r_serv_tracker
# # TODO: disable features - credentials - stats ? - sentiment analysis
# CREATE FUNCTION BY DB/FEATURES
# /!\ ISSUE WITH FILE DUPLICATES => NEED TO BE REFACTORED
def core_migration():
    """Migrate core AIL metadata (uuid, version, update state, d4) from ARDB to Kvrocks."""
    print('CORE MIGRATION...')

    # AIL UUID
    ail_uuid = r_serv_db.get('ail:uuid')
    if ail_uuid is not None:
        r_kvrocks.set('ail:uuid', ail_uuid)

    # AIL update # # TODO: TO TEST
    ail_version = r_serv_db.get('ail:version')
    if ail_version is not None:
        r_kvrocks.set('ail:version', ail_version)
    dict_update = r_serv_db.hgetall('ail:update_date')
    for version in dict_update:
        r_kvrocks.hset('ail:update_date', version, dict_update[version])

    versions_to_update = r_serv_db.smembers('ail:to_update')
    for version in versions_to_update:
        r_kvrocks.sadd('ail:update:to_update', version)

    # BUG FIX: redis-py set() raises DataError on a None value; these keys
    # may be absent in the source ARDB, so only copy them when they exist.
    update_error = r_serv_db.get('ail:update_error')
    if update_error is not None:
        r_kvrocks.set('ail:update:error', update_error)
    update_in_progress = r_serv_db.get('ail:update_in_progress')
    if update_in_progress is not None:
        r_kvrocks.set('ail:update:update_in_progress', update_in_progress)

    # d4 passivedns
    d4_enabled = r_serv_db.hget('d4:passivedns', 'enabled')
    d4_update_time = r_serv_db.hget('d4:passivedns', 'update_time')
    r_kvrocks.hset('d4:passivedns', 'enabled', bool(d4_enabled))
    if d4_update_time is not None:
        r_kvrocks.hset('d4:passivedns', 'update_time', d4_update_time)

    # Not migrated yet -- TODO:
    # ail:misp
    # ail:thehive
    # hive:auto-alerts
    # list_export_tags
    # misp:auto-events
    # whitelist_hive
    # whitelist_misp

    # # TODO: TO CHECK
    # config:all_global_section +
    # config:global:crawler +
    # mess_not_saved_export
# # # # # # # # # # # # # # # #
# USERS
#
# HSET 'user:all' user_id passwd_hash
# HSET 'user:tokens' token user_id
# HSET 'user_metadata:{user_id}' 'token' token
# 'role' role
# 'change_passwd' 'True'
# SET 'user_role:{role}' user_id
#
def user_migration():
    """Migrate users (password hashes, roles, tokens) from ARDB to Kvrocks via lib.Users."""
    print('USER MIGRATION...')
    # create role_list
    Users._create_roles_list()
    for user_id in r_serv_db.hkeys('user:all'):
        role = r_serv_db.hget(f'user_metadata:{user_id}', 'role')
        password_hash = r_serv_db.hget('user:all', user_id)
        token = r_serv_db.hget(f'user_metadata:{user_id}', 'token')
        chg_passwd = r_serv_db.hget(f'user_metadata:{user_id}', 'change_passwd')
        if not chg_passwd:
            chg_passwd = None
        # create_user() generates a random password; the original bcrypt hash
        # is restored right after by edit_user_password()
        Users.create_user(user_id, password=None, chg_passwd=chg_passwd, role=role)
        Users.edit_user_password(user_id, password_hash, chg_passwd=chg_passwd)
        # replace the freshly generated token with the original one
        Users._delete_user_token(user_id)
        Users._set_user_token(user_id, token)
# # # # # # # # # # # # # # # #
# AIL 2 AIL
def ail_2_ail_migration():
    """Migrate AIL_2_AIL sync queues, remote AIL instances and queued objects to Kvrocks."""
    print('AIL_2_AIL MIGRATION...')

    # AIL Queues: re-create each queue with its original uuid
    for queue_uuid in old_ail_2_ail.get_all_sync_queue():
        #print(queue_uuid)
        meta = old_ail_2_ail.get_sync_queue_metadata(queue_uuid)

        name = meta['name']
        tags = meta['tags']
        description = meta['description']
        max_size = meta['max_size']

        ail_2_ail.create_sync_queue(name, tags=tags, description=description, max_size=max_size, _queue_uuid=queue_uuid)

    # AIL Instances
    for ail_uuid in old_ail_2_ail.get_all_ail_instance():
        #print(ail_uuid)
        meta = old_ail_2_ail.get_ail_instance_metadata(ail_uuid, client_sync_mode=True, server_sync_mode=True, sync_queues=True)
        url = meta['url']
        api_key = meta['api_key']
        description = meta['description']
        pull = meta['pull']
        push = meta['push']
        ail_2_ail.create_ail_instance(ail_uuid, url, api_key=api_key, description=description, pull=pull, push=push)

        version = old_ail_2_ail.get_ail_server_version(ail_uuid)
        if version:
            ail_2_ail.set_ail_server_version(ail_uuid, version)
        ping = old_ail_2_ail.get_ail_server_ping(ail_uuid)
        if ping:
            ail_2_ail.set_ail_server_ping(ail_uuid, ping)
        error = old_ail_2_ail.get_ail_server_error(ail_uuid)
        if error:
            ail_2_ail.save_ail_server_error(ail_uuid, error)

        # re-attach the instance to its queues and replay queued objects.
        # NOTE(review): reversed() presumably preserves the original queue
        # order when objects are re-pushed -- confirm against
        # add_object_to_sync_queue()'s push direction
        for queue_uuid in meta['sync_queues']:
            ail_2_ail.register_ail_to_sync_queue(ail_uuid, queue_uuid)
            for dict_obj in reversed(old_ail_2_ail.get_sync_queue_objects_by_queue_uuid(queue_uuid, ail_uuid, push=True)):
                ail_2_ail.add_object_to_sync_queue(queue_uuid, ail_uuid, dict_obj, push=True, pull=False, json_obj=False)
            for dict_obj in reversed(old_ail_2_ail.get_sync_queue_objects_by_queue_uuid(queue_uuid, ail_uuid, push=False)):
                ail_2_ail.add_object_to_sync_queue(queue_uuid, ail_uuid, dict_obj, push=False, pull=True, json_obj=False)

    # server
    # queue
    # item in queue

    ail_2_ail.set_last_updated_sync_config()
# trackers + retro_hunts
def trackers_migration():
    """Migrate trackers and retro-hunt tasks (plus their matched items) to Kvrocks."""
    print('TRACKERS MIGRATION...')
    for tracker_uuid in old_Tracker.get_all_tracker_uuid():
        meta = old_Tracker.get_tracker_metadata(tracker_uuid, user_id=True, description=True, level=True, tags=True, mails=True, sources=True, sparkline=False, webhook=True)
        # NOTE(review): dashboard flag is passed as 0 -- presumably not
        # exported by the old metadata getter; confirm it can be reset
        Tracker._re_create_tracker(meta['tracker'], meta['type'], meta['user_id'], meta['level'], meta['tags'], meta['mails'], meta['description'], meta['webhook'], 0, meta['uuid'], meta['sources'], meta['first_seen'], meta['last_seen'])

        # object migration # # TODO: in background
        for item_id in old_Tracker.get_tracker_items_by_daterange(tracker_uuid, meta['first_seen'], meta['last_seen']):
            Tracker.add_tracked_item(tracker_uuid, item_id)

    print('RETRO HUNT MIGRATION...')
    for task_uuid in old_Tracker.get_all_retro_hunt_tasks():
        meta = old_Tracker.get_retro_hunt_task_metadata(task_uuid, date=True, progress=True, creator=True, sources=True, tags=True, description=True, nb_match=True)
        last_id = old_Tracker.get_retro_hunt_last_analyzed(task_uuid)
        timeout = old_Tracker.get_retro_hunt_task_timeout(task_uuid)
        Tracker._re_create_retro_hunt_task(meta['name'], meta['rule'], meta['date'], meta['date_from'], meta['date_to'], meta['creator'], meta['sources'], meta['tags'], [], timeout, meta['description'], task_uuid, state=meta['state'], nb_match=meta['nb_match'], last_id=last_id)

        # # TODO: IN background ?
        for id in old_Tracker.get_retro_hunt_items_by_daterange(task_uuid, meta['date_from'], meta['date_to']):
            Tracker.save_retro_hunt_match(task_uuid, id)
def investigations_migration():
    """Migrate investigations (metadata + registered objects) to Kvrocks."""
    print('INVESTIGATION MIGRATION...')
    for investigation_uuid in old_Investigations.get_all_investigations():
        old_investigation = old_Investigations.Investigation(investigation_uuid)
        meta = old_investigation.get_metadata()
        Investigations._re_create_investagation(meta['uuid'], meta['user_creator'], meta['date'], meta['name'], meta['threat_level'], meta['analysis'], meta['info'], meta['tags'], meta['last_change'], meta['timestamp'], meta['misp_events'])

        new_investigation = Investigations.Investigation(meta['uuid'])
        for dict_obj in old_investigation.get_objects():
            new_investigation.register_object(dict_obj['id'], dict_obj['type'], dict_obj['subtype'])
        # NOTE(review): presumably register_object() bumps last_change,
        # hence the original value is restored afterwards -- confirm
        new_investigation.set_last_change(meta['last_change'])
def item_submit_migration():
    # TODO: not implemented yet
    pass

# /!\ KEY COLLISION
# # TODO: change db
def tags_migration():
    # TODO: not implemented yet
    pass

def crawler_migration():
    # TODO: not implemented yet
    pass

def domain_migration():
    # TODO: not implemented yet
    pass

# # TODO: refactor keys
def correlations_migration():
    # TODO: not implemented yet
    pass

# # # # # # # # # # # # # # # #
#       STATISTICS
#
#   Credential:
#       HSET 'credential_by_tld:'+date, tld, 1
def statistics_migration():
    # TODO: not implemented yet
    pass
if __name__ == '__main__':
    # Only core + user migrations are enabled for now; the commented-out
    # migrations are presumably still being tested -- confirm before enabling.
    core_migration()
    user_migration()
    #ail_2_ail_migration()
    #trackers_migration()
    #investigations_migration()

    ##########################################################

View file

@ -26,8 +26,8 @@ import Tag
config_loader = ConfigLoader.ConfigLoader()
r_cache = config_loader.get_redis_conn("Redis_Cache")
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_sync = config_loader.get_redis_conn("ARDB_DB")
r_serv_db = config_loader.get_redis_conn("Kvrocks_DB")
r_serv_sync = config_loader.get_redis_conn("Kvrocks_DB")
config_loader = None
WEBSOCKETS_CLOSE_CODES = {
@ -480,7 +480,7 @@ def change_pull_push_state(ail_uuid, pull=None, push=None):
else:
pull = False
if curr_pull != pull:
print('pull hset')
#print('pull hset')
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'pull', pull)
edited = True
if push is not None:
@ -490,7 +490,7 @@ def change_pull_push_state(ail_uuid, pull=None, push=None):
else:
push = False
if curr_push != push:
print('push hset')
#print('push hset')
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'push', push)
edited = True
if edited:
@ -991,7 +991,11 @@ def edit_sync_queue_filter_tags(queue_uuid, new_tags):
# # TODO: optionnal name ???
# # TODO: SANITYZE TAGS
def create_sync_queue(name, tags=[], description=None, max_size=100):
# # TODO: SANITYZE queue_uuid
def create_sync_queue(name, tags=[], description=None, max_size=100, _queue_uuid=None):
if _queue_uuid:
queue_uuid = sanityze_uuid(_queue_uuid).replace('-', '')
else:
queue_uuid = generate_uuid()
r_serv_sync.sadd('ail2ail:sync_queue:all', queue_uuid)
@ -1156,8 +1160,19 @@ def get_sync_queue_object_by_queue_uuid(queue_uuid, ail_uuid, push=True):
# # REVIEW: # TODO: create by obj type
return Item(obj_dict['id'])
def add_object_to_sync_queue(queue_uuid, ail_uuid, obj_dict, push=True, pull=True):
def get_sync_queue_objects_by_queue_uuid(queue_uuid, ail_uuid, push=True):
    """Return every queued (serialized) object of an instance's push or pull queue."""
    sync_mode = 'push' if push else 'pull'
    return r_serv_sync.lrange(f'sync:queue:{sync_mode}:{queue_uuid}:{ail_uuid}', 0, -1)
# # TODO: use queue max_size
def add_object_to_sync_queue(queue_uuid, ail_uuid, obj_dict, push=True, pull=True, json_obj=True):
if json_obj:
obj = json.dumps(obj_dict)
else:
obj = obj_dict
# # TODO: # FIXME: USE CACHE ??????
if push:

View file

@ -28,7 +28,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
import Tag
config_loader = ConfigLoader.ConfigLoader()
r_tracking = config_loader.get_redis_conn("DB_Tracking")
r_tracking = config_loader.get_redis_conn("Kvrocks_DB")
config_loader = None
@ -287,11 +287,30 @@ def delete_obj_investigations(obj_id, obj_type, subtype=''):
return unregistred
def _set_timestamp(investigation_uuid, timestamp):
    # internal helper: overwrite the stored timestamp of an investigation
    r_tracking.hset(f'investigations:data:{investigation_uuid}', 'timestamp', timestamp)
# analysis - threat level - info - date - creator
def _re_create_investagation(investigation_uuid, user_id, date, name, threat_level, analysis, info, tags, last_change, timestamp, misp_events):
    """Re-create an investigation with a fixed uuid (migration helper).

    Restores the original timestamp, last_change and MISP event links that
    create_investigation() would otherwise initialize itself.
    """
    create_investigation(user_id, date, name, threat_level, analysis, info, tags=tags, investigation_uuid=investigation_uuid)
    if timestamp:
        _set_timestamp(investigation_uuid, timestamp)
    investigation = Investigation(investigation_uuid)
    if last_change:
        investigation.set_last_change(last_change)
    for misp_event in misp_events:
        investigation.add_misp_events(misp_event)
# # TODO: fix default threat_level analysis
# # TODO: limit description + name
# # TODO: sanityze tags
# # TODO: sanityze date
def create_investigation(user_id, date, name, threat_level, analysis, info, tags=[]):
def create_investigation(user_id, date, name, threat_level, analysis, info, tags=[], investigation_uuid=None):
if investigation_uuid:
if not is_valid_uuid_v4(investigation_uuid):
investigation_uuid = generate_uuid()
else:
investigation_uuid = generate_uuid()
r_tracking.sadd('investigations:all', investigation_uuid)
# user map
@ -461,9 +480,10 @@ if __name__ == '__main__':
# res = r_tracking.dbsize()
# print(res)
investigation_uuid = 'e4e1c8e3b0a349bf81482f2f823efc0f'
investigation_uuid = 'a6545c38083444eeb9383d357f8fa747'
_set_timestamp(investigation_uuid, int(time.time()))
investigation = Investigation(investigation_uuid)
investigation.delete()
# investigation = Investigation(investigation_uuid)
# investigation.delete()
# # TODO: PAGINATION

View file

@ -25,8 +25,8 @@ import item_basic
config_loader = ConfigLoader.ConfigLoader()
r_cache = config_loader.get_redis_conn("Redis_Cache")
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_tracker = config_loader.get_redis_conn("ARDB_Tracker")
r_serv_db = config_loader.get_redis_conn("Kvrocks_DB")
r_serv_tracker = config_loader.get_redis_conn("Kvrocks_DB")
items_dir = config_loader.get_config_str("Directories", "pastes")
if items_dir[-1] == '/':
@ -99,7 +99,10 @@ def get_tracker_type(tracker_uuid):
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'type')
def get_tracker_level(tracker_uuid):
return int(r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'level'))
level = r_serv_tracker.hget(f'tracker:{tracker_uuid}', 'level')
if not level:
level = 0
return int(level)
def get_tracker_user_id(tracker_uuid):
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'user_id')
@ -233,6 +236,9 @@ def set_tracker_first_seen(tracker_uuid, date):
def set_tracker_last_seen(tracker_uuid, date):
r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'last_seen', int(date))
def set_tracker_user_id(tracker_uuid, user_id):
    # overwrite the owner of a tracker (used by migration to restore the creator)
    r_serv_tracker.hset(f'tracker:{tracker_uuid}', 'user_id', user_id)
# # TODO: ADD CACHE ???
def update_tracker_daterange(tracker_uuid, date, op='add'):
date = int(date)
@ -420,6 +426,15 @@ def api_validate_tracker_to_add(tracker , tracker_type, nb_words=1):
return {"status": "error", "reason": "Incorrect type"}, 400
return {"status": "success", "tracker": tracker, "type": tracker_type}, 200
def _re_create_tracker(tracker, tracker_type, user_id, level, tags, mails, description, webhook, dashboard, tracker_uuid, sources, first_seen, last_seen):
    """Re-create a tracker with a fixed uuid (migration helper), restoring owner and seen dates."""
    create_tracker(tracker, tracker_type, user_id, level, tags, mails, description, webhook, dashboard=dashboard, tracker_uuid=tracker_uuid, sources=sources)
    # NOTE(review): presumably create_tracker()'s edit path does not persist
    # user_id, hence the explicit restore -- confirm
    set_tracker_user_id(tracker_uuid, user_id)
    if first_seen:
        set_tracker_first_seen(tracker_uuid, first_seen)
    if last_seen:
        set_tracker_last_seen(tracker_uuid, last_seen)
def create_tracker(tracker, tracker_type, user_id, level, tags, mails, description, webhook, dashboard=0, tracker_uuid=None, sources=[]):
# edit tracker
if tracker_uuid:
@ -836,8 +851,11 @@ def set_retro_hunt_task_state(task_uuid, new_state):
r_serv_tracker.sadd(f'tracker:retro_hunt:task:{new_state}', task_uuid)
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'state', new_state)
def get_retro_hunt_task_type(task_uuid):
return r_serv_tracker(f'tracker:retro_hunt:task:{task_uuid}', 'type')
# def get_retro_hunt_task_type(task_uuid):
# return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'type')
#
# def set_retro_hunt_task_type(task_uuid, task_type):
# r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'type', task_type)
# # TODO: yararule
def get_retro_hunt_task_rule(task_uuid, r_compile=False):
@ -857,6 +875,12 @@ def get_retro_hunt_task_timeout(task_uuid):
else:
return 30 # # TODO: FIXME use instance limit
def get_retro_hunt_task_date(task_uuid):
    # creation date of the retro hunt task (format assumed YYYYMMDD -- confirm)
    return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'date')

def set_retro_hunt_task_date(task_uuid, date):
    return r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'date', date)

def get_retro_hunt_task_date_from(task_uuid):
    # start of the task's analyzed date range
    return r_serv_tracker.hget(f'tracker:retro_hunt:task:{task_uuid}', 'date_from')
@ -944,8 +968,17 @@ def clear_retro_hunt_task_cache(task_uuid):
# name
# description
# state error
def _re_create_retro_hunt_task(name, rule, date, date_from, date_to, creator, sources, tags, mails, timeout, description, task_uuid, state='pending', nb_match=0, last_id=None):
    """Re-create a retro-hunt task with a fixed uuid (migration helper).

    Also restores the last analyzed item, the match counter and the creation date.
    """
    create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=sources, tags=tags, mails=mails, timeout=timeout, description=description, task_uuid=task_uuid, state=state)
    if last_id:
        set_retro_hunt_last_analyzed(task_uuid, last_id)
    _set_retro_hunt_nb_match(task_uuid, nb_match)
    set_retro_hunt_task_date(task_uuid, date)
# # # TODO: TYPE
def create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=[], tags=[], mails=[], timeout=30, description=None, task_uuid=None):
def create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=[], tags=[], mails=[], timeout=30, description=None, task_uuid=None, state='pending'):
if not task_uuid:
task_uuid = str(uuid.uuid4())
@ -970,9 +1003,11 @@ def create_retro_hunt_task(name, rule, date_from, date_to, creator, sources=[],
r_serv_tracker.sadd(f'tracker:retro_hunt:task:mails:{task_uuid}', escape(mail))
r_serv_tracker.sadd('tracker:retro_hunt:task:all', task_uuid)
# add to pending tasks
r_serv_tracker.sadd('tracker:retro_hunt:task:pending', task_uuid)
r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'state', 'pending')
if state not in ('pending', 'completed', 'paused'):
state = 'pending'
set_retro_hunt_task_state(task_uuid, state)
return task_uuid
# # TODO: delete rule
@ -1092,12 +1127,15 @@ def compute_retro_hunt_task_progress(task_uuid, date_from=None, date_to=None, so
# # TODO: # FIXME: # Cache
# # TODO: ADD MAP ID => Retro_Hunt
def save_retro_hunt_match(task_uuid, id, object_type='item'):
    """Record a retro-hunt match: per-date match set, per-date counter,
    and the reverse map obj -> task."""
    item_date = item_basic.get_item_date(id)
    res = r_serv_tracker.sadd(f'tracker:retro_hunt:task:item:{task_uuid}:{item_date}', id)
    # track nb item by date (only when the item was not already recorded)
    if res == 1:
        r_serv_tracker.zincrby(f'tracker:retro_hunt:task:stat:{task_uuid}', int(item_date), 1)
    # Add map obj_id -> task_uuid
    # BUG FIX: `item_id` was undefined here (NameError); the parameter is `id`
    r_serv_tracker.sadd(f'obj:retro_hunt:item:{id}', task_uuid)
def get_retro_hunt_all_item_dates(task_uuid):
    # every date (zset member) with at least one match for this task
    return r_serv_tracker.zrange(f'tracker:retro_hunt:task:stat:{task_uuid}', 0, -1)
@ -1111,12 +1149,15 @@ def get_retro_hunt_nb_match(task_uuid):
nb_match += int(tuple[1])
return int(nb_match)
def _set_retro_hunt_nb_match(task_uuid, nb_match):
    """Persist a pre-computed match counter for a retro-hunt task."""
    r_serv_tracker.hset(f'tracker:retro_hunt:task:{task_uuid}', 'nb_match', nb_match)

def set_retro_hunt_nb_match(task_uuid):
    """Recompute the match counter from the per-date stats and persist it."""
    date_stats = r_serv_tracker.zrange(f'tracker:retro_hunt:task:stat:{task_uuid}', 0, -1, withscores=True)
    total = sum(int(score) for _date, score in date_stats)
    _set_retro_hunt_nb_match(task_uuid, total)
def get_retro_hunt_items_by_daterange(task_uuid, date_from, date_to):
all_item_id = set()
@ -1296,8 +1337,9 @@ def _fix_db_custom_tags():
#### -- ####
if __name__ == '__main__':
_fix_db_custom_tags()
#if __name__ == '__main__':
#_fix_db_custom_tags()
# fix_all_tracker_uuid_list()
# res = get_all_tracker_uuid()
# print(len(res))

277
bin/lib/Users.py Executable file
View file

@ -0,0 +1,277 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import bcrypt
import os
import re
import secrets
import sys
from flask_login import UserMixin
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib.ConfigLoader import ConfigLoader
# Config
config_loader = ConfigLoader()
#r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_db = config_loader.get_redis_conn("Kvrocks_DB")
config_loader = None
regex_password = r'^(?=(.*\d){2})(?=.*[a-z])(?=.*[A-Z]).{10,100}$'
regex_password = re.compile(regex_password)
# # TODO: ADD FUNCTIONS PASSWORD RESET + API KEY RESET + CREATE USER
# # TODO: migrate Role_Manager
#### PASSWORDS + TOKENS ####
def check_password_strength(password):
    """Return True if the password matches the policy enforced by
    regex_password (10-100 chars, >=2 digits, >=1 lowercase, >=1 uppercase)."""
    result = regex_password.match(password)
    if result:
        return True
    else:
        return False

def gen_password():
    # random url-safe password (30 bytes of entropy)
    return secrets.token_urlsafe(30)

def hashing_password(password):
    # bcrypt hash of the utf-8 encoded password (returns bytes)
    password = password.encode()
    return bcrypt.hashpw(password, bcrypt.gensalt())

def gen_token():
    # random url-safe API token (41 bytes of entropy)
    return secrets.token_urlsafe(41)
def _delete_user_token(user_id):
    # remove the user's current token from the global token -> user map
    current_token = get_user_token(user_id)
    r_serv_db.hdel('ail:users:tokens', current_token)

def _set_user_token(user_id, token):
    # register the token both in the global map and in the user's metadata
    r_serv_db.hset('ail:users:tokens', token, user_id)
    r_serv_db.hset(f'ail:user:metadata:{user_id}', 'token', token)

def generate_new_token(user_id):
    # create user token (drop the old one first)
    _delete_user_token(user_id)
    token = gen_token()
    _set_user_token(user_id, token)

def get_default_admin_token():
    """Return the default admin account's token, or '' if the account does not exist."""
    if r_serv_db.exists('ail:user:metadata:admin@admin.test'):
        return r_serv_db.hget('ail:user:metadata:admin@admin.test', 'token')
    else:
        return ''
##-- PASSWORDS + TOKENS --##
#### USERS ####
def get_all_users():
    # all registered user ids (hash fields of 'ail:users:all')
    return r_serv_db.hkeys('ail:users:all')

def get_user_role(user_id):
    return r_serv_db.hget(f'ail:user:metadata:{user_id}', 'role')

def get_user_passwd_hash(user_id):
    # bcrypt hash stored by create_user()/edit_user_password()
    return r_serv_db.hget('ail:users:all', user_id)
def get_user_token(user_id):
    """Return the user's API token (or None if unset)."""
    # BUG FIX: key was 'ail:users:metadata:{user_id}' while every other
    # accessor (_set_user_token, get_user_metadata, create_user, ...) uses
    # 'ail:user:metadata:{user_id}' -- this always returned None, so
    # _delete_user_token()/delete_user() could never remove the old token.
    return r_serv_db.hget(f'ail:user:metadata:{user_id}', 'token')
def exists_user(user_id):
    # truthy if the user has a metadata hash (EXISTS returns an int)
    return r_serv_db.exists(f'ail:user:metadata:{user_id}')
def get_user_metadata(user_id):
    """Return {email, role, api_key} for one user."""
    return {
        'email': user_id,
        'role': r_serv_db.hget(f'ail:user:metadata:{user_id}', 'role'),
        'api_key': r_serv_db.hget(f'ail:user:metadata:{user_id}', 'token'),
    }

def get_users_metadata(list_users):
    """Return the metadata dict of every user in *list_users*."""
    return [get_user_metadata(user_id) for user_id in list_users]
def create_user(user_id, password=None, chg_passwd=True, role=None):
    """Create a user, or edit password/role if the user already exists.

    When no password is supplied a random one is generated; with chg_passwd
    the account is flagged to force a password change.
    """
    # # TODO: check password strenght
    if password:
        new_password = password
    else:
        new_password = gen_password()
    password_hash = hashing_password(new_password)

    # EDIT existing user
    if exists_user(user_id):
        if password or chg_passwd:
            edit_user_password(user_id, password_hash, chg_passwd=chg_passwd)
        if role:
            edit_user_role(user_id, role)
    # CREATE USER
    else:
        # Role
        if not role:
            role = get_default_role()

        if role in get_all_roles():
            # a user belongs to its role set and every lower-privileged one
            for role_to_add in get_all_user_role(role):
                r_serv_db.sadd(f'ail:users:role:{role_to_add}', user_id)
            r_serv_db.hset(f'ail:user:metadata:{user_id}', 'role', role)

        r_serv_db.hset('ail:users:all', user_id, password_hash)
        if chg_passwd:
            r_serv_db.hset(f'ail:user:metadata:{user_id}', 'change_passwd', True)

        # create user token
        generate_new_token(user_id)
def edit_user_password(user_id, password_hash, chg_passwd=False):
    """Store a new (already hashed) password and rotate the user's token."""
    if chg_passwd:
        # flag the account: user must change this password at next login
        r_serv_db.hset(f'ail:user:metadata:{user_id}', 'change_passwd', True)
    else:
        r_serv_db.hdel(f'ail:user:metadata:{user_id}', 'change_passwd')
    # remove default user password file
    if user_id == 'admin@admin.test':
        default_passwd_file = os.path.join(os.environ['AIL_HOME'], 'DEFAULT_PASSWORD')
        if os.path.isfile(default_passwd_file):
            os.remove(default_passwd_file)
    r_serv_db.hset('ail:users:all', user_id, password_hash)
    # create new token
    generate_new_token(user_id)
# # TODO: solve edge_case self delete
def delete_user(user_id):
    """Delete a user: role memberships, token, metadata and password entry."""
    if exists_user(user_id):
        for role_id in get_all_roles():
            # BUG FIX: key was the literal string 'ail:users:role:{role_id}'
            # (missing f-prefix), so role membership was never removed
            r_serv_db.srem(f'ail:users:role:{role_id}', user_id)
        user_token = get_user_token(user_id)
        if user_token:
            r_serv_db.hdel('ail:users:tokens', user_token)
        r_serv_db.delete(f'ail:user:metadata:{user_id}')
        r_serv_db.hdel('ail:users:all', user_id)
    # # TODO: raise Exception
    else:
        print(f'Error: user {user_id} do not exist')
##-- USERS --##
#### ROLES ####
def get_all_roles():
    # roles ordered by privilege (score 1 = most privileged)
    return r_serv_db.zrange('ail:roles:all', 0, -1)

# create role_list
def _create_roles_list():
    """Seed the role zset (score = privilege rank, 1 = admin) if it does not exist."""
    if not r_serv_db.exists('ail:roles:all'):
        # NOTE(review): positional zadd(name, score, member) is the legacy
        # redis-py 2.x signature -- confirm the pinned redis-py version
        r_serv_db.zadd('ail:roles:all', 1, 'admin')
        r_serv_db.zadd('ail:roles:all', 2, 'analyst')
        r_serv_db.zadd('ail:roles:all', 3, 'user')
        r_serv_db.zadd('ail:roles:all', 4, 'user_no_api')
        r_serv_db.zadd('ail:roles:all', 5, 'read_only')

def get_role_level(role):
    # privilege rank of a role (1 = admin ... 5 = read_only)
    return int(r_serv_db.zscore('ail:roles:all', role))

def get_user_role_by_range(inf, sup):
    # roles whose 0-based zset rank is in [inf, sup] (inclusive)
    return r_serv_db.zrange('ail:roles:all', inf, sup)

def get_all_user_role(user_role):
    # the role itself + every lower-privileged role
    current_role_val = get_role_level(user_role)
    return r_serv_db.zrange('ail:roles:all', current_role_val - 1, -1)
def get_all_user_upper_role(user_role):
    """Return every role strictly more privileged than *user_role* (possibly empty)."""
    rank = get_role_level(user_role)
    # rank 1 is the top role: nothing is above it
    return r_serv_db.zrange('ail:roles:all', 0, rank - 2) if rank > 1 else []

def get_default_role():
    """Role assigned to new users when none is specified."""
    return 'read_only'
def edit_user_role(user_id, role):
    """Move a user to a new role, keeping the role sets consistent.

    A user belongs to the set of its role and of every lower-privileged
    role; this adds/removes the membership range between old and new role.
    """
    current_role = get_user_role(user_id)
    if role != current_role:
        request_level = get_role_level(role)
        current_role = get_role_level(current_role)

        if current_role < request_level:
            # demotion (lower score = more privilege): drop memberships
            # between the old and the new privilege level
            role_to_remove = get_user_role_by_range(current_role - 1, request_level - 2)
            for role_id in role_to_remove:
                r_serv_db.srem(f'ail:users:role:{role_id}', user_id)
            r_serv_db.hset(f'ail:user:metadata:{user_id}', 'role', role)
        else:
            # promotion: add memberships between the new and the old level.
            # NOTE(review): the upper bound current_role includes one role the
            # user already holds -- harmless (sadd is idempotent) but looks
            # like an off-by-one; confirm
            role_to_add = get_user_role_by_range(request_level - 1, current_role)
            for role_id in role_to_add:
                r_serv_db.sadd(f'ail:users:role:{role_id}', user_id)
            r_serv_db.hset(f'ail:user:metadata:{user_id}', 'role', role)
def check_user_role_integrity(user_id):
    """Return True if the user's role-set memberships match its declared role.

    The user must be in its own role set and every lower-privileged one,
    and in none of the more privileged ones.
    """
    user_role = get_user_role(user_id)
    all_user_role = get_all_user_role(user_role)
    res = True
    for role in all_user_role:
        if not r_serv_db.sismember(f'ail:users:role:{role}', user_id):
            res = False
    upper_role = get_all_user_upper_role(user_role)
    for role in upper_role:
        if r_serv_db.sismember(f'ail:users:role:{role}', user_id):
            res = False
    return res
##-- ROLES --##
class User(UserMixin):
    """Flask-Login user backed by the 'ail:users:all' hash.

    Unknown ids are mapped to the '__anonymous__' sentinel instead of
    raising, so the object is always constructible.
    """

    def __init__(self, id):
        if r_serv_db.hexists('ail:users:all', id):
            self.id = id
        else:
            self.id = "__anonymous__"

    # return True or False
    #def is_authenticated():

    # return True or False
    #def is_anonymous():

    @classmethod
    def get(self_class, id):
        # alternate constructor kept for flask-login style lookups
        return self_class(id)

    def user_is_anonymous(self):
        if self.id == "__anonymous__":
            return True
        else:
            return False

    def check_password(self, password):
        # anonymous users can never authenticate
        if self.user_is_anonymous():
            return False
        password = password.encode()
        hashed_password = r_serv_db.hget('ail:users:all', self.id).encode()
        if bcrypt.checkpw(password, hashed_password):
            return True
        else:
            return False

    def request_password_change(self):
        # 'change_passwd' is stored as the string 'True' (see create_user)
        if r_serv_db.hget(f'ail:user:metadata:{self.id}', 'change_passwd') == 'True':
            return True
        else:
            return False

    def is_in_role(self, role):
        if r_serv_db.sismember(f'ail:users:role:{role}', self.id):
            return True
        else:
            return False

183
bin/lib/ail_updates.py Executable file
View file

@ -0,0 +1,183 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import datetime
import os
import sys
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib.ConfigLoader import ConfigLoader
from lib import Users
config_loader = ConfigLoader()
#r_serv_db = config_loader.get_redis_conn("Kvrocks_DB")
r_serv = config_loader.get_redis_conn("ARDB_DB")
config_loader = None
# Per-version background update descriptors:
#   nb_updates: number of update scripts that version runs
#   message:    human-readable label (presumably shown in the update UI)
BACKGROUND_UPDATES = {
    'v1.5': {
        'nb_updates': 5,
        'message': 'Tags and Screenshots'
    },
    'v2.4': {
        'nb_updates': 1,
        'message': ' Domains Tags and Correlations'
    },
    'v2.6': {
        'nb_updates': 1,
        'message': 'Domains Tags and Correlations'
    },
    'v2.7': {
        'nb_updates': 1,
        'message': 'Domains Tags'
    },
    'v3.4': {
        'nb_updates': 1,
        'message': 'Domains Languages'
    },
    'v3.7': {
        'nb_updates': 1,
        'message': 'Trackers first_seen/last_seen'
    }
}
def get_ail_version():
    # current AIL version string, e.g. 'v3.7' (None if never set)
    return r_serv.get('ail:version')

def get_ail_float_version():
    """Return the version as a float (e.g. 3.7), or 0 if unset."""
    version = get_ail_version()
    if version:
        version = float(version[1:])
    else:
        version = 0
    return version
def get_ail_all_updates(date_separator='-'):
    """Return {version: install_date}; dates are reformatted
    YYYYMMDD -> YYYY<sep>MM<sep>DD unless *date_separator* is empty."""
    dict_update = r_serv.hgetall('ail:update_date')
    if date_separator:
        for version in dict_update:
            u_date = dict_update[version]
            dict_update[version] = f'{u_date[0:4]}{date_separator}{u_date[4:6]}{date_separator}{u_date[6:8]}'
    return dict_update
def add_ail_update(version):
    # Add new AIL version with today's date (YYYYMMDD)
    r_serv.hset('ail:update_date', version, datetime.datetime.now().strftime("%Y%m%d"))
    # Set current ail version, but only if it is newer than the stored one
    if float(version[1:]) > get_ail_float_version():
        r_serv.set('ail:version', version)
def check_version(version):
    """Validate an AIL version string: 'v' followed by dot-separated digits
    (e.g. 'v3.7'). Returns True/False, never raises."""
    # ROBUSTNESS FIX: guard against an empty string (version[0] raised IndexError)
    if not version or version[0] != 'v':
        return False
    try:
        int(version[1])
        int(version[-1])
        int(version[1:].replace('.', ''))
    # BUG FIX: narrowed from a bare except; only these can fail here
    # (IndexError covers the single-char input 'v')
    except (ValueError, IndexError):
        return False
    if '..' in version:
        return False
    return True
#### UPDATE BACKGROUND ####
def exits_background_update_to_launch():
    # NOTE(review): name typo ('exits' -> 'exists') kept for API compatibility
    return r_serv.scard('ail:update:to_update') != 0

def is_version_in_background_update(version):
    return r_serv.sismember('ail:update:to_update', version)

def get_all_background_updates_to_launch():
    # versions still waiting for their background update to run
    return r_serv.smembers('ail:update:to_update')

def get_current_background_update():
    # version currently being updated in background (None if idle)
    return r_serv.get('ail:update:update_in_progress')

def get_current_background_update_script():
    # file name of the update script currently running
    return r_serv.get('ail:update:current_background_script')

def get_current_background_update_script_path(version, script_name):
    # absolute path: $AIL_HOME/update/<version>/<script_name>
    return os.path.join(os.environ['AIL_HOME'], 'update', version, script_name)
def get_current_background_nb_update_completed():
    # number of update scripts already completed for the current version
    return r_serv.scard('ail:update:update_in_progress:completed')

def get_current_background_update_progress():
    """Progress of the running update script (presumably 0-100 percent; 0 if unset)."""
    progress = r_serv.get('ail:update:current_background_script_stat')
    if not progress:
        progress = 0
    return int(progress)

def get_background_update_error():
    return r_serv.get('ail:update:error')

def add_background_updates_to_launch(version):
    return r_serv.sadd('ail:update:to_update', version)

def start_background_update(version):
    # entering update mode clears any previous error
    r_serv.delete('ail:update:error')
    r_serv.set('ail:update:update_in_progress', version)

def set_current_background_update_script(script_name):
    # switching script resets its progress counter
    r_serv.set('ail:update:current_background_script', script_name)
    r_serv.set('ail:update:current_background_script_stat', 0)

def set_current_background_update_progress(progress):
    r_serv.set('ail:update:current_background_script_stat', progress)

def set_background_update_error(error):
    r_serv.set('ail:update:error', error)
def end_background_update_script():
    """Mark the currently running update script as completed."""
    # BUG FIX: sadd() was called without a member (TypeError at runtime).
    # Record the finished script's name so scard() counts completed scripts.
    script_name = get_current_background_update_script()
    if script_name:
        r_serv.sadd('ail:update:update_in_progress:completed', script_name)
def end_background_update(version):
    """Clear all background-update state and drop *version* from the pending set."""
    for key in ('ail:update:update_in_progress',
                'ail:update:current_background_script',
                'ail:update:current_background_script_stat',
                'ail:update:update_in_progress:completed'):
        r_serv.delete(key)
    r_serv.srem('ail:update:to_update', version)

def clear_background_update():
    """Reset every background-update key, including the error flag."""
    for key in ('ail:update:error',
                'ail:update:update_in_progress',
                'ail:update:current_background_script',
                'ail:update:current_background_script_stat',
                'ail:update:update_in_progress:completed'):
        r_serv.delete(key)
def get_update_background_message(version):
    # raises KeyError for an unknown version -- callers must pass a key of BACKGROUND_UPDATES
    return BACKGROUND_UPDATES[version]['message']
# TODO: Detect error in subprocess
def get_update_background_metadata():
    """Return a dict describing the in-progress background update, or {} if idle.

    Keys: version, script, script_progress, nb_update, nb_completed,
    progress (percent of completed scripts), error.
    """
    dict_update = {}
    version = get_current_background_update()
    if version:
        dict_update['version'] = version
        dict_update['script'] = get_current_background_update_script()
        dict_update['script_progress'] = get_current_background_update_progress()
        # KeyError if the version is not in BACKGROUND_UPDATES -- confirm callers guard this
        dict_update['nb_update'] = BACKGROUND_UPDATES[dict_update['version']]['nb_updates']
        dict_update['nb_completed'] = get_current_background_nb_update_completed()
        dict_update['progress'] = int(dict_update['nb_completed']*100/dict_update['nb_update'])
        dict_update['error'] = get_background_update_error()
    return dict_update
##-- UPDATE BACKGROUND --##
##########################
if __name__ == '__main__':
    # ad-hoc manual test: 'v3.1..1' contains '..' and must be rejected
    res = check_version('v3.1..1')
    print(res)

View file

@ -11,7 +11,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import ConfigLoader
config_loader = ConfigLoader.ConfigLoader()
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_db = config_loader.get_redis_conn("Kvrocks_DB")
r_cache = config_loader.get_redis_conn("Redis_Cache")
config_loader = None

View file

@ -5,24 +5,17 @@ import os
import sys
import redis
# sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
from abstract_object import AbstractObject
from flask import url_for
config_loader = ConfigLoader.ConfigLoader()
sys.path.append(os.environ['AIL_BIN'])
from lib.ConfigLoader import ConfigLoader
from lib.objects import abstract_object
config_loader = ConfigLoader()
config_loader = None
################################################################################
################################################################################
################################################################################
class CryptoCurrency(AbstractObject):
class CryptoCurrency(abstract_object.AbstractObject):
"""
AIL CryptoCurrency Object. (strings)
"""
@ -40,6 +33,23 @@ class CryptoCurrency(AbstractObject):
# # TODO:
pass
def get_currency_symbol(self):
    """Return the ticker symbol for this cryptocurrency subtype, or None if unknown."""
    symbols = {
        'bitcoin': 'BTC',
        'ethereum': 'ETH',
        'bitcoin-cash': 'BCH',
        'litecoin': 'LTC',
        'monero': 'XMR',
        'zcash': 'ZEC',
        'dash': 'DASH',
    }
    return symbols.get(self.subtype)
def get_link(self, flask_context=False):
if flask_context:
url = url_for('correlation.show_correlation', object_type=self.type, type_id=self.subtype, correlation_id=self.id)
@ -62,6 +72,22 @@ class CryptoCurrency(AbstractObject):
icon = '\uf51e'
return {'style': style, 'icon': icon, 'color': '#DDCC77', 'radius':5}
def get_misp_object(self):
    """Export this cryptocurrency address as a MISP 'coin-address' object.

    Adds the address, the currency symbol when one is known for the subtype,
    and tags every attribute with all of this object's tags.
    """
    misp_obj = MISPObject('coin-address')
    misp_obj.first_seen = self.get_first_seen()
    misp_obj.last_seen = self.get_last_seen()
    attrs = [misp_obj.add_attribute('address', value=self.id)]
    symbol = self.get_currency_symbol()
    if symbol:
        attrs.append(misp_obj.add_attribute('symbol', value=symbol))
    for attr in attrs:
        for tag in self.get_tags():
            attr.add_tag(tag)
    return misp_obj
############################################################################
############################################################################
############################################################################
@ -72,6 +98,19 @@ class CryptoCurrency(AbstractObject):
############################################################################
############################################################################
def build_crypto_regex(subtype, search_id):
pass
def search_by_name(subtype, search_id):
    """Search cryptocurrency objects of `subtype` matching `search_id`.

    An exact id match short-circuits and returns `search_id`; otherwise a
    regex search over all ids of the subtype is attempted.
    NOTE(review): build_crypto_regex is still a stub returning None, so the
    regex fallback passes None to search_subtype_obj_by_id — confirm intended.
    """
    # # TODO: BUILD regex
    obj = CryptoCurrency(subtype, search_id)
    if obj.exists():
        return search_id
    else:
        regex = build_crypto_regex(subtype, search_id)
        return abstract_object.search_subtype_obj_by_id('cryptocurrency', subtype, regex)
#if __name__ == '__main__':

View file

@ -3,18 +3,16 @@
import os
import sys
import redis
# sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
from abstract_object import AbstractObject
from flask import url_for
from io import BytesIO
config_loader = ConfigLoader.ConfigLoader()
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
sys.path.append(os.environ['AIL_BIN'])
from lib.ConfigLoader import ConfigLoader
from lib.objects.abstract_object import AbstractObject
config_loader = ConfigLoader()
r_metadata = config_loader.get_redis_conn("ARDB_Metadata")
HASH_DIR = config_loader.get_config_str('Directories', 'hash')
baseurl = config_loader.get_config_str("Notifications", "ail_domain")
config_loader = None
@ -43,7 +41,7 @@ class Decoded(AbstractObject):
return self.id.split('/')[0]
def get_file_type(self):
return r_serv_metadata.hget(f'metadata_hash:{self.get_sha1()}', 'estimated_type')
return r_metadata.hget(f'metadata_hash:{self.get_sha1()}', 'estimated_type')
# # WARNING: UNCLEAN DELETE /!\ TEST ONLY /!\
def delete(self):
@ -71,6 +69,42 @@ class Decoded(AbstractObject):
icon = '\uf249'
return {'style': 'fas', 'icon': icon, 'color': '#88CCEE', 'radius':5}
'''
Return the estimed type of a given decoded item.
:param sha1_string: sha1_string
'''
def get_estimated_type(self):
    """Return the estimated mimetype of this decoded item (id is its sha1), from the hash metadata."""
    return r_metadata.hget(f'metadata_hash:{self.id}', 'estimated_type')
def get_rel_path(self, mimetype=None):
    """Relative on-disk path of the decoded file: HASH_DIR/<mimetype>/<id prefix>/<id>.

    When `mimetype` is not given, the estimated type from metadata is used.
    """
    mimetype = mimetype or self.get_estimated_type()
    prefix = self.id[:2]
    return os.path.join(HASH_DIR, mimetype, prefix, self.id)
def get_filepath(self, mimetype=None):
return os.path.join(os.environ['AIL_HOME'], self.get_rel_path(mimetype=mimetype))
def get_file_content(self, mimetype=None):
    """Read the decoded file from disk and return its bytes wrapped in a BytesIO."""
    path = self.get_filepath(mimetype=mimetype)
    with open(path, 'rb') as fh:
        return BytesIO(fh.read())
def get_misp_object(self):
obj_attrs = []
obj = MISPObject('file')
obj.first_seen = self.get_first_seen()
obj.last_seen = self.get_last_seen()
obj_attrs.append( obj.add_attribute('sha1', value=self.id) )
obj_attrs.append( obj.add_attribute('mimetype', value=self.get_estimated_type()) )
obj_attrs.append( obj.add_attribute('malware-sample', value=self.id, data=self.get_file_content()) )
for obj_attr in obj_attrs:
for tag in self.get_tags():
obj_attr.add_tag(tag)
return obj
############################################################################
############################################################################
############################################################################
@ -78,6 +112,17 @@ class Decoded(AbstractObject):
def exist_correlation(self):
pass
def create(self, content, date):
Decoded.save_decoded_file_content(sha1_string, decoded_file, item_date, mimetype=mimetype)
####correlation Decoded.save_item_relationship(sha1_string, item_id)
Decoded.create_decoder_matadata(sha1_string, item_id, decoder_name)
############################################################################
############################################################################

View file

@ -3,18 +3,18 @@
import os
import sys
import redis
import time
# sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
from abstract_object import AbstractObject
from flask import url_for
config_loader = ConfigLoader.ConfigLoader()
sys.path.append(os.environ['AIL_BIN'])
from lib.ConfigLoader import ConfigLoader
from lib.objects.abstract_object import AbstractObject
from lib.item_basic import get_item_children, get_item_date, get_item_url
config_loader = ConfigLoader()
r_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None
@ -27,8 +27,10 @@ class Domain(AbstractObject):
AIL Decoded Object. (strings)
"""
# id: domain name
def __init__(self, id):
super(Domain, self).__init__('domain', id)
self.domain_type = self.get_domain_type()
# def get_ail_2_ail_payload(self):
# payload = {'raw': self.get_gzip_content(b64=True),
@ -41,6 +43,57 @@ class Domain(AbstractObject):
else:
return 'regular'
def get_first_seen(self, r_int=False, separator=True):
    """Return the first-seen date (YYYYMMDD) of this domain, or None.

    :param separator: format the date as 'YYYY/MM/DD' (takes precedence)
    :param r_int: return the date as an int (only when separator is False)

    BUG FIX: signature was `def get_first_seen(selfr_int=False, ...)` —
    `self` was fused with `r_int`. Also the separator branch re-applied
    int() to the 'YYYY/MM/DD' string, which would raise ValueError;
    it now mirrors get_last_check().
    """
    first_seen = r_onion.hget(f'{self.domain_type}_metadata:{self.id}', 'first_seen')
    if first_seen:
        if separator:
            first_seen = f'{first_seen[0:4]}/{first_seen[4:6]}/{first_seen[6:8]}'
        elif r_int:
            first_seen = int(first_seen)
    return first_seen
def get_last_check(self, r_int=False, separator=True):
    """Return the last crawl-check date (YYYYMMDD) of this domain, or None.

    :param separator: format the date as 'YYYY/MM/DD' (takes precedence)
    :param r_int: return the date as an int (only when separator is False)
    """
    last_check = r_onion.hget(f'{self.domain_type}_metadata:{self.id}', 'last_check')
    if last_check is not None:
        if separator:
            last_check = f'{last_check[0:4]}/{last_check[4:6]}/{last_check[6:8]}'
        elif r_int:  # BUG FIX: was `r_format == "int"` — r_format is undefined
            last_check = int(last_check)
    return last_check
def get_ports(self):
    """Return the list of crawled ports for this domain; [] when none are recorded."""
    ports = r_onion.hget(f'{self.domain_type}_metadata:{self.id}', 'ports')
    return ports.split(";") if ports else []
def get_history_by_port(self, port, status=False, root=False):
    """Return the crawl history of this domain on `port`.

    :param status: include a 'status' bool (False when the stored root is a
                   bare epoch placeholder, i.e. the crawl saved no item)
    :param root: include the root item id under 'root'
    :return: list of dicts with 'epoch', 'date' and the optional keys above
    """
    history_tuple = r_onion.zrange(f'crawler_history_{self.domain_type}:{self.id}:{port}', 0, -1, withscores=True)
    history = []
    for root_id, epoch in history_tuple:
        epoch = int(epoch)  # force int
        dict_history = {'epoch': epoch,
                        # BUG FIX: was time.gmtime(epoch_val) — epoch_val is undefined
                        'date': time.strftime('%Y/%m/%d - %H:%M.%S', time.gmtime(epoch))}
        try:
            # BUG FIX: was int(root_item) — root_item is undefined.
            # A purely-numeric root means no item was saved for this crawl.
            int(root_id)
            if status:
                dict_history['status'] = False
        except ValueError:
            if status:
                dict_history['status'] = True
        if root:
            dict_history['root'] = root_id
        history.append(dict_history)
    return history
# # WARNING: UNCLEAN DELETE /!\ TEST ONLY /!\
def delete(self):
# # TODO:
@ -63,6 +116,69 @@ class Domain(AbstractObject):
icon = '\uf13b'
return {'style': style, 'icon': icon, 'color':color, 'radius':5}
def is_crawled_item(self, item_id):
    """True when `item_id` is an item crawled from this domain.

    A crawled item id embeds the domain name followed by a 36-character
    suffix; the length check rules out ids that are too short to match.
    """
    domain_len = len(self.id)
    if len(item_id) <= domain_len + 48:
        return False
    return item_id[-36 - domain_len:-36] == self.id
def get_crawled_items(self, root_id):
    """Return all items of a crawl: the crawled descendants plus the root itself (last)."""
    return self.get_crawled_items_children(root_id) + [root_id]
def get_crawled_items_children(self, root_id):
    """Recursively collect the crawled descendants of `root_id` belonging to this domain."""
    crawled_items = []
    for item_id in get_item_children(root_id):
        if self.is_crawled_item(item_id):
            crawled_items.append(item_id)
            # BUG FIX: was self.get_crawled_items_children(self.id, item_id) —
            # the method takes a single root id, not (domain, item).
            crawled_items.extend(self.get_crawled_items_children(item_id))
    return crawled_items
def get_all_urls(self, date=False):
    """Return every URL crawled for this domain.

    :param date: when True, return {url: {'first_seen': int, 'last_seen': int}}
                 (dates are YYYYMMDD ints); otherwise return a set of URLs.
    """
    if date:
        urls = {}
    else:
        urls = set()
    for port in self.get_ports():
        for history in self.get_history_by_port(port, root=True):
            if history.get('root'):
                for item_id in self.get_crawled_items(history.get('root')):
                    url = get_item_url(item_id)
                    if url:
                        if date:
                            item_date = int(get_item_date(item_id))
                            if url not in urls:
                                urls[url] = {'first_seen': item_date, 'last_seen': item_date}
                            else:  # update first_seen / last_seen
                                # BUG FIX: was `all_url[url]` — all_url is undefined
                                if item_date < urls[url]['first_seen']:
                                    urls[url]['first_seen'] = item_date
                                if item_date > urls[url]['last_seen']:
                                    urls[url]['last_seen'] = item_date
                        else:
                            urls.add(url)
    return urls
def get_misp_object(self):
    """Export this domain as a MISP 'domain-crawled' object.

    first_seen/last_seen come from the domain metadata; each crawled URL is
    added as a 'url' attribute carrying its own first/last-seen dates.
    Every attribute is tagged with all of this object's tags.
    """
    # create domain-ip obj
    obj_attrs = []
    obj = MISPObject('domain-crawled', standalone=True)
    obj.first_seen = self.get_first_seen()
    obj.last_seen = self.get_last_check()
    obj_attrs.append( obj.add_attribute('domain', value=self.id) )
    urls = self.get_all_urls(date=True)
    for url in urls:
        attribute = obj.add_attribute('url', value=url)
        attribute.first_seen = str(urls[url]['first_seen'])
        attribute.last_seen = str(urls[url]['last_seen'])
        obj_attrs.append( attribute )
    for obj_attr in obj_attrs:
        for tag in self.get_tags():
            obj_attr.add_tag(tag)
    return obj
############################################################################
############################################################################
############################################################################

View file

@ -2,6 +2,7 @@
# -*-coding:UTF-8 -*
import base64
import gzip
import os
import re
import sys
@ -38,10 +39,9 @@ import Username
from flask import url_for
config_loader = ConfigLoader()
# get and sanityze PASTE DIRECTORY
# # TODO: rename PASTES_FOLDER
PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/'
PASTES_FOLDER = os.path.join(os.path.realpath(PASTES_FOLDER), '')
# # TODO: get and sanityze ITEMS DIRECTORY
ITEMS_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/'
ITEMS_FOLDER = os.path.join(os.path.realpath(ITEMS_FOLDER), '')
r_cache = config_loader.get_redis_conn("Redis_Cache")
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
@ -82,11 +82,11 @@ class Item(AbstractObject):
def get_filename(self):
# Creating the full filepath
filename = os.path.join(PASTES_FOLDER, self.id)
filename = os.path.join(ITEMS_FOLDER, self.id)
filename = os.path.realpath(filename)
# incorrect filename
if not os.path.commonprefix([filename, PASTES_FOLDER]) == PASTES_FOLDER:
if not os.path.commonprefix([filename, ITEMS_FOLDER]) == ITEMS_FOLDER:
return None
else:
return filename
@ -114,8 +114,53 @@ class Item(AbstractObject):
payload = {'raw': self.get_gzip_content(b64=True)}
return payload
def set_origin(self): # set_parent ?
pass
def add_duplicate(self):
pass
def sanitize_id(self):
pass
# # TODO: sanitize_id
# # TODO: check if already exists ?
# # TODO: check if duplicate
def save_on_disk(self, content, binary=True, compressed=False, base64=False):
    """Write item content to disk at self.get_filename(), creating directories as needed.

    :param content: str or bytes content
    :param binary: content is bytes (str content is UTF-8 encoded first)
    :param compressed: content is already gzip-compressed (skip compression)
    :param base64: content is base64-encoded (decoded before compression)

    BUG FIX: the `base64` parameter shadows the stdlib `base64` module, so
    `base64.standard_b64decode(content)` raised AttributeError whenever
    base64=True. The parameter name is kept for caller compatibility; the
    decoder is imported directly instead.
    """
    from base64 import standard_b64decode  # param `base64` shadows the module name
    if not binary:
        content = content.encode()
    if base64:
        content = standard_b64decode(content)
    if not compressed:
        content = gzip.compress(content)
    # # TODO: # FIXME: raise Exception if filename is None ######
    filename = self.get_filename()
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    with open(filename, 'wb') as f:
        f.write(content)
# # TODO: correlations
#
# content
# tags
# origin
# duplicate -> all item iterations ???
#
def create(self, content, tags, origin=None, duplicate=None):
self.save_on_disk(content, binary=True, compressed=False, base64=False)
# # TODO:
def create(self):
# for tag in tags:
# self.add_tag(tag)
if origin:
if duplicate:
pass
# # WARNING: UNCLEAN DELETE /!\ TEST ONLY /!\
@ -174,7 +219,7 @@ def get_basename(item_id):
return os.path.basename(item_id)
def get_item_id(full_path):
return full_path.replace(PASTES_FOLDER, '', 1)
return full_path.replace(ITEMS_FOLDER, '', 1)
def get_item_filepath(item_id):
return item_basic.get_item_filepath(item_id)
@ -192,7 +237,7 @@ def get_item_basename(item_id):
return os.path.basename(item_id)
def get_item_size(item_id):
return round(os.path.getsize(os.path.join(PASTES_FOLDER, item_id))/1024.0, 2)
return round(os.path.getsize(os.path.join(ITEMS_FOLDER, item_id))/1024.0, 2)
def get_item_encoding(item_id):
return None
@ -561,11 +606,11 @@ def get_item_har(har_path):
def get_item_filename(item_id):
# Creating the full filepath
filename = os.path.join(PASTES_FOLDER, item_id)
filename = os.path.join(ITEMS_FOLDER, item_id)
filename = os.path.realpath(filename)
# incorrect filename
if not os.path.commonprefix([filename, PASTES_FOLDER]) == PASTES_FOLDER:
if not os.path.commonprefix([filename, ITEMS_FOLDER]) == ITEMS_FOLDER:
return None
else:
return filename
@ -712,7 +757,7 @@ def delete_domain_node(item_id):
delete_item(child_id)
# if __name__ == '__main__':
#
# item = Item('')
# print(item.get_misp_object().to_json())
if __name__ == '__main__':
content = 'test file content'
item = Item('tests/2020/01/02/test_save.gz')
item.save_on_disk(content, binary=False)

View file

@ -58,6 +58,24 @@ class Pgp(AbstractObject):
icon = 'times'
return {'style': 'fas', 'icon': icon, 'color': '#44AA99', 'radius':5}
def get_misp_object(self):
    """Export this PGP identifier as a MISP 'pgp-meta' object.

    The attribute type depends on the subtype: 'key' -> key-id,
    'name' -> user-id-name, anything else (mail) -> user-id-email.
    Every attribute is tagged with all of this object's tags.
    """
    obj_attrs = []
    obj = MISPObject('pgp-meta')
    obj.first_seen = self.get_first_seen()
    obj.last_seen = self.get_last_seen()
    if self.subtype=='key':
        obj_attrs.append( obj.add_attribute('key-id', value=self.id) )
    elif self.subtype=='name':
        obj_attrs.append( obj.add_attribute('user-id-name', value=self.id) )
    else: # mail
        obj_attrs.append( obj.add_attribute('user-id-email', value=self.id) )
    for obj_attr in obj_attrs:
        for tag in self.get_tags():
            obj_attr.add_tag(tag)
    return obj
############################################################################
############################################################################
############################################################################

View file

@ -3,30 +3,24 @@
import os
import sys
import redis
# sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
from io import BytesIO
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.environ['AIL_BIN'])
from lib.ConfigLoader import ConfigLoader
from lib.objects.abstract_object import AbstractObject
from abstract_object import AbstractObject
config_loader = ConfigLoader.ConfigLoader()
config_loader = ConfigLoader()
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
HASH_DIR = config_loader.get_config_str('Directories', 'hash')
SCREENSHOT_FOLDER = config_loader.get_files_directory('screenshot')
config_loader = None
################################################################################
################################################################################
################################################################################
class Screenshot(AbstractObject):
"""
AIL Screenshot Object. (strings)
"""
# ID = SHA256
def __init__(self, id):
super(Screenshot, self).__init__('screenshot', id)
@ -50,6 +44,33 @@ class Screenshot(AbstractObject):
def get_svg_icon(self):
return {'style': 'fas', 'icon': '\uf03e', 'color': '#E1F5DF', 'radius':5}
def get_rel_path(self, add_extension=False):
    """Return the nested relative path derived from this screenshot's sha256 id.

    The first 12 hex characters are split into six 2-character directories,
    the remainder is the filename; '.png' is appended when requested.
    """
    chunks = [self.id[i:i + 2] for i in range(0, 12, 2)]
    rel_path = os.path.join(*chunks, self.id[12:])
    if add_extension:
        rel_path += '.png'
    return rel_path
def get_filepath(self):
filename = os.path.join(SCREENSHOT_FOLDER, self.get_rel_path(add_extension=True))
return os.path.realpath(filename)
def get_file_content(self):
    """Read the screenshot file from disk and return its bytes wrapped in a BytesIO."""
    filepath = self.get_filepath()
    with open(filepath, 'rb') as f:
        file_content = BytesIO(f.read())
    return file_content
def get_misp_object(self):
obj_attrs = []
obj = MISPObject('file')
obj_attrs.append( obj.add_attribute('sha256', value=self.id) )
obj_attrs.append( obj.add_attribute('attachment', value=self.id, data=self.get_file_content()) )
for obj_attr in obj_attrs:
for tag in self.get_tags():
obj_attr.add_tag(tag)
return obj
############################################################################
############################################################################
############################################################################

View file

@ -5,13 +5,15 @@ import os
import sys
import redis
from flask import url_for
from pymisp import MISPObject
# sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
from abstract_object import AbstractObject
from flask import url_for
from abstract_subtype_object import AbstractSubtypeObject
config_loader = ConfigLoader.ConfigLoader()
@ -22,13 +24,13 @@ config_loader = None
################################################################################
################################################################################
class Username(AbstractObject):
class Username(AbstractSubtypeObject):
"""
AIL Username Object. (strings)
"""
def __init__(self, id, subtype):
super(Username, self).__init__('username', id, subtype=subtype)
super(Username, self).__init__('username', id, subtype)
# def get_ail_2_ail_payload(self):
# payload = {'raw': self.get_gzip_content(b64=True),
@ -59,6 +61,28 @@ class Username(AbstractObject):
icon = '\uf007'
return {'style': style, 'icon': icon, 'color': '#4dffff', 'radius':5}
def get_misp_object(self):
    """Export this username as a MISP account object.

    The subtype selects the MISP template: 'telegram' -> telegram-account
    (username attribute), 'twitter' -> twitter-account (name attribute),
    anything else -> generic user-account (username attribute).
    Every attribute is tagged with all of this object's tags.
    """
    obj_attrs = []
    if self.subtype == 'telegram':
        obj = MISPObject('telegram-account', standalone=True)
        obj_attrs.append( obj.add_attribute('username', value=self.id) )
    elif self.subtype == 'twitter':
        obj = MISPObject('twitter-account', standalone=True)
        obj_attrs.append( obj.add_attribute('name', value=self.id) )
    else:
        obj = MISPObject('user-account', standalone=True)
        obj_attrs.append( obj.add_attribute('username', value=self.id) )
    obj.first_seen = self.get_first_seen()
    obj.last_seen = self.get_last_seen()
    for obj_attr in obj_attrs:
        for tag in self.get_tags():
            obj_attr.add_tag(tag)
    return obj
############################################################################
############################################################################
############################################################################
@ -71,4 +95,7 @@ class Username(AbstractObject):
#if __name__ == '__main__':
if __name__ == '__main__':
obj = Username('ninechantw', 'telegram')
print(obj.get_misp_object().to_json())

View file

@ -17,8 +17,8 @@ sys.path.append(os.environ['AIL_BIN'])
# Import Project packages
##################################
from packages import Tag
from lib.Investigations import is_object_investigated, get_obj_investigations
from lib.Tracker import is_obj_tracked, get_obj_all_trackers
from lib.Investigations import is_object_investigated, get_obj_investigations, delete_obj_investigations
from lib.Tracker import is_obj_tracked, get_obj_all_trackers, delete_obj_trackers
# # TODO: ADD CORRELATION ENGINE
@ -90,6 +90,14 @@ class AbstractObject(ABC):
else:
investigations = get_obj_investigations(self.id, self.type, self.subtype)
return investigations
def delete_investigations(self):
if not self.subtype:
unregistred = delete_obj_investigations(self.id, self.type)
else:
unregistred = delete_obj_investigations(self.id, self.type, self.subtype)
return unregistred
#- Investigations -#
## Trackers ##
@ -100,12 +108,20 @@ class AbstractObject(ABC):
def get_trackers(self):
return get_obj_all_trackers(self.type, self.subtype, self.id)
def delete_trackers(self):
return delete_obj_trackers(self.type, self.subtype, self.id)
#- Investigations -#
def _delete(self):
# DELETE TAGS
Tag.delete_obj_all_tags(self.id, self.type)
# # TODO: remove from investigations
# remove from tracker
self.delete_trackers()
# remove from investigations
self.delete_investigations()
# # TODO: remove from correlation
@abstractmethod
def delete(self):
@ -121,6 +137,10 @@ class AbstractObject(ABC):
# """
# pass
@abstractmethod
def get_link(self, flask_context=False):
pass
@abstractmethod
def get_svg_icon(self):
"""
@ -129,7 +149,7 @@ class AbstractObject(ABC):
pass
@abstractmethod
def get_link(self, flask_context=False):
def get_misp_object(self):
pass
# # TODO:

View file

@ -0,0 +1,155 @@
# -*-coding:UTF-8 -*
"""
Base Class for AIL Objects
"""
##################################
# Import External packages
##################################
import os
import sys
from abc import abstractmethod
#from flask import url_for
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib.objects.abstract_object import AbstractObject
from lib.ConfigLoader import ConfigLoader
# LOAD CONFIG
config_loader = ConfigLoader()
r_metadata = config_loader.get_redis_conn("ARDB_Metadata")
config_loader = None
# # TODO: ADD CORRELATION ENGINE
# # FIXME: SAVE SUBTYPE NAMES ?????
class AbstractSubtypeObject(AbstractObject):
"""
Abstract Subtype Object
"""
def __init__(self, obj_type, id, subtype):
    """ Abstract for all the AIL object

    :param obj_type: object type (item, ...)
    :param id: Object ID
    :param subtype: object subtype (e.g. 'telegram', 'bitcoin')
    """
    # NOTE(review): super().__init__() is not called — the AbstractObject
    # base initializer is skipped; confirm this is intentional.
    self.id = id
    self.type = obj_type
    self.subtype = subtype
# # TODO: # FIXME: REMOVE R_INT ????????????????????????????????????????????????????????????????????
def get_first_seen(self, r_int=False):
    """First-seen date (YYYYMMDD) of this object.

    With r_int=True the value is returned as an int, defaulting to 99999999
    when no date is recorded (so it always compares greater than real dates).
    """
    first_seen = r_metadata.hget(f'{self.type}_metadata_{self.subtype}:{self.id}', 'first_seen')
    if not r_int:
        return first_seen
    return int(first_seen) if first_seen else 99999999
# # TODO: # FIXME: REMOVE R_INT ????????????????????????????????????????????????????????????????????
def get_last_seen(self, r_int=False):
    """Last-seen date (YYYYMMDD) of this object.

    With r_int=True the value is returned as an int, defaulting to 0 when no
    date is recorded (so it always compares smaller than real dates).
    """
    last_seen = r_metadata.hget(f'{self.type}_metadata_{self.subtype}:{self.id}', 'last_seen')
    if not r_int:
        return last_seen
    return int(last_seen) if last_seen else 0
def get_nb_seen(self):
return r_metadata.scard(f'set_{self.type}_{self.subtype}:{self.id}')
# # TODO: CHECK RESULT
def get_nb_seen_by_date(self, date_day):
    """Number of times this object was seen on `date_day` (YYYYMMDD); 0 when never seen."""
    nb = r_metadata.hget(f'{self.type}:{self.subtype}:{date_day}', self.id)
    return 0 if nb is None else int(nb)
def _get_meta(self):
    """Base metadata dict shared by all subtype objects: first/last seen and sighting count."""
    return {'first_seen': self.get_first_seen(),
            'last_seen': self.get_last_seen(),
            'nb_seen': self.get_nb_seen()}
# def exists(self):
# res = r_metadata.zscore(f'{self.type}_all:{self.subtype}', self.id)
# if res is not None:
# return True
# else:
# return False
def exists(self):
return r_metadata.exists(f'{self.type}_metadata_{self.subtype}:{self.id}')
def set_first_seen(self, first_seen):
r_metadata.hset(f'{self.type}_metadata_{self.subtype}:{self.id}', 'first_seen', first_seen)
def set_last_seen(self, last_seen):
r_metadata.hset(f'{self.type}_metadata_{self.subtype}:{self.id}', 'last_seen', last_seen)
def update_daterange(self, date):
    """Extend this object's first/last-seen range to include `date` (YYYYMMDD)."""
    date = int(date)
    # object doesn't exist yet: date is both first and last seen
    if not self.exists():
        self.set_first_seen(date)
        self.set_last_seen(date)
        return
    if date < self.get_first_seen(r_int=True):
        self.set_first_seen(date)
    if date > self.get_last_seen(r_int=True):
        self.set_last_seen(date)
def add(self, date):
    """Record a sighting of this object on `date` (YYYYMMDD).

    Extends the first/last-seen range, then increments the daily counter
    and the all-time sorted-set counter.
    """
    # BUG FIX: was self.update_correlation_daterange(), which is not defined
    # on this class; the date-range update is update_daterange(date).
    self.update_daterange(date)
    # daily
    r_metadata.hincrby(f'{self.type}:{self.subtype}:{date}', self.id, 1)
    # all type
    # NOTE(review): redis-py >= 3 expects zincrby(name, amount, value); this
    # call passes (name, value, amount) — confirm against the client in use.
    r_metadata.zincrby(f'{self.type}_all:{self.subtype}', self.id, 1)

    # # domain
    # if item_basic.is_crawled(item_id):
    #     domain = item_basic.get_item_domain(item_id)
    #     self.save_domain_correlation(domain, subtype, obj_id)
def _delete(self):
pass
####################################
#
# _get_items
# get_metadata
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#

View file

@ -14,9 +14,9 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/objects'))
from CryptoCurrencies import CryptoCurrency
from Decodeds import Decoded
from Domains import Domain
from CryptoCurrencies import CryptoCurrency
from Items import Item
from Pgps import Pgp
from Screenshots import Screenshot
@ -76,12 +76,69 @@ def get_objects_meta(l_dict_objs, icon=False, url=False, flask_context=False):
l_meta.append(dict_meta)
return l_meta
# # TODO: CHECK IF object already have an UUID
def get_misp_object(obj_type, subtype, id):
    """Build the MISP export object for the AIL object identified by (obj_type, subtype, id)."""
    obj = get_object(obj_type, subtype, id)  # renamed: was `object`, shadowing the builtin
    return obj.get_misp_object()
# get misp relationship
# get misp relationship
def get_objects_relationship(obj_1, obj2):
    """Return the MISP relationship linking two AIL objects.

    :return: dict with 'relation' (and, for cryptocurrency pairs, 'src'/'dest'
             object ids); empty dict when no relation applies.
    """
    relationship = {}
    obj_types = ( obj_1.get_type(), obj2.get_type() )

    ##############################################################
    # if ['cryptocurrency', 'pgp', 'username', 'decoded', 'screenshot']:
    #     {'relation': '', 'src':, 'dest':}
    #     relationship[relation] =
    ##############################################################
    if 'cryptocurrency' in obj_types:
        relationship['relation'] = 'extracted-from'
        # BUG FIX: used undefined obj1_type/obj1_id/obj2_id; derive the
        # direction from get_type() and use the objects' ids.
        # NOTE(review): assumes both objects expose an `id` attribute, as
        # AbstractObject/AbstractSubtypeObject set in __init__ — confirm.
        if obj_1.get_type() == 'cryptocurrency':
            relationship['src'] = obj_1.id
            relationship['dest'] = obj2.id
        else:
            relationship['src'] = obj2.id
            relationship['dest'] = obj_1.id

    elif 'pgp' in obj_types:
        relationship['relation'] = 'extracted-from'

    elif 'username' in obj_types:
        relationship['relation'] = 'extracted-from'

    elif 'decoded' in obj_types:
        relationship['relation'] = 'included-in'

    elif 'screenshot' in obj_types:
        relationship['relation'] = 'screenshot-of'

    elif 'domain' in obj_types:
        relationship['relation'] = 'extracted-from'

    # default
    else:
        pass
    return relationship
################################################################################
################################################################################
################################################################################
def delete_obj(obj_type, subtype, id):
    """Delete the AIL object identified by (obj_type, subtype, id); returns the delete() result."""
    obj = get_object(obj_type, subtype, id)  # renamed: was `object`, shadowing the builtin
    return obj.delete()
################################################################################
################################################################################
################################################################################
################################################################################
################################################################################
################################################################################
################################################################################
################################################################################

View file

@ -63,6 +63,8 @@ class Hosts(AbstractModule):
content = item.get_content()
hosts = regex_helper.regex_findall(self.module_name, self.redis_cache_key, self.host_regex, item.get_id(), content)
if hosts:
print(f'{len(hosts)} host {item.get_id()}')
for host in hosts:
#print(host)

View file

@ -9,6 +9,8 @@ TERMINAL_BLUE = '\33[94m'
TERMINAL_BLINK = '\33[6m'
TERMINAL_DEFAULT = '\033[0m'
REPO_ORIGIN = 'https://github.com/ail-project/ail-framework.git'
# set defaut_remote
def set_default_remote(new_origin_url, verbose=False):
process = subprocess.run(['git', 'remote', 'set-url', 'origin', new_origin_url], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@ -144,6 +146,28 @@ def get_last_tag_from_remote(verbose=False):
print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
return ''
def get_git_metadata():
    """Collect local/remote git state and derive update-availability flags.

    An update is available when the local commit differs from the remote
    HEAD; a new version is available when the local and remote tags differ.
    """
    current_branch = get_current_branch()
    is_clone = is_not_fork(REPO_ORIGIN)
    clean_workdir = is_working_directory_clean()
    current_commit = get_last_commit_id_from_local()
    remote_commit = get_last_commit_id_from_remote()
    local_tag = get_last_tag_from_local()
    remote_tag = get_last_tag_from_remote()
    return {
        'current_branch': current_branch,
        'is_clone': is_clone,
        'is_working_directory_clean': clean_workdir,
        'current_commit': current_commit,
        'last_remote_commit': remote_commit,
        'last_local_tag': local_tag,
        'last_remote_tag': remote_tag,
        'new_git_update_available': current_commit != remote_commit,
        'new_git_version_available': local_tag != remote_tag,
    }
if __name__ == "__main__":
get_last_commit_id_from_remote(verbose=True)
get_last_commit_id_from_local(verbose=True)

View file

@ -11,55 +11,39 @@ Update AIL in the background
import os
import sys
import redis
import subprocess
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib import ail_updates
def launch_background_upgrade(version, l_script_name):
if r_serv.sismember('ail:to_update', version):
r_serv.delete('ail:update_error')
r_serv.set('ail:update_in_progress', version)
r_serv.set('ail:current_background_update', version)
if ail_updates.is_version_in_background_update(version):
ail_updates.start_background_update(version)
for script_name in l_script_name:
r_serv.set('ail:current_background_script', script_name)
update_file = os.path.join(os.environ['AIL_HOME'], 'update', version, script_name)
ail_updates.set_current_background_update_script(script_name)
update_file = ail_updates.get_current_background_update_script_path(version, script_name)
# # TODO: Get error output
process = subprocess.run(['python' ,update_file])
update_progress = r_serv.get('ail:current_background_script_stat')
#if update_progress:
# if int(update_progress) != 100:
# r_serv.set('ail:update_error', 'Update {} Failed'.format(version))
update_progress = r_serv.get('ail:current_background_script_stat')
if update_progress:
if int(update_progress) == 100:
r_serv.delete('ail:update_in_progress')
r_serv.delete('ail:current_background_script')
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
r_serv.srem('ail:to_update', version)
update_progress = ail_updates.get_current_background_update_progress()
if update_progress == 100:
ail_updates.end_background_update_script()
# # TODO: Create Custom error
# 'Please relaunch the bin/update-background.py script'
# # TODO: Create Class background update
def clean_update_db():
r_serv.delete('ail:update_error')
r_serv.delete('ail:update_in_progress')
r_serv.delete('ail:current_background_script')
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
ail_updates.end_background_update()
if __name__ == "__main__":
config_loader = ConfigLoader.ConfigLoader()
r_serv = config_loader.get_redis_conn("ARDB_DB")
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None
if r_serv.scard('ail:to_update') == 0:
clean_update_db()
if not ail_updates.exits_background_update_to_launch():
ail_updates.clear_background_update()
else:
launch_background_upgrade('v1.5', ['Update-ARDB_Onions.py', 'Update-ARDB_Metadata.py', 'Update-ARDB_Tags.py', 'Update-ARDB_Tags_background.py', 'Update-ARDB_Onions_screenshots.py'])
launch_background_upgrade('v2.4', ['Update_domain.py'])
launch_background_upgrade('v2.6', ['Update_screenshots.py'])

View file

@ -2,14 +2,15 @@
# -*-coding:UTF-8 -*
import os
import re
import sys
import time
import redis
import datetime
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib import ail_updates
from lib.ConfigLoader import ConfigLoader
class AIL_Updater(object):
"""docstring for AIL_Updater."""
@ -18,15 +19,11 @@ class AIL_Updater(object):
self.version = new_version
self.start_time = time.time()
self.config = ConfigLoader.ConfigLoader()
self.r_serv = self.config.get_redis_conn("ARDB_DB")
self.config = ConfigLoader()
self.r_serv = self.config.get_redis_conn("Kvrocks_DB")
self.f_version = float(self.version[1:])
self.current_f_version = self.r_serv.get('ail:version')
if self.current_f_version:
self.current_f_version = float(self.current_f_version[1:])
else:
self.current_f_version = 0
self.current_f_version = ail_updates.get_ail_float_version()
def update(self):
"""
@ -38,11 +35,7 @@ class AIL_Updater(object):
"""
Update DB version
"""
#Set current ail version
self.r_serv.hset('ail:update_date', self.version, datetime.datetime.now().strftime("%Y%m%d"))
#Set current ail version
if self.f_version > self.current_f_version:
self.r_serv.set('ail:version', self.version)
ail_updates.add_ail_update(version)
def run_update(self):
self.update()

49
update/bin/old_ail_updater.py Executable file
View file

@ -0,0 +1,49 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import re
import sys
import time
import redis
import datetime
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
class AIL_Updater(object):
    """Base class for AIL update scripts (legacy ARDB backend).

    Concrete updaters override :meth:`update` with the migration logic;
    :meth:`run_update` then records the applied version in the database.
    """

    def __init__(self, new_version):
        # version tag of this updater, e.g. 'v1.5'
        self.version = new_version
        self.start_time = time.time()
        self.config = ConfigLoader.ConfigLoader()
        self.r_serv = self.config.get_redis_conn("ARDB_DB")
        # numeric part of the tag: 'v1.5' -> 1.5
        self.f_version = float(self.version[1:])
        stored_version = self.r_serv.get('ail:version')
        # 0 when no version has ever been recorded
        self.current_f_version = float(stored_version[1:]) if stored_version else 0

    def update(self):
        """AIL DB update hook, overridden by concrete updaters."""
        pass

    def end_update(self):
        """Record the update date and bump the stored AIL version."""
        today = datetime.datetime.now().strftime("%Y%m%d")
        self.r_serv.hset('ail:update_date', self.version, today)
        # only ever move the recorded version forward
        if self.f_version > self.current_f_version:
            self.r_serv.set('ail:version', self.version)

    def run_update(self):
        self.update()
        self.end_update()

View file

@ -9,8 +9,11 @@ import argparse
import datetime
import configparser
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib import ail_updates
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='AIL default update')
@ -23,15 +26,11 @@ if __name__ == '__main__':
# remove space
update_tag = args.tag.replace(' ', '')
if not ail_updates.check_version(update_tag):
parser.print_help()
print(f'Error: Invalid update tag {update_tag}')
sys.exit(0)
start_deb = time.time()
config_loader = ConfigLoader.ConfigLoader()
r_serv = config_loader.get_redis_conn("ARDB_DB")
config_loader = None
#Set current ail version
r_serv.set('ail:version', update_tag)
#Set current ail version
r_serv.hset('ail:update_date', update_tag, datetime.datetime.now().strftime("%Y%m%d"))
ail_updates.add_ail_update(update_tag)

View file

@ -12,7 +12,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
from old_ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""

View file

@ -12,7 +12,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
from old_ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""

View file

@ -12,7 +12,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
from old_ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""

View file

@ -13,7 +13,7 @@ import ConfigLoader
import Tracker
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
from old_ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""

View file

@ -12,7 +12,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
from old_ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""

View file

@ -12,7 +12,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
from old_ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""

View file

@ -12,7 +12,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
from old_ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""

View file

@ -28,7 +28,11 @@ import Tag
sys.path.append('./modules/')
from User import User
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib.Users import User
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
@ -47,6 +51,7 @@ from blueprints.objects_item import objects_item
from blueprints.hunters import hunters
from blueprints.old_endpoints import old_endpoints
from blueprints.ail_2_ail_sync import ail_2_ail_sync
from blueprints.settings_b import settings_b
Flask_dir = os.environ['AIL_FLASK']
@ -107,6 +112,7 @@ app.register_blueprint(objects_item, url_prefix=baseUrl)
app.register_blueprint(hunters, url_prefix=baseUrl)
app.register_blueprint(old_endpoints, url_prefix=baseUrl)
app.register_blueprint(ail_2_ail_sync, url_prefix=baseUrl)
app.register_blueprint(settings_b, url_prefix=baseUrl)
# ========= =========#
# ========= Cookie name ========

View file

@ -17,7 +17,6 @@ sys.path.append('modules')
import Flask_config
# Import Role_Manager
from Role_Manager import create_user_db, check_password_strength, check_user_role_integrity
from Role_Manager import login_admin, login_analyst, login_read_only
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))

View file

@ -17,7 +17,6 @@ sys.path.append('modules')
import Flask_config
# Import Role_Manager
from Role_Manager import create_user_db, check_password_strength, check_user_role_integrity
from Role_Manager import login_admin, login_analyst, login_read_only
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))

View file

@ -15,15 +15,16 @@ sys.path.append('modules')
import Flask_config
# Import Role_Manager
from Role_Manager import create_user_db, check_password_strength, check_user_role_integrity
from Role_Manager import login_admin, login_analyst
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
from User import User
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib import Users
from lib.Users import User
r_cache = Flask_config.r_cache
r_serv_db = Flask_config.r_serv_db
r_serv_tags = Flask_config.r_serv_tags
# ============ BLUEPRINT ============
@ -67,7 +68,7 @@ def login():
return render_template("login.html", error=error)
if user and user.check_password(password):
if not check_user_role_integrity(user.get_id()):
if not Users.check_user_role_integrity(user.get_id()):
error = 'Incorrect User ACL, Please contact your administrator'
return render_template("login.html", error=error)
login_user(user) ## TODO: use remember me ?
@ -113,9 +114,9 @@ def change_password():
if current_user.is_authenticated and password1!=None:
if password1==password2:
if check_password_strength(password1):
if Users.check_password_strength(password1):
user_id = current_user.get_id()
create_user_db(user_id , password1, update=True)
Users.create_user(user_id , password=password1, chg_passwd=False)
# update Note
# dashboard
return redirect(url_for('dashboard.index', update_note=True))

View file

@ -0,0 +1,66 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
'''
Blueprint Flask: ail_investigations
'''
import os
import sys
import json
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response, abort, send_file
from flask_login import login_required, current_user
# Import Role_Manager
from Role_Manager import login_admin, login_analyst, login_read_only
# sys.path.append('modules')
# import Flask_config
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib import ail_updates
from packages import git_status
# ============ BLUEPRINT ============
settings_b = Blueprint('settings_b', __name__, template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates/settings'))
# ============ VARIABLES ============
#bootstrap_label = Flask_config.bootstrap_label
# ============ FUNCTIONS ============
def create_json_response(data, status_code):
    """Serialize *data* as pretty-printed JSON and pair it with *status_code*."""
    payload = json.dumps(data, indent=2, sort_keys=True)
    resp = Response(payload, mimetype='application/json')
    return resp, status_code
# ============= ROUTES ==============
@settings_b.route("/settings", methods=['GET'])
@login_required
@login_read_only
def settings_page():
    """Render the server-management page with git state and the AIL version."""
    #admin_level = current_user.is_in_role('admin')
    return render_template("settings_index.html",
                           git_metadata=git_status.get_git_metadata(),
                           ail_version=ail_updates.get_ail_version())
@settings_b.route("/settings/background_update/json", methods=['GET'])
@login_required
@login_read_only
def get_background_update_metadata_json():
    """Expose background-update progress metadata as JSON."""
    metadata = ail_updates.get_update_background_metadata()
    return jsonify(metadata)
#############################################

View file

@ -5,42 +5,29 @@ import os
import sys
import redis
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_FLASK'], 'modules'))
from Role_Manager import create_user_db, edit_user_db, get_default_admin_token, gen_password
config_loader = ConfigLoader.ConfigLoader()
r_serv = config_loader.get_redis_conn("ARDB_DB")
config_loader = None
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from lib import Users
if __name__ == "__main__":
# create role_list
if not r_serv.exists('ail:all_role'):
r_serv.zadd('ail:all_role', 1, 'admin')
r_serv.zadd('ail:all_role', 2, 'analyst')
r_serv.zadd('ail:all_role', 3, 'user')
r_serv.zadd('ail:all_role', 4, 'user_no_api')
r_serv.zadd('ail:all_role', 5, 'read_only')
Users._create_roles_list()
username = 'admin@admin.test'
password = gen_password()
if r_serv.exists('user_metadata:admin@admin.test'):
edit_user_db(username, password=password, role='admin')
else:
create_user_db(username, password, role='admin', default=True)
token = get_default_admin_token()
user_id = 'admin@admin.test'
password = Users.gen_password()
Users.create_user(user_id, password=password, role='admin')
token = Users.get_default_admin_token()
default_passwd_file = os.path.join(os.environ['AIL_HOME'], 'DEFAULT_PASSWORD')
to_write_str = '# Password Generated by default\n# This file is deleted after the first login\n#\nemail=admin@admin.test\npassword='
to_write_str = to_write_str + password + '\nAPI_Key=' + token + '\n'
to_write_str = f'{to_write_str}{password}\nAPI_Key={token}\n'
with open(default_passwd_file, 'w') as f:
f.write(to_write_str)
print('new user created: {}'.format(username))
print('new user created: {}'.format(user_id))
print('password: {}'.format(password))
print('token: {}'.format(token))

View file

@ -7,9 +7,6 @@ import sys
import redis
import bcrypt
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
from functools import wraps
from flask_login import LoginManager, current_user, login_user, logout_user, login_required
@ -18,17 +15,6 @@ from flask import request, make_response, current_app
login_manager = LoginManager()
login_manager.login_view = 'root.role'
# CONFIG #
config_loader = ConfigLoader.ConfigLoader()
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
config_loader = None
default_passwd_file = os.path.join(os.environ['AIL_HOME'], 'DEFAULT_PASSWORD')
regex_password = r'^(?=(.*\d){2})(?=.*[a-z])(?=.*[A-Z]).{10,100}$'
regex_password = re.compile(regex_password)
###############################################################
############### FLASK CACHE ##################
###############################################################
@ -100,138 +86,3 @@ def login_read_only(func):
###############################################################
###############################################################
###############################################################
def gen_password(length=30, charset="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_!@#$%^&*()"):
    """Generate a random password of *length* characters drawn from *charset*.

    Uses secrets.choice for a uniform, cryptographically secure draw.
    The previous ``int(len_charset * (byte / 256.0))`` mapping was
    modulo-biased towards the start of the charset, which weakens
    generated passwords slightly.
    """
    import secrets
    return "".join(secrets.choice(charset) for _ in range(length))
def gen_token(length=41, charset="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"):
    """Generate a random API token of *length* characters from *charset*.

    Uses secrets.choice for a uniform, cryptographically secure draw;
    the previous ``int(len_charset * (byte / 256.0))`` mapping was
    modulo-biased towards the start of the charset.
    """
    import secrets
    return "".join(secrets.choice(charset) for _ in range(length))
def generate_new_token(user_id):
    """Rotate *user_id*'s API token: drop the old mapping, register a new one."""
    meta_key = 'user_metadata:{}'.format(user_id)
    old_token = r_serv_db.hget(meta_key, 'token')
    r_serv_db.hdel('user:tokens', old_token)
    new_token = gen_token(41)
    r_serv_db.hset('user:tokens', new_token, user_id)
    r_serv_db.hset(meta_key, 'token', new_token)
def get_default_admin_token():
    """Return the admin@admin.test API token, or '' when that user is absent."""
    admin_key = 'user_metadata:admin@admin.test'
    if not r_serv_db.exists(admin_key):
        return ''
    return r_serv_db.hget(admin_key, 'token')
def create_user_db(username_id , password, default=False, role=None, update=False):
    """Create a new user entry, or change an existing user's password.

    :param username_id: user email / identifier (used as hash field key)
    :param password: clear-text password; bcrypt-hashed before storage
    :param default: if True, flag the account so this (generated) password
        must be changed at first login
    :param role: role name; falls back to 'read_only' when not given
    :param update: if True, treat the call as a password change of an
        existing user instead of a user creation
    """
    password = password.encode()
    password_hash = hashing_password(password)
    # create user token
    generate_new_token(username_id)
    if not role:
        role = 'read_only'
    if update:
        # password changed: clear the forced-change flag
        r_serv_db.hdel('user_metadata:{}'.format(username_id), 'change_passwd')
        # remove default user password file
        if username_id=='admin@admin.test':
            os.remove(default_passwd_file)
        r_serv_db.hset('user:all', username_id, password_hash)
    else:
        if default:
            # force the user to replace the generated password at first login
            r_serv_db.hset('user_metadata:{}'.format(username_id), 'change_passwd', True)
        if role:
            if role in get_all_role():
                # a role implies membership in every lower-privileged role set
                for role_to_add in get_all_user_role(role):
                    r_serv_db.sadd('user_role:{}'.format(role_to_add), username_id)
                r_serv_db.hset('user_metadata:{}'.format(username_id), 'role', role)
        r_serv_db.hset('user:all', username_id, password_hash)
def edit_user_db(user_id, role, password=None):
    """Change a user's role and, optionally, its password.

    Role levels are scores in the 'ail:all_role' sorted set where a LOWER
    score means MORE privilege (admin=1 ... read_only=5). Changing the
    role adds/removes the intermediate 'user_role:*' set memberships so
    the privilege-inheritance chain stays consistent.
    """
    if password:
        password_hash = hashing_password(password.encode())
        r_serv_db.hset('user:all', user_id, password_hash)
    current_role = r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
    if role != current_role:
        request_level = get_role_level(role)
        current_role = get_role_level(current_role)
        if current_role < request_level:
            # privilege downgrade: drop memberships between old and new rank
            role_to_remove = get_user_role_by_range(current_role -1, request_level - 2)
            for role_id in role_to_remove:
                r_serv_db.srem('user_role:{}'.format(role_id), user_id)
            r_serv_db.hset('user_metadata:{}'.format(user_id), 'role', role)
        else:
            # privilege upgrade: add memberships between new and old rank
            role_to_add = get_user_role_by_range(request_level -1, current_role)
            for role_id in role_to_add:
                r_serv_db.sadd('user_role:{}'.format(role_id), user_id)
            r_serv_db.hset('user_metadata:{}'.format(user_id), 'role', role)
def delete_user_db(user_id):
    """Remove *user_id* entirely: role memberships, token, metadata, password."""
    if not r_serv_db.exists('user_metadata:{}'.format(user_id)):
        print('Error: user {} do not exist'.format(user_id))
        return
    for role_id in get_all_role():
        r_serv_db.srem('user_role:{}'.format(role_id), user_id)
    user_token = r_serv_db.hget('user_metadata:{}'.format(user_id), 'token')
    r_serv_db.hdel('user:tokens', user_token)
    r_serv_db.delete('user_metadata:{}'.format(user_id))
    r_serv_db.hdel('user:all', user_id)
def hashing_password(bytes_password):
    """bcrypt-hash *bytes_password* with a freshly generated salt."""
    salt = bcrypt.gensalt()
    return bcrypt.hashpw(bytes_password, salt)
def check_password_strength(password):
    """True when *password* matches the module-level password policy regex."""
    return bool(regex_password.match(password))
def get_all_role():
    # all roles, ordered by privilege rank (score 1 = admin ... 5 = read_only)
    return r_serv_db.zrange('ail:all_role', 0, -1)
def get_role_level(role):
    # rank of *role* in the 'ail:all_role' sorted set (1 = most privileged)
    return int(r_serv_db.zscore('ail:all_role', role))
def get_all_user_role(user_role):
    """Return *user_role* plus every lower-privileged role (the inherited set)."""
    level = get_role_level(user_role)
    return r_serv_db.zrange('ail:all_role', level - 1, -1)
def get_all_user_upper_role(user_role):
    """Return every role strictly more privileged than *user_role*."""
    level = get_role_level(user_role)
    if level <= 1:
        # top rank: nothing above it
        return []
    return r_serv_db.zrange('ail:all_role', 0, level - 2)
def get_user_role_by_range(inf, sup):
    # roles whose zero-based rank index lies in [inf, sup] (inclusive)
    return r_serv_db.zrange('ail:all_role', inf, sup)
def get_user_role(user_id):
    # declared role name stored in the user's metadata hash
    return r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
def check_user_role_integrity(user_id):
    """Check that *user_id*'s set memberships match its declared role.

    The user must belong to its own role set and every lower-privileged
    set, and must NOT belong to any higher-privileged set.
    """
    user_role = get_user_role(user_id)

    def _is_member(role):
        return r_serv_db.sismember('user_role:{}'.format(role), user_id)

    has_all_expected = all(_is_member(role) for role in get_all_user_role(user_role))
    in_upper_role = any(_is_member(role) for role in get_all_user_upper_role(user_role))
    return has_all_expected and not in_upper_role

View file

@ -20,6 +20,7 @@ from flask_login import login_required
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import queues_modules
import ail_updates
# ============ VARIABLES ============
import Flask_config
@ -29,10 +30,8 @@ config_loader = Flask_config.config_loader
baseUrl = Flask_config.baseUrl
r_serv = Flask_config.r_serv
r_serv_log = Flask_config.r_serv_log
r_serv_db = Flask_config.r_serv_db
max_dashboard_logs = Flask_config.max_dashboard_logs
dict_update_description = Flask_config.dict_update_description
dashboard = Blueprint('dashboard', __name__, template_folder='templates')
@ -151,12 +150,11 @@ def stuff():
return jsonify(row1=get_queues(r_serv))
# TODO: ADD UPDATE NOTE BY USER
@dashboard.route("/")
@login_required
@login_read_only
def index():
update_note = request.args.get('update_note')
default_minute = config_loader.get_config_str("Flask", "minute_processed_paste")
threshold_stucked_module = config_loader.get_config_int("Module_ModuleInformation", "threshold_stucked_module")
log_select = {10, 25, 50, 100}
@ -165,21 +163,15 @@ def index():
log_select.sort()
# Check if update in progress
update_in_progress = False
update_warning_message = ''
update_warning_message_notice_me = ''
current_update = r_serv_db.get('ail:current_background_update')
if current_update:
if r_serv_db.scard('ail:update_{}'.format(current_update)) != dict_update_description[current_update]['nb_background_update']:
update_in_progress = True
update_warning_message = dict_update_description[current_update]['update_warning_message']
update_warning_message_notice_me = dict_update_description[current_update]['update_warning_message_notice_me']
background_update = False
update_message = ''
if ail_updates.get_current_background_update():
background_update = True
update_message = ail_updates.get_update_background_message()
return render_template("index.html", default_minute = default_minute, threshold_stucked_module=threshold_stucked_module,
log_select=log_select, selected=max_dashboard_logs,
update_warning_message=update_warning_message, update_in_progress=update_in_progress,
#update_note=update_note,
update_warning_message_notice_me=update_warning_message_notice_me)
background_update=background_update, update_message=update_message)
# ========= REGISTRATION =========
app.register_blueprint(dashboard, url_prefix=baseUrl)

View file

@ -62,10 +62,11 @@
<div class="col-12 col-lg-10" id="core_content">
{%if update_in_progress%}
{%if background_update%}
<div class="alert alert-warning alert-dismissible fade show my-2" role="alert">
<strong>Warning!</strong> {{update_warning_message}} <strong>{{update_warning_message_notice_me}}</strong>
(<a href="{{ url_for('settings.settings_page') }}">Check Update Status</a>)
<strong>Warning!</strong> An Update is running on the background. Some informations can be <strong>missing from the UI.</strong>
<strong>Updating: {{update_message}}</strong>
(<a href="{{ url_for('settings_b.settings_page') }}">Check Update Status</a>)
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>

View file

@ -8,13 +8,13 @@ from flask import Flask, render_template, jsonify, request, Blueprint, redirect,
from flask_login import login_required, current_user
from Role_Manager import login_admin, login_analyst, login_user, login_read_only
from Role_Manager import create_user_db, edit_user_db, delete_user_db, check_password_strength, generate_new_token, gen_password
import json
import datetime
import git_status
import d4
import Users
# ============ VARIABLES ============
import Flask_config
@ -31,8 +31,6 @@ settings = Blueprint('settings', __name__, template_folder='templates')
# ============ FUNCTIONS ============
def one():
return 1
def check_email(email):
result = email_regex.match(email)
@ -41,84 +39,13 @@ def check_email(email):
else:
return False
def get_git_metadata():
dict_git = {}
dict_git['current_branch'] = git_status.get_current_branch()
dict_git['is_clone'] = git_status.is_not_fork(REPO_ORIGIN)
dict_git['is_working_directory_clean'] = git_status.is_working_directory_clean()
dict_git['current_commit'] = git_status.get_last_commit_id_from_local()
dict_git['last_remote_commit'] = git_status.get_last_commit_id_from_remote()
dict_git['last_local_tag'] = git_status.get_last_tag_from_local()
dict_git['last_remote_tag'] = git_status.get_last_tag_from_remote()
if dict_git['current_commit'] != dict_git['last_remote_commit']:
dict_git['new_git_update_available'] = True
else:
dict_git['new_git_update_available'] = False
if dict_git['last_local_tag'] != dict_git['last_remote_tag']:
dict_git['new_git_version_available'] = True
else:
dict_git['new_git_version_available'] = False
return dict_git
def get_update_metadata():
dict_update = {}
dict_update['current_version'] = r_serv_db.get('ail:version')
dict_update['current_background_update'] = r_serv_db.get('ail:current_background_update')
dict_update['update_in_progress'] = r_serv_db.get('ail:update_in_progress')
dict_update['update_error'] = r_serv_db.get('ail:update_error')
if dict_update['update_in_progress']:
dict_update['update_progression'] = r_serv_db.scard('ail:update_{}'.format(dict_update['update_in_progress']))
dict_update['update_nb'] = dict_update_description[dict_update['update_in_progress']]['nb_background_update']
dict_update['update_stat'] = int(dict_update['update_progression']*100/dict_update['update_nb'])
dict_update['current_background_script'] = r_serv_db.get('ail:current_background_script')
dict_update['current_background_script_stat'] = r_serv_db.get('ail:current_background_script_stat')
return dict_update
def get_user_metadata(user_id):
user_metadata = {}
user_metadata['email'] = user_id
user_metadata['role'] = r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
user_metadata['api_key'] = r_serv_db.hget('user_metadata:{}'.format(user_id), 'token')
return user_metadata
def get_users_metadata(list_users):
users = []
for user in list_users:
users.append(get_user_metadata(user))
return users
def get_all_users():
return r_serv_db.hkeys('user:all')
def get_all_roles():
return r_serv_db.zrange('ail:all_role', 0, -1)
# ============= ROUTES ==============
@settings.route("/settings/", methods=['GET'])
@login_required
@login_read_only
def settings_page():
git_metadata = get_git_metadata()
current_version = r_serv_db.get('ail:version')
update_metadata = get_update_metadata()
admin_level = current_user.is_in_role('admin')
return render_template("settings_index.html", git_metadata=git_metadata,
admin_level=admin_level,
current_version=current_version)
@settings.route("/settings/edit_profile", methods=['GET'])
@login_required
@login_read_only
def edit_profile():
user_metadata = get_user_metadata(current_user.get_id())
user_metadata = Users.get_user_metadata(current_user.get_id())
admin_level = current_user.is_in_role('admin')
return render_template("edit_profile.html", user_metadata=user_metadata,
admin_level=admin_level)
@ -127,7 +54,7 @@ def edit_profile():
@login_required
@login_user
def new_token():
generate_new_token(current_user.get_id())
Users.generate_new_token(current_user.get_id())
return redirect(url_for('settings.edit_profile'))
@settings.route("/settings/new_token_user", methods=['POST'])
@ -135,8 +62,8 @@ def new_token():
@login_admin
def new_token_user():
user_id = request.form.get('user_id')
if r_serv_db.exists('user_metadata:{}'.format(user_id)):
generate_new_token(user_id)
if Users.exists_user(user_id):
Users.generate_new_token(user_id)
return redirect(url_for('settings.users_list'))
@settings.route("/settings/create_user", methods=['GET'])
@ -151,7 +78,7 @@ def create_user():
role = r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
else:
user_id = None
all_roles = get_all_roles()
all_roles = Users.get_all_roles()
return render_template("create_user.html", all_roles=all_roles, user_id=user_id, user_role=role,
error=error, error_mail=error_mail,
admin_level=True)
@ -165,14 +92,14 @@ def create_user_post():
password1 = request.form.get('password1')
password2 = request.form.get('password2')
all_roles = get_all_roles()
all_roles = Users.get_all_roles()
if email and len(email)< 300 and check_email(email) and role:
if role in all_roles:
# password set
if password1 and password2:
if password1==password2:
if check_password_strength(password1):
if Users.check_password_strength(password1):
password = password1
else:
return render_template("create_user.html", all_roles=all_roles, error="Incorrect Password", admin_level=True)
@ -180,21 +107,16 @@ def create_user_post():
return render_template("create_user.html", all_roles=all_roles, error="Passwords don't match", admin_level=True)
# generate password
else:
password = gen_password()
password = Users.gen_password()
if current_user.is_in_role('admin'):
# edit user
if r_serv_db.exists('user_metadata:{}'.format(email)):
if password1 and password2:
edit_user_db(email, password=password, role=role)
return redirect(url_for('settings.users_list', new_user=email, new_user_password=password, new_user_edited=True))
else:
edit_user_db(email, role=role)
return redirect(url_for('settings.users_list', new_user=email, new_user_password='Password not changed', new_user_edited=True))
# create user
else:
create_user_db(email, password, default=True, role=role)
return redirect(url_for('settings.users_list', new_user=email, new_user_password=password, new_user_edited=False))
str_password = password
if Users.exists_user(email):
if not password1 and not password2:
password = None
str_password = 'Password not changed'
Users.create_user(email, password=password, role=role)
return redirect(url_for('settings.users_list', new_user=email, new_user_password=str_password))
else:
return render_template("create_user.html", all_roles=all_roles, admin_level=True)
@ -205,7 +127,7 @@ def create_user_post():
@login_required
@login_admin
def users_list():
all_users = get_users_metadata(get_all_users())
all_users = Users.get_users_metadata(Users.get_all_users())
new_user = request.args.get('new_user')
new_user_dict = {}
if new_user:
@ -226,53 +148,10 @@ def edit_user():
@login_admin
def delete_user():
user_id = request.form.get('user_id')
delete_user_db(user_id)
Users.delete_user(user_id)
return redirect(url_for('settings.users_list'))
@settings.route("/settings/get_background_update_stats_json", methods=['GET'])
@login_required
@login_read_only
def get_background_update_stats_json():
# handle :end, error
update_stats = {}
current_update = r_serv_db.get('ail:current_background_update')
update_in_progress = r_serv_db.get('ail:update_in_progress')
if current_update:
update_stats['update_version']= current_update
update_stats['background_name']= r_serv_db.get('ail:current_background_script')
update_stats['background_stats']= r_serv_db.get('ail:current_background_script_stat')
if update_stats['background_stats'] is None:
update_stats['background_stats'] = 0
else:
update_stats['background_stats'] = int(update_stats['background_stats'])
update_progression = r_serv_db.scard('ail:update_{}'.format(current_update))
update_nb_scripts = dict_update_description[current_update]['nb_background_update']
update_stats['update_stat'] = int(update_progression*100/update_nb_scripts)
update_stats['update_stat_label'] = '{}/{}'.format(update_progression, update_nb_scripts)
if not update_in_progress:
update_stats['error'] = True
error_message = r_serv_db.get('ail:update_error')
if error_message:
update_stats['error_message'] = error_message
else:
update_stats['error_message'] = 'Please relaunch the bin/update-background.py script'
else:
if update_stats['background_name'] is None:
update_stats['error'] = True
update_stats['error_message'] = 'Please launch the bin/update-background.py script'
else:
update_stats['error'] = False
return jsonify(update_stats)
else:
return jsonify({})
@settings.route("/settings/passivedns", methods=['GET'])
@login_required
@login_read_only

View file

@ -1 +1 @@
<li id='page-hiddenServices'><a href="{{ url_for('settings.settings_page') }}"><i class="fa fa-cog"></i> Server Management </a></li>
<li id='page-hiddenServices'><a href="{{ url_for('settings_b.settings_page') }}"><i class="fa fa-cog"></i> Server Management </a></li>

View file

@ -31,7 +31,7 @@
<a class="nav-link" href="{{ url_for('trendingmodules.moduletrending') }}" aria-disabled="true"><i class="fas fa-chart-bar"></i> Statistics</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" id="page-options" href="{{ url_for('settings.settings_page') }}" aria-disabled="true"><i class="fas fa-cog"></i> Server Management</a>
<a class="nav-link" id="page-options" href="{{ url_for('settings_b.settings_page') }}" aria-disabled="true"><i class="fas fa-cog"></i> Server Management</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" id="page-logout" href="{{ url_for('root.logout') }}" aria-disabled="true"><i class="fas fa-sign-out-alt"></i> Log Out</a>

View file

@ -11,7 +11,7 @@
</h5>
<ul class="nav flex-md-column flex-row navbar-nav justify-content-between w-100"> <!--nav-pills-->
<li class="nav-item">
<a class="nav-link" href="{{url_for('settings.settings_page')}}" id="nav_server_status">
<a class="nav-link" href="{{url_for('settings_b.settings_page')}}" id="nav_server_status">
<i class="fas fa-tools"></i>
<span>Server Status</span>
</a>
@ -50,6 +50,15 @@
<span>AIL Configs</span>
</a>
</li> -->
<!-- <li class="nav-item">
<a class="nav-link" href="{{url_for('settings.passive_dns')}}" id="nav_data_retention">
<i class="fas fa-database"></i>
<i class="fas fa-hourglass"></i>
<i class="fas fa-clock"></i>
<i class="fas fa-stopwatch"></i>
<span>Data Retention</span>
</a>
</li> -->
<li class="nav-item">
<a class="nav-link" href="{{url_for('settings.passive_dns')}}" id="passive_dns">
<img src="{{ url_for('static', filename='image/d4-logo.png')}}" alt="D4 project" style="width:25px;">

View file

@ -0,0 +1,215 @@
<!DOCTYPE html>
<html>
<head>
<title>Server Management - AIL</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap4.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="card mb-3 mt-1">
<div class="card-header text-white bg-dark pb-1">
<h5 class="card-title">AIL-framework Status :</h5>
</div>
<div class="card-body">
<div class="row">
<div class="col-xl-6">
<div class="card text-center border-secondary">
<div class="card-body px-1 py-0">
<table class="table table-sm">
<tbody>
<tr>
<td>AIL Version</td>
<td>{{ail_version}}<a target="_blank" href="https://github.com/ail-project/ail-framework/releases/tag/{{ail_version}}" class="text-info"><small> (release note)</small></a></td>
</tr>
<tr
{%if git_metadata['current_branch'] != 'master'%}
class="table-danger"
{%endif%}
>
<td>Current Branch</td>
<td>
{%if git_metadata['current_branch'] != 'master'%}
<i class="fas fa-times-circle text-danger" data-toggle="tooltip" data-placement="top" title="Please checkout the master branch"></i>&nbsp;
{%endif%}
{{git_metadata['current_branch']}}
</td>
</tr>
<tr
{%if git_metadata['new_git_update_available']%}
class="table-warning"
{%endif%}
>
<td>Current Commit ID</td>
<td>
{%if git_metadata['new_git_update_available']%}
<i class="fas fa-exclamation-triangle text-secondary" data-toggle="tooltip" data-placement="top" title="A New Update Is Available"></i>&nbsp;
{%endif%}
{{git_metadata['current_commit']}}
</td>
</tr>
<tr
{%if git_metadata['new_git_version_available']%}
class="table-danger"
{%endif%}
>
<td>Current Tag</td>
<td>
{%if git_metadata['new_git_version_available']%}
<i class="fas fa-exclamation-circle text-danger" data-toggle="tooltip" data-placement="top" title="A New Version Is Available"></i>&nbsp;&nbsp;
{%endif%}
{{git_metadata['last_local_tag']}}
</td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
<div class="col-xl-6">
<div class="card text-center border-success" id="card_progress">
<div class="card-body" id="card_progress_body">
<h5 class="card-title">Background Update: <span id="backgroud_update_version"></span></h5>
<div class="progress">
<div class="progress-bar bg-danger" role="progressbar" id="update_global_progress" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100"></div>
</div>
<hr class="my-1">
Updating: <strong id="backgroud_update_name"></strong> ...
<div class="progress">
<div class="progress-bar progress-bar-striped bg-warning progress-bar-animated" role="progressbar" id="update_background_progress" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100"></div>
</div>
<div class="text-danger" id="update_error_div">
<hr>
<h5 class="card-title"><i class="fas fa-times-circle text-danger"></i> Update Error:</h5>
<p id="update_error_mess"></p>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
{%if git_metadata['new_git_version_available']%}
<div class="alert alert-danger" role="alert">
<h4 class="alert-heading">New Version Available!</h4>
<hr class="my-0">
<p>A new version is available, new version: <strong>{{git_metadata['last_remote_tag']}}</strong></p>
<a target="_blank" href="https://github.com/CIRCL/AIL-framework/releases/tag/{{git_metadata['last_remote_tag']}}"> Check last release note.</a>
</div>
{%endif%}
{%if git_metadata['new_git_update_available']%}
<div class="alert alert-warning" role="alert">
<h4 class="alert-heading">New Update Available!</h4>
<hr class="my-0">
<p>A new update is available, new commit ID: <strong>{{git_metadata['last_remote_commit']}}</strong></p>
<a target="_blank" href="https://github.com/CIRCL/AIL-framework/commit/{{git_metadata['last_remote_commit']}}"> Check last commit content.</a>
</div>
{%endif%}
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
    // Mark the "Server Status" entry as the active sidebar item.
    $("#nav_server_status").addClass("active");
    $("#nav_server").removeClass("text-muted");

    // Pause the auto-refresh loop while the tab is in the background,
    // resume (and poll immediately) when it regains focus.
    $(window).on("focus", function(){
        to_refresh = true
        refresh_update_status();
    });
    $(window).on("blur", function(){
        to_refresh = false
    });

    // Enable polling and fetch the update status right away.
    to_refresh = true
    refresh_update_status();
} );
function toggle_sidebar(){
    // Collapse or expand the left-hand settings menu, letting the main
    // content area reclaim the freed columns when the menu is hidden.
    var menu = $('#nav_menu');
    var side = $('#side_menu');
    var content = $('#core_content');
    if (menu.is(':visible')) {
        menu.hide();
        side.removeClass('border-right');
        side.removeClass('col-lg-2');
        content.removeClass('col-lg-10');
    } else {
        menu.show();
        side.addClass('border-right');
        side.addClass('col-lg-2');
        content.addClass('col-lg-10');
    }
}
// Poll the backend for background-update progress and redraw the progress
// card. Hides the card and stops the polling interval once no update is
// running. Re-schedules itself every 10s while `to_refresh` is true.
function refresh_update_status(){
    $.getJSON("{{ url_for('settings_b.get_background_update_metadata_json') }}", function(data){
        if(! jQuery.isEmptyObject(data)){
            $('#card_progress').show();
            $('#backgroud_update_version').text(data['version']);
            $('#backgroud_update_name').text(data['script']);
            let progress_label = data['nb_completed'] + "/" + data['nb_update']
            $('#update_global_progress').attr('aria-valuenow', data['progress']).width(data['progress']+'%').text(progress_label);
            $('#update_background_progress').attr('aria-valuenow', data['script_progress']).width(data['script_progress']+'%').text(data['script_progress']+'%');
            if(data['error']){
                $('#update_error_div').show();
                $('#update_error_mess').text(data['error']);
                $('#card_progress').removeClass("border-success");
                $('#card_progress').addClass("border-danger");
            } else {
                $('#update_error_div').hide();
                $('#card_progress').removeClass("border-danger");
                // BUG FIX: was `.add("border-success")`, which builds a new
                // jQuery set instead of adding the CSS class — the card never
                // turned green again after an error cleared.
                $('#card_progress').addClass("border-success");
            }
        } else {
            // No background update in progress: hide the card and stop polling.
            $('#card_progress').hide();
            clearInterval(progress_interval);
        }
    });
    if (to_refresh) {
        // BUG FIX: was setTimeout("refresh_crawler_status()", 10000) — a
        // copy-paste from the crawler page scheduling an undefined function.
        // Pass the function reference instead of an eval'd string.
        setTimeout(refresh_update_status, 10000);
    }
}
// BUG FIX: the original called the undefined `update_progress()`, which threw
// a ReferenceError and aborted the script before the interval below could be
// registered. Call the real refresh function instead.
refresh_update_status();
// Poll for update progress every 4 seconds (cleared by refresh_update_status
// once no background update is running).
var progress_interval = setInterval(function(){
    refresh_update_status()
}, 4000);
</script>
</html>