Merge branch 'dev' into master

Thirion Aurélien 2021-11-26 16:41:33 +01:00 committed by GitHub
commit 7346f81460
36 changed files with 3495 additions and 34 deletions


@@ -10,8 +10,9 @@ AIL
<td>Latest Release</td>
<td><a href="https://github.com/ail-project/ail-framework/releases/latest"><img src="https://img.shields.io/github/release/ail-project/ail-framework/all.svg"></a></td>
</tr>
<td>Travis</td>
<td><a href="https://travis-ci.com/ail-project/ail-framework"><img src="https://travis-ci.com/ail-project/ail-framework.svg?branch=master" /></a></td>
<tr>
<td>CI</td>
<td><a href="https://github.com/CIRCL/AIL-framework/actions/workflows/ail_framework_test.yml"><img src="https://github.com/CIRCL/AIL-framework/actions/workflows/ail_framework_test.yml/badge.svg"></a></td>
</tr>
<tr>
<td>Gitter</td>


@@ -44,6 +44,8 @@ isredis=`screen -ls | egrep '[0-9]+.Redis_AIL' | cut -d. -f1`
isardb=`screen -ls | egrep '[0-9]+.ARDB_AIL' | cut -d. -f1`
islogged=`screen -ls | egrep '[0-9]+.Logging_AIL' | cut -d. -f1`
isqueued=`screen -ls | egrep '[0-9]+.Queue_AIL' | cut -d. -f1`
is_ail_core=`screen -ls | egrep '[0-9]+.Core_AIL' | cut -d. -f1`
is_ail_2_ail=`screen -ls | egrep '[0-9]+.AIL_2_AIL' | cut -d. -f1`
isscripted=`screen -ls | egrep '[0-9]+.Script_AIL' | cut -d. -f1`
isflasked=`screen -ls | egrep '[0-9]+.Flask_AIL' | cut -d. -f1`
iscrawler=`screen -ls | egrep '[0-9]+.Crawler_AIL' | cut -d. -f1`
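(Each of these variables holds the PID of the matching detached screen session, if any: a "screen -ls" line such as "12345.Core_AIL (Detached)" becomes "12345" after the cut -d. -f1, which is what the kill functions below pass to kill.)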
@@ -145,11 +147,25 @@ function launching_scripts {
screen -dmS "Script_AIL"
sleep 0.1
echo -e $GREEN"\t* Launching scripts"$DEFAULT
##################################
# CORE MODULES #
##################################
# screen -dmS "Core_AIL"
# sleep 0.1
echo -e $GREEN"\t* Launching core scripts ..."$DEFAULT
# TODO: MOVE IMPORTER ???? => multiple scripts
#### SYNC ####
screen -S "Script_AIL" -X screen -t "Sync_importer" bash -c "cd ${AIL_BIN}/core; ${ENV_PY} ./Sync_importer.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "ail_2_ail_server" bash -c "cd ${AIL_BIN}/core; ${ENV_PY} ./ail_2_ail_server.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "Sync_manager" bash -c "cd ${AIL_BIN}/core; ${ENV_PY} ./Sync_manager.py; read x"
sleep 0.1
##-- SYNC --##
screen -S "Script_AIL" -X screen -t "JSON_importer" bash -c "cd ${AIL_BIN}/import; ${ENV_PY} ./JSON_importer.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "Crawler_manager" bash -c "cd ${AIL_BIN}/core; ${ENV_PY} ./Crawler_manager.py; read x"
@@ -165,6 +181,10 @@ function launching_scripts {
##################################
# MODULES #
##################################
# screen -dmS "Script_AIL"
# sleep 0.1
echo -e $GREEN"\t* Launching scripts"$DEFAULT
screen -S "Script_AIL" -X screen -t "Global" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Global.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "Categ" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Categ.py; read x"
@@ -176,6 +196,9 @@ function launching_scripts {
screen -S "Script_AIL" -X screen -t "SubmitPaste" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./submit_paste.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "Sync_module" bash -c "cd ${AIL_BIN}/core; ${ENV_PY} ./Sync_module.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "ApiKey" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./ApiKey.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "Credential" bash -c "cd ${AIL_BIN}/modules; ${ENV_PY} ./Credential.py; read x"
@@ -366,7 +389,7 @@ function launch_queues {
}
function launch_scripts {
if [[ ! $isscripted ]]; then
if [[ ! $isscripted ]]; then ############################# is core
sleep 1
if checking_ardb && checking_redis; then
launching_scripts;
@@ -414,19 +437,19 @@ function launch_feeder {
}
function killscript {
if [[ $islogged || $isqueued || $isscripted || $isflasked || $isfeeded || $iscrawler ]]; then
if [[ $islogged || $isqueued || $is_ail_core || $isscripted || $isflasked || $isfeeded || $iscrawler ]]; then
echo -e $GREEN"Killing Script"$DEFAULT
kill $islogged $isqueued $isscripted $isflasked $isfeeded $iscrawler
kill $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler
sleep 0.2
echo -e $ROSE`screen -ls`$DEFAULT
echo -e $GREEN"\t* $islogged $isqueued $isscripted $isflasked $isfeeded $iscrawler killed."$DEFAULT
echo -e $GREEN"\t* $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler killed."$DEFAULT
else
echo -e $RED"\t* No script to kill"$DEFAULT
fi
}
function killall {
if [[ $isredis || $isardb || $islogged || $isqueued || $isscripted || $isflasked || $isfeeded || $iscrawler ]]; then
if [[ $isredis || $isardb || $islogged || $isqueued || $is_ail_2_ail || $isscripted || $isflasked || $isfeeded || $iscrawler || $is_ail_core ]]; then
if [[ $isredis ]]; then
echo -e $GREEN"Gracefully closing redis servers"$DEFAULT
shutting_down_redis;
@@ -437,10 +460,10 @@ function killall {
shutting_down_ardb;
fi
echo -e $GREEN"Killing all"$DEFAULT
kill $isredis $isardb $islogged $isqueued $isscripted $isflasked $isfeeded $iscrawler
kill $isredis $isardb $islogged $isqueued $is_ail_core $isscripted $isflasked $isfeeded $iscrawler $is_ail_2_ail
sleep 0.2
echo -e $ROSE`screen -ls`$DEFAULT
echo -e $GREEN"\t* $isredis $isardb $islogged $isqueued $isscripted $isflasked $isfeeded $iscrawler killed."$DEFAULT
echo -e $GREEN"\t* $isredis $isardb $islogged $isqueued $isscripted $is_ail_2_ail $isflasked $isfeeded $iscrawler $is_ail_core killed."$DEFAULT
else
echo -e $RED"\t* No screen to kill"$DEFAULT
fi

bin/core/Sync_importer.py Executable file

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
The Sync_importer Module
================================
This module imports objects received from remote AIL instances (AIL 2 AIL sync) and pushes them to the Mixer queue.
"""
##################################
# Import External packages
##################################
import json
import os
import sys
import time
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from core import ail_2_ail
from modules.abstract_module import AbstractModule
from packages.Item import Item
from packages import Tag
class Sync_importer(AbstractModule):
"""
Sync_importer module for AIL framework
"""
def __init__(self):
super(Sync_importer, self).__init__()
# Waiting time in seconds between two processed messages
self.pending_seconds = 10
#self.dict_ail_sync_filters = ail_2_ail.get_all_sync_queue_dict()
#self.last_refresh = time.time()
# Send module state to logs
self.redis_logger.info(f'Module {self.module_name} Launched')
def run(self):
while self.proceed:
### REFRESH DICT
# if self.last_refresh < ail_2_ail.get_last_updated_ail_instance():
# self.dict_ail_sync_filters = ail_2_ail.get_all_sync_queue_dict()
# self.last_refresh = time.time()
ail_stream = ail_2_ail.get_sync_importer_ail_stream()
if ail_stream:
ail_stream = json.loads(ail_stream)
self.compute(ail_stream)
else:
self.computeNone()
# Wait before next process
self.redis_logger.debug(f"{self.module_name}, waiting for new message, Idling {self.pending_seconds}s")
time.sleep(self.pending_seconds)
def compute(self, ail_stream):
# # TODO: SANITYZE AIL STREAM
# # TODO: CHECK FILTER
# import Object
b64_gzip_content = ail_stream['payload']['raw']
# # TODO: create default id
item_id = ail_stream['meta']['ail:id'] + 'test'
message = f'{item_id} {b64_gzip_content}'
print(message)
self.send_message_to_queue(message, 'Mixer')
# # increase nb of paste by feeder name
# server_cache.hincrby("mixer_cache:list_feeder", Sync, 1)
if __name__ == '__main__':
module = Sync_importer()
module.run()

bin/core/Sync_manager.py Executable file

@@ -0,0 +1,22 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import sys
import time
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import ail_2_ail
# # TODO: launch me in core screen
if __name__ == '__main__':
Client_Manager = ail_2_ail.AIL2AILClientManager()
while True:
command = Client_Manager.get_manager_command()
if command:
Client_Manager.execute_manager_command(command)
else:
time.sleep(5)

bin/core/Sync_module.py Executable file

@@ -0,0 +1,96 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
The Sync_module Module
================================
This module matches tagged objects against the configured AIL 2 AIL sync queues and queues matching objects for synchronisation.
"""
##################################
# Import External packages
##################################
import os
import sys
import time
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from core import ail_2_ail
from modules.abstract_module import AbstractModule
from packages.Item import Item
from packages import Tag
class Sync_module(AbstractModule):
"""
Sync_module module for AIL framework
"""
def __init__(self):
super(Sync_module, self).__init__()
# Waiting time in seconds between two processed messages
self.pending_seconds = 10
self.dict_sync_queues = ail_2_ail.get_all_sync_queue_dict()
self.last_refresh = time.time()
print(self.dict_sync_queues)
# Send module state to logs
self.redis_logger.info(f'Module {self.module_name} Launched')
def compute(self, message):
print(message)
### REFRESH DICT
if self.last_refresh < ail_2_ail.get_last_updated_sync_config():
self.last_refresh = time.time()
self.dict_sync_queues = ail_2_ail.get_all_sync_queue_dict()
print('sync queues refreshed')
print(self.dict_sync_queues)
# Extract object from message
# # TODO: USE JSON DICT ????
mess_split = message.split(';')
if len(mess_split) == 3:
obj_type = mess_split[0]
obj_subtype = mess_split[1]
obj_id = mess_split[2]
# OBJECT => Item
if obj_type == 'item':
obj = Item(obj_id)
tags = obj.get_tags(r_set=True)
# check filter + tags
for queue_uuid in self.dict_sync_queues:
filter_tags = self.dict_sync_queues[queue_uuid]['filter']
print(tags)
print(filter_tags)
print(tags.issubset(filter_tags))
if filter_tags and tags:
if tags.issubset(filter_tags):
obj_dict = obj.get_default_meta()
# send to queue push and/or pull
for dict_ail in self.dict_sync_queues[queue_uuid]['ail_instances']:
ail_2_ail.add_object_to_sync_queue(queue_uuid, dict_ail['ail_uuid'], obj_dict,
push=dict_ail['push'], pull=dict_ail['pull'])
else:
# Malformed message
raise Exception(f'too many values to unpack (expected 3) given {len(mess_split)} with message {message}')
if __name__ == '__main__':
module = Sync_module()
module.run()
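For context, the messages consumed by Sync_module.compute() are produced by the Categ and Tags modules (see their diffs further below) in the form "type;subtype;id". A minimal sketch of the producer side, with a hypothetical item id:

    # hypothetical example of the message format expected by Sync_module.compute()
    obj_type = 'item'
    obj_subtype = ''                                  # items have no subtype
    obj_id = 'submitted/2021/11/26/example.gz'        # hypothetical item id
    message = f'{obj_type};{obj_subtype};{obj_id}'    # -> 'item;;submitted/2021/11/26/example.gz'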

bin/core/ail_2_ail.py Executable file

@@ -0,0 +1,883 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import json
import secrets
import re
import sys
import time
import uuid
import subprocess
from flask import escape
from pubsublogger import publisher
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'core/'))
import screen
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
from Item import Item
import Tag
config_loader = ConfigLoader.ConfigLoader()
r_cache = config_loader.get_redis_conn("Redis_Cache")
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_sync = config_loader.get_redis_conn("ARDB_DB")
config_loader = None
#### LOGS ####
# redis_logger = publisher
# redis_logger.port = 6380
# redis_logger.channel = 'AIL_SYNC'
##-- LOGS --##
def is_valid_uuid_v4(UUID):
if not UUID:
return False
UUID = UUID.replace('-', '')
try:
uuid_test = uuid.UUID(hex=UUID, version=4)
return uuid_test.hex == UUID
except:
return False
def sanityze_uuid(UUID):
sanityzed_uuid = uuid.UUID(hex=UUID, version=4)
return str(sanityzed_uuid)
def generate_uuid():
return str(uuid.uuid4()).replace('-', '')
def generate_sync_api_key():
return secrets.token_urlsafe(42)
def get_ail_uuid():
return r_serv_db.get('ail:uuid')
def get_sync_server_version():
return '0.1'
def is_valid_websocket_url(websocket_url):
regex_websocket_url = r'^(wss:\/\/)([0-9]{1,3}(?:\.[0-9]{1,3}){3}|(?=[^\/]{1,254}(?![^\/]))(?:(?=[a-zA-Z0-9-]{1,63}\.?)(?:xn--+)?[a-zA-Z0-9]+(?:-[a-zA-Z0-9]+)*\.?)+[a-zA-Z]{2,63}):([0-9]{1,5})$'
if re.match(regex_websocket_url, websocket_url):
return True
return False
def is_valid_websocket_key(ail_key):
regex_key = r'^[A-Za-z0-9-_]{56}$'
if re.match(regex_key, ail_key):
return True
return False
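As a rough illustration of the two validators above (all values hypothetical):

    # is_valid_websocket_url('wss://127.0.0.1:4443')      -> True
    # is_valid_websocket_url('ws://127.0.0.1:4443')       -> False, only wss:// is accepted
    # is_valid_websocket_key(secrets.token_urlsafe(42))   -> True, a 56-character urlsafe token
    #                                                        as produced by generate_sync_api_key()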
#### HANDLE CONFIG UPDATE ####
def get_last_updated_sync_config():
epoch = r_serv_sync.get(f'ail:instance:queue:last_updated_sync_config')
if not epoch:
epoch = 0
return float(epoch)
def set_last_updated_sync_config():
epoch = int(time.time())
r_serv_sync.set(f'ail:instance:queue:last_updated_sync_config', epoch)
return epoch
# # TODO: get connection status
# # TODO: get connection METADATA
#############################
# #
#### SYNC CLIENT MANAGER ####
def get_all_sync_clients(r_set=False):
res = r_cache.smembers('ail_2_ail:all_sync_clients')
if r_set:
return set(res)
else:
return res
def get_sync_client_ail_uuid(client_id):
return r_cache.hget(f'ail_2_ail:sync_client:{client_id}', 'ail_uuid')
# current: only one push registered
def get_client_id_by_ail_uuid(ail_uuid):
res = r_cache.smembers(f'ail_2_ail:ail_uuid:{ail_uuid}')
if res:
return int(res.pop())
def get_all_running_sync_servers():
running_ail_servers= []
for client_id in get_all_sync_clients():
ail_uuid = get_sync_client_ail_uuid(client_id)
running_ail_servers.append(ail_uuid)
return running_ail_servers
def delete_sync_client_cache(client_id):
ail_uuid = get_sync_client_ail_uuid(client_id)
# map ail_uuid/queue_uuid
r_cache.srem(f'ail_2_ail:ail_uuid:{ail_uuid}', client_id)
    # r_cache.srem(f'ail_2_ail:queue_uuid:{queue_uuid}', client_id)  # queue_uuid is not defined here
r_cache.delete(f'ail_2_ail:sync_client:{client_id}')
r_cache.srem('ail_2_ail:all_sync_clients', client_id)
def delete_all_sync_clients_cache():
for client_id in get_all_sync_clients():
delete_sync_client_cache(client_id)
r_cache.delete('ail_2_ail:all_sync_clients')
# command: -launch
# -kill
# -relaunch
## TODO: check command
def send_command_to_manager(command, client_id=-1, ail_uuid=None):
dict_action = {'command': command, 'client_id': client_id}
if ail_uuid:
dict_action['ail_uuid'] = ail_uuid
str_command = json.dumps(dict_action)
r_cache.sadd('ail_2_ail:client_manager:command', str_command)
def refresh_ail_instance_connection(ail_uuid):
client_id = get_client_id_by_ail_uuid(ail_uuid)
launch_required = is_ail_instance_push_enabled(ail_uuid)
# relaunch
if client_id and launch_required:
send_command_to_manager('relaunch', client_id=client_id)
# kill
elif client_id:
send_command_to_manager('kill', client_id=client_id)
# launch
elif launch_required:
send_command_to_manager('launch', ail_uuid=ail_uuid)
class AIL2AILClientManager(object):
"""AIL2AILClientManager."""
SCREEN_NAME = 'AIL_2_AIL'
SCRIPT_NAME = 'ail_2_ail_client.py'
SCRIPT_DIR = os.path.join(os.environ['AIL_BIN'], 'core')
def __init__(self):
# dict client_id: AIL2AILCLIENT or websocket
self.clients = {}
# launch all sync clients
self.relaunch_all_sync_clients()
def get_all_clients(self):
return self.clients
# return new client id
def get_new_sync_client_id(self):
for new_id in range(1, 100000):
new_id = str(new_id)
if new_id not in self.clients:
return str(new_id)
def get_sync_client_ail_uuid(self, client_id):
return self.clients[client_id]['ail_uuid']
# def get_sync_client_queue_uuid(self, client_id):
# return self.clients[client_id]['queue_uuid']
def get_all_sync_clients_to_launch(self):
ail_instances_to_launch = []
for ail_uuid in get_all_ail_instance():
if is_ail_instance_push_enabled(ail_uuid):
ail_instances_to_launch.append(ail_uuid)
return ail_instances_to_launch
def relaunch_all_sync_clients(self):
delete_all_sync_clients_cache()
self.clients = {}
for ail_uuid in self.get_all_sync_clients_to_launch():
self.launch_sync_client(ail_uuid)
def launch_sync_client(self, ail_uuid):
dir_project = os.environ['AIL_HOME']
client_id = self.get_new_sync_client_id()
script_options = f'-u {ail_uuid} -m push -i {client_id}'
screen.create_screen(AIL2AILClientManager.SCREEN_NAME)
screen.launch_uniq_windows_script(AIL2AILClientManager.SCREEN_NAME,
client_id, dir_project,
AIL2AILClientManager.SCRIPT_DIR,
AIL2AILClientManager.SCRIPT_NAME,
script_options=script_options, kill_previous_windows=True)
# save sync client status
r_cache.hset(f'ail_2_ail:sync_client:{client_id}', 'ail_uuid', ail_uuid)
r_cache.hset(f'ail_2_ail:sync_client:{client_id}', 'launch_time', int(time.time()))
r_cache.sadd('ail_2_ail:all_sync_clients', client_id)
# create map ail_uuid/queue_uuid
r_cache.sadd(f'ail_2_ail:ail_uuid:{ail_uuid}', client_id)
self.clients[client_id] = {'ail_uuid': ail_uuid}
# # TODO: FORCE KILL ????????????
# # TODO: check if exists
def kill_sync_client(self, client_id):
if not screen.kill_screen_window('AIL_2_AIL',client_id):
# # TODO: log kill error
pass
delete_sync_client_cache(client_id)
self.clients.pop(client_id)
## COMMANDS ##
def get_manager_command(self):
res = r_cache.spop('ail_2_ail:client_manager:command')
if res:
return json.loads(res)
else:
return None
def execute_manager_command(self, command_dict):
command = command_dict.get('command')
if command == 'launch':
ail_uuid = command_dict.get('ail_uuid')
self.launch_sync_client(ail_uuid)
elif command == 'relaunch_all':
self.relaunch_all_sync_clients()
else:
# only one sync client
client_id = int(command_dict.get('client_id'))
if client_id < 1:
print('Invalid client id')
return None
client_id = str(client_id)
if command == 'kill':
self.kill_sync_client(client_id)
elif command == 'relaunch':
ail_uuid = self.get_sync_client_ail_uuid(client_id)
self.kill_sync_client(client_id)
self.launch_sync_client(ail_uuid)
########################################
########################################
########################################
# # TODO: ADD METADATA
def get_sync_client_status(client_id):
dict_client = {'id': client_id}
dict_client['ail_uuid'] = get_sync_client_ail_uuid(client_id)
return dict_client
def get_all_sync_client_status():
sync_clients = []
all_sync_clients = r_cache.smembers('ail_2_ail:all_sync_clients')
for client_id in all_sync_clients:
sync_clients.append(get_sync_client_status(client_id))
return sync_clients
######################
# #
#### AIL INSTANCE ####
## AIL KEYS ##
def get_all_ail_instance_keys():
return r_serv_sync.smembers(f'ail:instance:key:all')
def is_allowed_ail_instance_key(key):
return r_serv_sync.sismember(f'ail:instance:key:all', key)
def get_ail_instance_key(ail_uuid):
return r_serv_sync.hget(f'ail:instance:{ail_uuid}', 'api_key')
def get_ail_instance_by_key(key):
return r_serv_sync.get(f'ail:instance:key:{key}')
# def check_acl_sync_queue_ail(ail_uuid, queue_uuid, key):
# return is_ail_instance_queue(ail_uuid, queue_uuid)
def update_ail_instance_key(ail_uuid, new_key):
old_key = get_ail_instance_key(ail_uuid)
r_serv_sync.srem(f'ail:instance:key:all', old_key)
r_serv_sync.delete(f'ail:instance:key:{old_key}')
r_serv_sync.sadd(f'ail:instance:key:all', new_key)
r_serv_sync.delete(f'ail:instance:key:{new_key}', ail_uuid)
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'api_key', new_key)
#- AIL KEYS -#
def get_all_ail_instance():
return r_serv_sync.smembers('ail:instance:all')
def get_ail_instance_all_sync_queue(ail_uuid):
return r_serv_sync.smembers(f'ail:instance:sync_queue:{ail_uuid}')
def is_ail_instance_queue(ail_uuid, queue_uuid):
return r_serv_sync.sismember(f'ail:instance:sync_queue:{ail_uuid}', queue_uuid)
def exists_ail_instance(ail_uuid):
return r_serv_sync.exists(f'ail:instance:{ail_uuid}')
def get_ail_instance_url(ail_uuid):
return r_serv_sync.hget(f'ail:instance:{ail_uuid}', 'url')
def get_ail_instance_description(ail_uuid):
return r_serv_sync.hget(f'ail:instance:{ail_uuid}', 'description')
def exists_ail_instance(ail_uuid):
return r_serv_sync.sismember('ail:instance:all', ail_uuid)
def is_ail_instance_push_enabled(ail_uuid):
res = r_serv_sync.hget(f'ail:instance:{ail_uuid}', 'push')
return res == 'True'
def is_ail_instance_pull_enabled(ail_uuid):
res = r_serv_sync.hget(f'ail:instance:{ail_uuid}', 'pull')
return res == 'True'
def is_ail_instance_sync_enabled(ail_uuid, sync_mode=None):
if sync_mode is None:
return is_ail_instance_push_enabled(ail_uuid) or is_ail_instance_pull_enabled(ail_uuid)
elif sync_mode == 'pull':
return is_ail_instance_pull_enabled(ail_uuid)
elif sync_mode == 'push':
return is_ail_instance_push_enabled(ail_uuid)
else:
return False
def change_pull_push_state(ail_uuid, pull=False, push=False):
# sanityze pull/push
if pull:
pull = True
else:
pull = False
if push:
push = True
else:
push = False
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'push', push)
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'pull', pull)
set_last_updated_sync_config()
refresh_ail_instance_connection(ail_uuid)
def get_ail_server_version(ail_uuid):
return r_serv_sync.hget(f'ail:instance:{ail_uuid}', 'version')
def get_ail_server_ping(ail_uuid):
res = r_serv_sync.hget(f'ail:instance:{ail_uuid}', 'ping')
return res == 'True'
def get_ail_server_error(ail_uuid):
return r_cache.hget(f'ail_2_ail:all_servers:metadata:{ail_uuid}', 'error')
# # TODO: HIDE ADD GLOBAL FILTER (ON BOTH SIDE)
def get_ail_instance_metadata(ail_uuid, sync_queues=False):
dict_meta = {}
dict_meta['uuid'] = ail_uuid
dict_meta['url'] = get_ail_instance_url(ail_uuid)
dict_meta['description'] = get_ail_instance_description(ail_uuid)
dict_meta['pull'] = is_ail_instance_pull_enabled(ail_uuid)
    dict_meta['push'] = is_ail_instance_push_enabled(ail_uuid)
dict_meta['ping'] = get_ail_server_ping(ail_uuid)
dict_meta['version'] = get_ail_server_version(ail_uuid)
dict_meta['error'] = get_ail_server_error(ail_uuid)
# # TODO: HIDE
dict_meta['api_key'] = get_ail_instance_key(ail_uuid)
if sync_queues:
dict_meta['sync_queues'] = get_ail_instance_all_sync_queue(ail_uuid)
# # TODO:
# - set UUID sync_queue
return dict_meta
def get_all_ail_instances_metadata():
l_servers = []
for ail_uuid in get_all_ail_instance():
l_servers.append(get_ail_instance_metadata(ail_uuid, sync_queues=True))
return l_servers
def get_ail_instances_metadata(l_ail_servers):
l_servers = []
for ail_uuid in l_ail_servers:
l_servers.append(get_ail_instance_metadata(ail_uuid, sync_queues=True))
return l_servers
# # TODO: VALIDATE URL
# API KEY
def create_ail_instance(ail_uuid, url, api_key=None, description=None, pull=True, push=True):
r_serv_sync.sadd('ail:instance:all', ail_uuid)
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'url', url)
## API KEY ##
if not api_key:
api_key = generate_sync_api_key()
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'api_key', api_key)
r_serv_sync.sadd('ail:instance:key:all', api_key)
r_serv_sync.set(f'ail:instance:key:{api_key}', ail_uuid)
#- API KEY -#
if description:
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'description', description)
change_pull_push_state(ail_uuid, pull=pull, push=push)
set_last_updated_sync_config()
refresh_ail_instance_connection(ail_uuid)
return ail_uuid
def delete_ail_instance(ail_uuid):
for queue_uuid in get_ail_instance_all_sync_queue(ail_uuid):
unregister_ail_to_sync_queue(ail_uuid, queue_uuid)
r_serv_sync.delete(f'ail:instance:sync_queue:{ail_uuid}')
key = get_ail_instance_by_key(ail_uuid)
r_serv_sync.delete(f'ail:instance:{ail_uuid}')
r_serv_sync.srem('ail:instance:key:all', ail_uuid)
r_serv_sync.delete(f'ail:instance:key:{key}', ail_uuid)
r_serv_sync.srem('ail:instance:all', ail_uuid)
set_last_updated_sync_config()
refresh_ail_instance_connection(ail_uuid)
return ail_uuid
## WEBSOCKET API - ERRORS ##
def set_ail_server_version(ail_uuid, version):
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'version', version)
def set_ail_server_ping(ail_uuid, pong):
r_serv_sync.hset(f'ail:instance:{ail_uuid}', 'ping', bool(pong))
def save_ail_server_error(ail_uuid, error_message):
r_cache.hset(f'ail_2_ail:all_servers:metadata:{ail_uuid}', 'error', error_message)
def clear_save_ail_server_error(ail_uuid):
r_cache.hdel(f'ail_2_ail:all_servers:metadata:{ail_uuid}', 'error')
def _get_remote_ail_server_response(ail_uuid, api_request):
websocket_client = os.path.join(os.environ['AIL_BIN'], 'core', 'ail_2_ail_client.py')
l_command = ['python', websocket_client, '-u', ail_uuid, '-m', 'api', '-a', api_request]
process = subprocess.Popen(l_command, stdout=subprocess.PIPE)
while process.poll() is None:
time.sleep(1)
if process.returncode == 0:
# Scrapy-Splash ERRORS
if process.stderr:
stderr = process.stderr.read().decode()
if stderr:
print(f'stderr: {stderr}')
if process.stdout:
output = process.stdout.read().decode()
#print(output)
if output:
try:
message = json.loads(output)
return message
except Exception as e:
print(e)
error = f'Error: {e}'
save_ail_server_error(ail_uuid, error)
return
# ERROR
else:
if process.stderr:
stderr = process.stderr.read().decode()
else:
stderr = ''
if process.stdout:
stdout = process.stdout.read().decode()
else:
stdout =''
if stderr or stdout:
error = f'-stderr-\n{stderr}\n-stdout-\n{stdout}'
print(error)
save_ail_server_error(ail_uuid, error)
return
def get_remote_ail_server_version(ail_uuid):
response = _get_remote_ail_server_response(ail_uuid, 'version')
if response:
version = response.get('version')
if version:
version = float(version)
if version >= 0.1:
set_ail_server_version(ail_uuid, version)
return version
# # TODO: CATCH WEBSOCKETS RESPONSE CODE
def ping_remote_ail_server(ail_uuid):
response = _get_remote_ail_server_response(ail_uuid, 'ping')
if response:
response = response.get('message', False)
pong = response == 'pong'
set_ail_server_ping(ail_uuid, pong)
return pong
## API ##
def api_ping_remote_ail_server(json_dict):
ail_uuid = json_dict.get('uuid').replace(' ', '')
if not is_valid_uuid_v4(ail_uuid):
return {"status": "error", "reason": "Invalid ail uuid"}, 400
ail_uuid = sanityze_uuid(ail_uuid)
if not exists_ail_instance(ail_uuid):
return {"status": "error", "reason": "AIL server not found"}, 404
res = ping_remote_ail_server(ail_uuid)
return res, 200
def api_get_remote_ail_server_version(json_dict):
ail_uuid = json_dict.get('uuid').replace(' ', '')
if not is_valid_uuid_v4(ail_uuid):
return {"status": "error", "reason": "Invalid ail uuid"}, 400
ail_uuid = sanityze_uuid(ail_uuid)
if not exists_ail_instance(ail_uuid):
return {"status": "error", "reason": "AIL server not found"}, 404
res = get_remote_ail_server_version(ail_uuid)
return res, 200
def api_create_ail_instance(json_dict):
ail_uuid = json_dict.get('uuid').replace(' ', '')
if not is_valid_uuid_v4(ail_uuid):
return {"status": "error", "reason": "Invalid ail uuid"}, 400
ail_uuid = sanityze_uuid(ail_uuid)
if exists_ail_instance(ail_uuid):
return {"status": "error", "reason": "AIL uuid already exists"}, 400
if json_dict.get('pull'):
pull = True
else:
pull = False
if json_dict.get('push'):
push = True
else:
push = False
description = json_dict.get('description')
ail_url = json_dict.get('url').replace(' ', '')
if not is_valid_websocket_url(ail_url):
return {"status": "error", "reason": "Invalid websocket url"}, 400
ail_key = json_dict.get('key')
if ail_key:
ail_key = ail_key.replace(' ', '')
if not is_valid_websocket_key(ail_key):
return {"status": "error", "reason": "Invalid websocket key"}, 400
res = create_ail_instance(ail_uuid, ail_url, api_key=ail_key, description=description,
pull=pull, push=push)
return res, 200
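As a sketch, a request body accepted by api_create_ail_instance() could look as follows (the uuid is taken from the test values at the bottom of this file; everything else is hypothetical):

    json_dict = {'uuid': '03c51929-eeab-4d47-9dc0-c667f94c7d2d',   # uuid of the remote AIL instance
                 'url': 'wss://127.0.0.1:4443',                    # remote ail_2_ail_server endpoint
                 'key': None,                                      # omit to auto-generate a sync key
                 'description': 'test instance',
                 'pull': True,
                 'push': True}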
def api_delete_ail_instance(json_dict):
ail_uuid = json_dict.get('uuid', '').replace(' ', '')
if not is_valid_uuid_v4(ail_uuid):
return {"status": "error", "reason": "Invalid AIL uuid"}, 400
ail_uuid = sanityze_uuid(ail_uuid)
if not exists_ail_instance(ail_uuid):
return {"status": "error", "reason": "AIL server not found"}, 404
res = delete_ail_instance(ail_uuid)
return res, 200
####################
# #
#### SYNC QUEUE ####
class Sync_Queue(object): # # TODO: use for edit
"""Sync_Queue."""
def __init__(self, uuid):
self.uuid = uuid
def get_all_sync_queue():
return r_serv_sync.smembers('ail2ail:sync_queue:all')
def get_sync_queue_all_ail_instance(queue_uuid):
return r_serv_sync.smembers(f'ail2ail:sync_queue:ail_instance:{queue_uuid}')
def exists_sync_queue(queue_uuid):
return r_serv_sync.exists(f'ail2ail:sync_queue:{queue_uuid}')
# # TODO: check if push or pull enabled ?
def is_queue_used_by_ail_instance(queue_uuid):
return r_serv_sync.exists(f'ail2ail:sync_queue:ail_instance:{queue_uuid}')
# # TODO: add others filter
def get_sync_queue_filter(queue_uuid):
return r_serv_sync.smembers(f'ail2ail:sync_queue:filter:tags:{queue_uuid}')
def get_sync_queue_name(queue_uuid):
return r_serv_sync.hget(f'ail2ail:sync_queue:{queue_uuid}', 'name')
def get_sync_queue_description(queue_uuid):
return r_serv_sync.hget(f'ail2ail:sync_queue:{queue_uuid}', 'description')
def get_sync_queue_max_size(queue_uuid):
return r_serv_sync.hget(f'ail2ail:sync_queue:{queue_uuid}', 'max_size')
# # TODO: ADD FILTER
def get_sync_queue_metadata(queue_uuid):
dict_meta = {}
dict_meta['uuid'] = queue_uuid
dict_meta['name'] = get_sync_queue_name(queue_uuid)
dict_meta['description'] = get_sync_queue_description(queue_uuid)
dict_meta['max_size'] = get_sync_queue_max_size(queue_uuid)
dict_meta['tags'] = get_sync_queue_filter(queue_uuid)
# # TODO: TO ADD:
# - get uuid instance
return dict_meta
def get_all_queues_metadata():
l_queues = []
for queue_uuid in get_all_sync_queue():
l_queues.append(get_sync_queue_metadata(queue_uuid))
return l_queues
def get_queues_metadata(l_queues_uuid):
l_queues = []
for queue_uuid in l_queues_uuid:
l_queues.append(get_sync_queue_metadata(queue_uuid))
return l_queues
#####################################################
def get_all_sync_queue_dict():
dict_sync_queues = {}
for queue_uuid in get_all_sync_queue():
if is_queue_used_by_ail_instance(queue_uuid):
dict_queue = {}
dict_queue['filter'] = get_sync_queue_filter(queue_uuid)
dict_queue['ail_instances'] = [] ############ USE DICT ?????????
for ail_uuid in get_sync_queue_all_ail_instance(queue_uuid):
dict_ail = {'ail_uuid': ail_uuid,
'pull': is_ail_instance_pull_enabled(ail_uuid),
'push': is_ail_instance_push_enabled(ail_uuid)}
if dict_ail['pull'] or dict_ail['push']:
dict_queue['ail_instances'].append(dict_ail)
if dict_queue['ail_instances']:
dict_sync_queues[queue_uuid] = dict_queue
return dict_sync_queues
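For reference, the dictionary returned here (and consumed by Sync_module above) has roughly the following shape, reusing the test uuids and tag from the bottom of this file:

    # {'79bcafc0a6d644deb2c75fb5a83d7caa': {                       # sync queue uuid
    #      'filter': {'infoleak:submission="manual"'},             # tag filter of the queue
    #      'ail_instances': [{'ail_uuid': '03c51929-eeab-4d47-9dc0-c667f94c7d2d',
    #                         'pull': True,
    #                         'push': True}]}}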
def is_queue_registred_by_ail_instance(queue_uuid, ail_uuid):
return r_serv_sync.sismember(f'ail:instance:sync_queue:{ail_uuid}', queue_uuid)
def register_ail_to_sync_queue(ail_uuid, queue_uuid):
r_serv_sync.sadd(f'ail2ail:sync_queue:ail_instance:{queue_uuid}', ail_uuid)
r_serv_sync.sadd(f'ail:instance:sync_queue:{ail_uuid}', queue_uuid)
set_last_updated_sync_config()
# # # FIXME: TODO: delete sync queue ????????????????????????????????????????????????????
def unregister_ail_to_sync_queue(ail_uuid, queue_uuid):
r_serv_sync.srem(f'ail2ail:sync_queue:ail_instance:{queue_uuid}', ail_uuid)
r_serv_sync.srem(f'ail:instance:sync_queue:{ail_uuid}', queue_uuid)
set_last_updated_sync_config()
def get_all_unregistred_queue_by_ail_instance(ail_uuid):
return r_serv_sync.sdiff('ail2ail:sync_queue:all', f'ail:instance:sync_queue:{ail_uuid}')
# # TODO: optional name ???
# # TODO: SANITYZE TAGS
def create_sync_queue(name, tags=[], description=None, max_size=100):
queue_uuid = generate_uuid()
r_serv_sync.sadd('ail2ail:sync_queue:all', queue_uuid)
r_serv_sync.hset(f'ail2ail:sync_queue:{queue_uuid}', 'name', name)
if description:
r_serv_sync.hset(f'ail2ail:sync_queue:{queue_uuid}', 'description', description)
r_serv_sync.hset(f'ail2ail:sync_queue:{queue_uuid}', 'max_size', max_size)
for tag in tags:
r_serv_sync.sadd(f'ail2ail:sync_queue:filter:tags:{queue_uuid}', tag)
set_last_updated_sync_config()
return queue_uuid
def delete_sync_queue(queue_uuid):
for ail_uuid in get_sync_queue_all_ail_instance(queue_uuid):
unregister_ail_to_sync_queue(ail_uuid, queue_uuid)
r_serv_sync.delete(f'ail2ail:sync_queue:{queue_uuid}')
r_serv_sync.delete(f'ail2ail:sync_queue:filter:tags:{queue_uuid}')
r_serv_sync.srem('ail2ail:sync_queue:all', queue_uuid)
set_last_updated_sync_config()
return queue_uuid
## API ##
# # TODO: sanityze queue_name
def api_create_sync_queue(json_dict):
description = json_dict.get('description')
description = escape(description)
queue_name = json_dict.get('name')
if queue_name: #################################################
queue_name = escape(queue_name)
tags = json_dict.get('tags')
if not tags:
{"status": "error", "reason": "no tags provided"}, 400
if not Tag.are_enabled_tags(tags):
{"status": "error", "reason": "Invalid/Disabled tags"}, 400
max_size = json_dict.get('max_size')
if not max_size:
max_size = 100
try:
max_size = int(max_size)
except ValueError:
{"status": "error", "reason": "Invalid queue size value"}, 400
if not max_size > 0:
return {"status": "error", "reason": "Invalid queue size value"}, 400
queue_uuid = create_sync_queue(queue_name, tags=tags, description=description,
max_size=max_size)
return queue_uuid, 200
def api_delete_sync_queue(json_dict):
queue_uuid = json_dict.get('uuid', '').replace(' ', '').replace('-', '')
if not is_valid_uuid_v4(queue_uuid):
return {"status": "error", "reason": "Invalid Queue uuid"}, 400
if not exists_sync_queue(queue_uuid):
return {"status": "error", "reason": "Queue Sync not found"}, 404
res = delete_sync_queue(queue_uuid)
return res, 200
def api_register_ail_to_sync_queue(json_dict):
ail_uuid = json_dict.get('ail_uuid', '').replace(' ', '')
if not is_valid_uuid_v4(ail_uuid):
return {"status": "error", "reason": "Invalid AIL uuid"}, 400
ail_uuid = sanityze_uuid(ail_uuid)
queue_uuid = json_dict.get('queue_uuid', '').replace(' ', '').replace('-', '')
if not is_valid_uuid_v4(queue_uuid):
return {"status": "error", "reason": "Invalid Queue uuid"}, 400
if not exists_ail_instance(ail_uuid):
return {"status": "error", "reason": "AIL server not found"}, 404
if not exists_sync_queue(queue_uuid):
return {"status": "error", "reason": "Queue Sync not found"}, 404
if is_queue_registred_by_ail_instance(queue_uuid, ail_uuid):
return {"status": "error", "reason": "Queue already registred"}, 400
res = register_ail_to_sync_queue(ail_uuid, queue_uuid)
return res, 200
def api_unregister_ail_to_sync_queue(json_dict):
ail_uuid = json_dict.get('ail_uuid', '').replace(' ', '')
if not is_valid_uuid_v4(ail_uuid):
return {"status": "error", "reason": "Invalid ail uuid"}, 400
ail_uuid = sanityze_uuid(ail_uuid)
queue_uuid = json_dict.get('queue_uuid', '').replace(' ', '').replace('-', '')
if not is_valid_uuid_v4(queue_uuid):
return {"status": "error", "reason": "Invalid ail uuid"}, 400
if not exists_ail_instance(ail_uuid):
return {"status": "error", "reason": "AIL server not found"}, 404
if not exists_sync_queue(queue_uuid):
return {"status": "error", "reason": "Queue Sync not found"}, 404
if not is_queue_registred_by_ail_instance(queue_uuid, ail_uuid):
return {"status": "error", "reason": "Queue not registred"}, 400
res = unregister_ail_to_sync_queue(ail_uuid, queue_uuid)
return res, 200
#############################
# #
#### SYNC REDIS QUEUE #######
def get_sync_queue_object(ail_uuid, push=True):
for queue_uuid in get_ail_instance_all_sync_queue(ail_uuid):
obj_dict = get_sync_queue_object_by_queue_uuid(queue_uuid, ail_uuid, push=push)
if obj_dict:
return obj_dict
return None
def get_sync_queue_object_by_queue_uuid(queue_uuid, ail_uuid, push=True):
if push:
sync_mode = 'push'
else:
sync_mode = 'pull'
obj_dict = r_serv_sync.lpop(f'sync:queue:{sync_mode}:{queue_uuid}:{ail_uuid}')
if obj_dict:
obj_dict = json.loads(obj_dict)
# # REVIEW: # TODO: create by obj type
return Item(obj_dict['id'])
def add_object_to_sync_queue(queue_uuid, ail_uuid, obj_dict, push=True, pull=True):
obj = json.dumps(obj_dict)
# # TODO: # FIXME: USE CACHE ??????
if push:
r_serv_sync.lpush(f'sync:queue:push:{queue_uuid}:{ail_uuid}', obj)
r_serv_sync.ltrim(f'sync:queue:push:{queue_uuid}:{ail_uuid}', 0, 200)
if pull:
r_serv_sync.lpush(f'sync:queue:pull:{queue_uuid}:{ail_uuid}', obj)
r_serv_sync.ltrim(f'sync:queue:pull:{queue_uuid}:{ail_uuid}', 0, 200)
# # TODO: # REVIEW: USE CACHE ????? USE QUEUE FACTORY ?????
def get_sync_importer_ail_stream():
return r_serv_sync.spop('sync:queue:importer')
def add_ail_stream_to_sync_importer(ail_stream):
ail_stream = json.dumps(ail_stream)
r_serv_sync.sadd('sync:queue:importer', ail_stream)
#############################
# #
#### AIL EXCHANGE FORMAT ####
def create_ail_stream(Object):
ail_stream = {'format': 'ail',
'version': 1,
'type': Object.get_type()}
# OBJECT META
ail_stream['meta'] = {'ail_mime-type': 'text/plain'}
ail_stream['meta']['ail:id'] = Object.get_id()
ail_stream['meta']['ail:tags'] = Object.get_tags()
# GLOBAL PAYLOAD
ail_stream['meta']['ail:uuid'] = get_ail_uuid()
# OBJECT PAYLOAD
ail_stream['payload'] = Object.get_ail_2_ail_payload()
return ail_stream
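A sketch of the resulting AIL exchange format, for a hypothetical item (uuid and tag reused from the test values below):

    # {'format': 'ail',
    #  'version': 1,
    #  'type': 'item',
    #  'meta': {'ail_mime-type': 'text/plain',
    #           'ail:id': 'submitted/2021/11/26/example.gz',          # hypothetical item id
    #           'ail:tags': ['infoleak:submission="manual"'],
    #           'ail:uuid': 'c3c2f3ef-ca53-4ff6-8317-51169b73f731'},  # uuid of the sending instance
    #  'payload': {'raw': '<base64 of the gzipped item>', 'compress': 'gzip'}}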
if __name__ == '__main__':
ail_uuid = '03c51929-eeab-4d47-9dc0-c667f94c7d2d'
url = "wss://localhost:4443"
api_key = 'secret'
#description = 'first test instance'
queue_uuid = '79bcafc0a6d644deb2c75fb5a83d7caa'
tags = ['infoleak:submission="manual"']
name = 'submitted queue'
description = 'first test queue, all submitted items'
#queue_uuid = ''
#res = create_ail_instance(ail_uuid, url, api_key=api_key, description=description)
#res = create_sync_queue(name, tags=tags, description=description, max_size=100)
#res = delete_sync_queue(queue_uuid)
#res = register_ail_to_sync_queue(ail_uuid, queue_uuid)
#res = change_pull_push_state(ail_uuid, push=True, pull=True)
# print(get_ail_instance_all_sync_queue(ail_uuid))
# print(get_all_sync_queue())
# res = get_all_unregistred_queue_by_ail_instance(ail_uuid)
ail_uuid = 'c3c2f3ef-ca53-4ff6-8317-51169b73f731'
ail_uuid = '03c51929-eeab-4d47-9dc0-c667f94c7d2d'
# res = ping_remote_ail_server(ail_uuid)
# print(res)
#
res = get_remote_ail_server_version(ail_uuid)
#res = _get_remote_ail_server_response(ail_uuid, 'pin')
print(res)

bin/core/ail_2_ail_client.py Executable file

@@ -0,0 +1,174 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import argparse
import json
import os
import sys
import time
from pubsublogger import publisher
from urllib.parse import urljoin
import asyncio
import http
import ssl
import websockets
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from core import ail_2_ail
#### LOGS ####
redis_logger = publisher
redis_logger.port = 6380
redis_logger.channel = 'AIL_SYNC_client'
##-- LOGS --##
####################################################################
class AIL2AILClient(object):
"""AIL2AILClient."""
def __init__(self, client_id, ail_uuid, sync_mode):
        self.client_id = client_id
self.ail_uuid = ail_uuid
self.sync_mode = sync_mode
        self.uri = f"{ail_2_ail.get_ail_instance_url(ail_uuid)}/{sync_mode}/{ail_uuid}"
####################################################################
# # TODO: ADD TIMEOUT => 30s
async def api_request(websocket, ail_uuid):
res = await websocket.recv()
# API OUTPUT
sys.stdout.write(res)
# # TODO: ADD TIMEOUT
async def pull(websocket, ail_uuid):
while True:
obj = await websocket.recv()
        sys.stdout.write(obj)
async def push(websocket, ail_uuid):
while True:
# get elem to send
Obj = ail_2_ail.get_sync_queue_object(ail_uuid)
if Obj:
obj_ail_stream = ail_2_ail.create_ail_stream(Obj)
obj_ail_stream = json.dumps(obj_ail_stream)
print(obj_ail_stream)
# send objects
await websocket.send(obj_ail_stream)
# DEBUG:
await asyncio.sleep(0.1)
else:
await asyncio.sleep(10)
async def ail_to_ail_client(ail_uuid, sync_mode, api, ail_key=None):
if not ail_2_ail.exists_ail_instance(ail_uuid):
print('AIL server not found')
return
if not ail_key:
ail_key = ail_2_ail.get_ail_instance_key(ail_uuid)
# # TODO: raise exception
ail_url = ail_2_ail.get_ail_instance_url(ail_uuid)
local_ail_uuid = ail_2_ail.get_ail_uuid()
if sync_mode == 'api':
uri = f"{ail_url}/{sync_mode}/{api}/{local_ail_uuid}"
else:
uri = f"{ail_url}/{sync_mode}/{local_ail_uuid}"
#print(uri)
ail_2_ail.clear_save_ail_server_error(ail_uuid)
try:
async with websockets.connect(
uri,
ssl=ssl_context,
extra_headers={"Authorization": f"{ail_key}"}
) as websocket:
if sync_mode == 'pull':
await pull(websocket, ail_uuid)
elif sync_mode == 'push':
await push(websocket, ail_uuid)
await websocket.close()
elif sync_mode == 'api':
await api_request(websocket, ail_uuid)
await websocket.close()
except websockets.exceptions.InvalidStatusCode as e:
status_code = e.status_code
error_message = ''
# success
if status_code == 1000:
print('connection closed')
elif status_code == 400:
error_message = 'BAD_REQUEST: Invalid path'
elif status_code == 401:
error_message = 'UNAUTHORIZED: Invalid Key'
elif status_code == 403:
error_message = 'FORBIDDEN: SYNC mode disabled'
else:
error_message = str(e)
if error_message:
sys.stderr.write(error_message)
redis_logger.warning(f'{error_message}: {ail_uuid}')
ail_2_ail.save_ail_server_error(ail_uuid, error_message)
except websockets.exceptions.InvalidURI as e:
error_message = f'Invalid AIL url: {e.uri}'
sys.stderr.write(error_message)
redis_logger.warning(f'{error_message}: {ail_uuid}')
ail_2_ail.save_ail_server_error(ail_uuid, error_message)
except ConnectionError as e:
error_message = str(e)
sys.stderr.write(error_message)
redis_logger.info(f'{error_message}: {ail_uuid}')
ail_2_ail.save_ail_server_error(ail_uuid, error_message)
except websockets.exceptions.ConnectionClosedOK as e:
print('connection closed')
# except Exception as e:
# print(e)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Websocket SYNC Client')
parser.add_argument('-u', '--uuid', help='AIL UUID', type=str, dest='ail_uuid', required=True, default=None)
parser.add_argument('-i', '--client_id', help='Client ID', type=str, dest='client_id', default=None)
parser.add_argument('-m', '--mode', help='SYNC Mode, pull, push or api', type=str, dest='sync_mode', default='pull')
parser.add_argument('-a', '--api', help='API, ping or version', type=str, dest='api', default=None)
#parser.add_argument('-k', '--key', type=str, default='', help='AIL Key')
args = parser.parse_args()
ail_uuid = args.ail_uuid
sync_mode = args.sync_mode
api = args.api
if ail_uuid is None or sync_mode not in ['api', 'pull', 'push']:
parser.print_help()
sys.exit(0)
if api:
if api not in ['ping', 'version']:
parser.print_help()
sys.exit(0)
# SELF SIGNED CERTIFICATES
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
# SELF SIGNED CERTIFICATES
asyncio.get_event_loop().run_until_complete(ail_to_ail_client(ail_uuid, sync_mode, api))
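For reference, AIL2AILClientManager.launch_sync_client() (in ail_2_ail.py above) starts this script in a screen window with options of the form -u <ail_uuid> -m push -i <client_id>, and _get_remote_ail_server_response() invokes it with -u <ail_uuid> -m api -a ping|version to query a remote instance.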

bin/core/ail_2_ail_server.py Executable file

@@ -0,0 +1,281 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import json
import os
import sys
import uuid
import asyncio
import http
import ssl
import websockets
sys.path.append(os.environ['AIL_BIN'])
##################################
# Import Project packages
##################################
from pubsublogger import publisher
from core import ail_2_ail
# # TODO: refactor logging
#### LOGS ####
redis_logger = publisher
redis_logger.port = 6380
redis_logger.channel = 'AIL_SYNC_Server'
#############################
CONNECTED_CLIENT = set()
# # TODO: Store in redis
#############################
# # # # # # #
# #
# UTILS #
# #
# # # # # # #
def is_valid_uuid_v4(UUID):
if not UUID:
return False
UUID = UUID.replace('-', '')
try:
uuid_test = uuid.UUID(hex=UUID, version=4)
return uuid_test.hex == UUID
except:
return False
def unpack_path(path):
dict_path = {}
path = path.split('/')
if len(path) < 3:
raise Exception('Invalid url path')
if not len(path[-1]):
path = path[:-1]
dict_path['sync_mode'] = path[1]
dict_path['ail_uuid'] = path[-1]
dict_path['api'] = path[2:-1]
return dict_path
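As an illustration, for the URLs built by ail_2_ail_client.py (uuid hypothetical):

    # unpack_path('/push/03c51929-eeab-4d47-9dc0-c667f94c7d2d')
    #   -> {'sync_mode': 'push', 'ail_uuid': '03c51929-eeab-4d47-9dc0-c667f94c7d2d', 'api': []}
    # unpack_path('/api/version/03c51929-eeab-4d47-9dc0-c667f94c7d2d')
    #   -> {'sync_mode': 'api', 'ail_uuid': '03c51929-eeab-4d47-9dc0-c667f94c7d2d', 'api': ['version']}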
# # # # # # #
# async def send_object():
# if CONNECTED_CLIENT:
# message = 'new json object {"id": "test01"}'
# await asyncio.wait([user.send(message) for user in USERS])
async def register(websocket):
ail_uuid = websocket.ail_uuid
remote_address = websocket.remote_address
redis_logger.info(f'Client Connected: {ail_uuid} {remote_address}')
print(f'Client Connected: {ail_uuid} {remote_address}')
CONNECTED_CLIENT.add(websocket)
#print(CONNECTED_CLIENT)
async def unregister(websocket):
CONNECTED_CLIENT.remove(websocket)
# PULL: Send data to client
# # TODO: ADD TIMEOUT ???
async def pull(websocket, ail_uuid):
for queue_uuid in ail_2_ail.get_ail_instance_all_sync_queue(ail_uuid):
while True:
# get elem to send
Obj = ail_2_ail.get_sync_queue_object_by_queue_uuid(queue_uuid, ail_uuid, push=False)
if Obj:
obj_ail_stream = ail_2_ail.create_ail_stream(Obj)
Obj = json.dumps(obj_ail_stream)
#print(Obj)
# send objects
await websocket.send(Obj)
# END PULL
else:
break
# END PULL
return None
# PUSH: receive data from client
# # TODO: optional queue_uuid
async def push(websocket, ail_uuid):
#print(ail_uuid)
while True:
ail_stream = await websocket.recv()
# # TODO: CHECK ail_stream
ail_stream = json.loads(ail_stream)
#print(ail_stream)
ail_2_ail.add_ail_stream_to_sync_importer(ail_stream)
# API: server API
# # TODO: ADD TIMEOUT ???
async def api(websocket, ail_uuid, api):
api = api[0]
if api == 'ping':
message = {'message':'pong'}
message = json.dumps(message)
await websocket.send(message)
elif api == 'version':
sync_version = ail_2_ail.get_sync_server_version()
message = {'version': sync_version}
message = json.dumps(message)
await websocket.send(message)
# END API
return
async def ail_to_ail_serv(websocket, path):
# # TODO: save in class
ail_uuid = websocket.ail_uuid
remote_address = websocket.remote_address
path = unpack_path(path)
sync_mode = path['sync_mode']
# # TODO: check if it works
# # DEBUG:
# print(websocket.ail_uuid)
# print(websocket.remote_address)
# print(f'sync mode: {sync_mode}')
await register(websocket)
try:
if sync_mode == 'pull':
await pull(websocket, websocket.ail_uuid)
await websocket.close()
redis_logger.info(f'Connection closed: {ail_uuid} {remote_address}')
print(f'Connection closed: {ail_uuid} {remote_address}')
elif sync_mode == 'push':
await push(websocket, websocket.ail_uuid)
elif sync_mode == 'api':
await api(websocket, websocket.ail_uuid, path['api'])
await websocket.close()
redis_logger.info(f'Connection closed: {ail_uuid} {remote_address}')
print(f'Connection closed: {ail_uuid} {remote_address}')
finally:
await unregister(websocket)
###########################################
# CHECK Authorization HEADER and URL PATH #
# # TODO: check AIL UUID (optional header)
class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):
"""AIL_2_AIL_Protocol websockets server."""
async def process_request(self, path, request_headers):
# DEBUG:
# print(self.remote_address)
# print(request_headers)
# API TOKEN
api_key = request_headers.get('Authorization', '')
        if not api_key:
redis_logger.warning(f'Missing token: {self.remote_address}')
print(f'Missing token: {self.remote_address}')
return http.HTTPStatus.UNAUTHORIZED, [], b"Missing token\n"
if not ail_2_ail.is_allowed_ail_instance_key(api_key):
redis_logger.warning(f'Invalid token: {self.remote_address}')
print(f'Invalid token: {self.remote_address}')
return http.HTTPStatus.UNAUTHORIZED, [], b"Invalid token\n"
# PATH
try:
dict_path = unpack_path(path)
except Exception as e:
redis_logger.warning(f'Invalid path: {self.remote_address}')
print(f'Invalid path: {self.remote_address}')
return http.HTTPStatus.BAD_REQUEST, [], b"Invalid path\n"
ail_uuid = ail_2_ail.get_ail_instance_by_key(api_key)
if ail_uuid != dict_path['ail_uuid']:
redis_logger.warning(f'Invalid token: {self.remote_address} {ail_uuid}')
print(f'Invalid token: {self.remote_address} {ail_uuid}')
return http.HTTPStatus.UNAUTHORIZED, [], b"Invalid token\n"
        if api_key != ail_2_ail.get_ail_instance_key(ail_uuid):
redis_logger.warning(f'Invalid token: {self.remote_address} {ail_uuid}')
print(f'Invalid token: {self.remote_address} {ail_uuid}')
return http.HTTPStatus.UNAUTHORIZED, [], b"Invalid token\n"
self.ail_key = api_key
self.ail_uuid = ail_uuid
if dict_path['sync_mode'] == 'pull' or dict_path['sync_mode'] == 'push':
# QUEUE UUID
# if dict_path['queue_uuid']:
#
# if not is_valid_uuid_v4(dict_path['queue_uuid']):
# print('Invalid UUID')
# return http.HTTPStatus.BAD_REQUEST, [], b"Invalid UUID\n"
#
# self.queue_uuid = dict_path['queue_uuid']
# else:
# self.queue_uuid = None
#
# if not ail_2_ail.is_ail_instance_queue(ail_uuid, dict_path['queue_uuid']):
# print('UUID not found')
# return http.HTTPStatus.FORBIDDEN, [], b"UUID not found\n"
# SYNC MODE
if not ail_2_ail.is_ail_instance_sync_enabled(self.ail_uuid, sync_mode=dict_path['sync_mode']):
sync_mode = dict_path['sync_mode']
redis_logger.warning(f'SYNC mode disabled: {self.remote_address} {ail_uuid} {sync_mode}')
print(f'SYNC mode disabled: {self.remote_address} {ail_uuid} {sync_mode}')
return http.HTTPStatus.FORBIDDEN, [], b"SYNC mode disabled\n"
# # TODO: CHECK API
elif dict_path['sync_mode'] == 'api':
pass
else:
print(f'Invalid path: {self.remote_address}')
redis_logger.info(f'Invalid path: {self.remote_address}')
return http.HTTPStatus.BAD_REQUEST, [], b"Invalid path\n"
###########################################
# # TODO: clean shutdown / kill all connections
# # TODO: API
# # TODO: Filter object
# # TODO: IP/uuid to block
if __name__ == '__main__':
host = 'localhost'
port = 4443
print('Launching Server...')
redis_logger.info('Launching Server...')
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
cert_dir = os.environ['AIL_FLASK']
ssl_context.load_cert_chain(certfile=os.path.join(cert_dir, 'server.crt'), keyfile=os.path.join(cert_dir, 'server.key'))
start_server = websockets.serve(ail_to_ail_serv, "localhost", 4443, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol)
print(f'Server Launched: wss://{host}:{port}')
redis_logger.info(f'Server Launched: wss://{host}:{port}')
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
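Note that the server reuses the certificate of the Flask web interface (server.crt / server.key under AIL_FLASK), which is typically self-signed; this is why ail_2_ail_client.py above disables hostname checking and certificate verification (ssl.CERT_NONE) on the client side.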


@@ -9,9 +9,16 @@ import redis
from abc import ABC
from flask import url_for
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
import Tag
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
config_loader = ConfigLoader.ConfigLoader()
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
config_loader = None
class AbstractObject(ABC):
"""
Abstract Object
@@ -22,7 +29,7 @@ class AbstractObject(ABC):
# - handle + refactor coorelations
# - creates others objects
def __init__(self, obj_type, id):
def __init__(self, obj_type, id, subtype=None):
""" Abstract for all the AIL object
:param obj_type: object type (item, ...)
@@ -30,17 +37,43 @@ class AbstractObject(ABC):
"""
self.id = id
self.type = obj_type
def get_type(self):
return self.type
        self.subtype = subtype
def get_id(self):
return self.id
def get_type(self):
return self.type
def get_subtype(self, r_str=False):
if not self.subtype:
if r_str:
return ''
return self.subtype
def get_default_meta(self):
dict_meta = {'id': self.get_id(),
'type': self.get_type()}
if self.subtype:
dict_meta['subtype'] = self.subtype
return dict_meta
def get_tags(self, r_set=False):
tags = Tag.get_obj_tag(self.id)
if r_set:
tags = set(tags)
return tags
## ADD TAGS ????
#def add_tags(self):
def _delete(self):
# DELETE TAGS
Tag.delete_obj_all_tags(self.id, self.type)
if self.type == 'item':
# delete tracker
pass
config_loader = ConfigLoader.ConfigLoader()
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
config_loader = None
def is_valid_object_type(object_type):
if object_type in ['domain', 'item', 'image', 'decoded']:


@@ -479,6 +479,15 @@ def is_crawler_activated():
def get_crawler_all_types():
return ['onion', 'regular']
def sanitize_crawler_types(l_crawler_types):
all_crawler_types = get_crawler_all_types()
if not l_crawler_types:
return all_crawler_types
for crawler_type in l_crawler_types:
if crawler_type not in all_crawler_types:
return all_crawler_types
return l_crawler_types
def get_all_spash_crawler_status():
crawler_metadata = []
all_crawlers = r_cache.smembers('all_splash_crawlers')
@@ -600,9 +609,40 @@ def api_set_nb_crawlers_to_launch(dict_splash_name):
else:
return ({'error':'invalid input'}, 400)
##-- CRAWLER GLOBAL --##
#### AUTOMATIC CRAWLER ####
def get_auto_crawler_all_domain(l_crawler_types=[]):
l_crawler_types = sanitize_crawler_types(l_crawler_types)
if len(l_crawler_types) == 1:
        return r_serv_onion.smembers(f'auto_crawler_url:{l_crawler_types[0]}')
else:
l_keys_name = []
for crawler_type in l_crawler_types:
l_keys_name.append(f'auto_crawler_url:{crawler_type}')
return r_serv_onion.sunion(l_keys_name[0], *l_keys_name[1:])
def add_auto_crawler_in_queue(domain, domain_type, port, epoch, delta, message):
r_serv_onion.zadd('crawler_auto_queue', int(time.time() + delta) , f'{message};{domain_type}')
# update list, last auto crawled domains
r_serv_onion.lpush('last_auto_crawled', f'{domain}:{port};{epoch}')
r_serv_onion.ltrim('last_auto_crawled', 0, 9)
def update_auto_crawler_queue():
current_epoch = int(time.time())
    # current_epoch = 1631096842  # debug value, overrides the real epoch
# check if current_epoch > domain_next_epoch
l_queue = r_serv_onion.zrangebyscore('crawler_auto_queue', 0, current_epoch)
for elem in l_queue:
mess, domain_type = elem.rsplit(';', 1)
print(domain_type)
print(mess)
r_serv_onion.sadd(f'{domain_type}_crawler_priority_queue', mess)
##-- AUTOMATIC CRAWLER --##
#### CRAWLER TASK ####
def create_crawler_task(url, screenshot=True, har=True, depth_limit=1, max_pages=100, auto_crawler=False, crawler_delta=3600, crawler_type=None, cookiejar_uuid=None, user_agent=None):
@@ -1350,6 +1390,7 @@ def save_test_ail_crawlers_result(test_success, message):
r_serv_onion.hset('crawler:tor:test', 'success', bool(test_success))
r_serv_onion.hset('crawler:tor:test', 'message', message)
# # FIXME: # TODO: stderr CATCH ?????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????
def test_ail_crawlers():
# # TODO: test regular domain
if not ping_splash_manager():
@@ -1448,10 +1489,14 @@ def test_ail_crawlers():
#### ---- ####
#if __name__ == '__main__':
if __name__ == '__main__':
# res = get_splash_manager_version()
# res = test_ail_crawlers()
# res = is_test_ail_crawlers_successful()
# print(res)
# print(get_test_ail_crawlers_message())
#print(get_all_queues_stats())
#res = get_auto_crawler_all_domain()
res = update_auto_crawler_queue()
print(res)


@@ -102,6 +102,13 @@ class Categ(AbstractModule):
if r_result:
return categ_found
# DIRTY FIX AIL SYNC
# # FIXME: DIRTY FIX
message = f'{item.get_type()};{item.get_subtype(r_str=True)};{item.get_id()}'
print(message)
self.send_message_to_queue(message, 'SyncModule')
if __name__ == '__main__':
# SCRIPT PARSER #


@@ -45,14 +45,17 @@ class Tags(AbstractModule):
if len(mess_split) == 2:
tag = mess_split[0]
item = Item(mess_split[1])
item_id = item.get_id()
# Create a new tag
Tag.add_tag('item', tag, item.get_id())
print(f'{item_id}: Tagged {tag}')
print(f'{item.get_id()}: Tagged {tag}')
# Forward message to channel
self.send_message_to_queue(message, 'MISP_The_Hive_feeder')
message = f'{item.get_type()};{item.get_subtype(r_str=True)};{item.get_id()}'
self.send_message_to_queue(message, 'Sync_module')
else:
# Malformed message
raise Exception(f'too many values to unpack (expected 2) given {len(mess_split)} with message {message}')


@@ -72,6 +72,7 @@ class AbstractModule(ABC):
ex: send_to_queue(item_id, 'Global')
"""
self.process.populate_set_out(message, queue_name)
# add to new set_module
def run(self):
"""
@@ -98,6 +99,8 @@ class AbstractModule(ABC):
print('TRACEBACK:')
for line in trace:
print(line)
# remove from set_module
## check if item process == completed
else:
self.computeNone()


@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import base64
import os
import re
@@ -534,7 +535,6 @@ def delete_item(obj_id):
if not exist_item(obj_id):
return False
else:
Tag.delete_obj_tags(obj_id, 'item', Tag.get_obj_tag(obj_id))
delete_item_duplicate(obj_id)
# delete MISP event
r_serv_metadata.delete('misp_events:{}'.format(obj_id))
@@ -564,6 +564,8 @@ def delete_item(obj_id):
### TODO in inport V2
# delete from tracked items
# # # TODO: # FIXME: LATER
# delete from queue
###
return False
@@ -626,6 +628,18 @@ class Item(AbstractObject):
"""
return item_basic.get_item_content(self.id)
def get_gzip_content(self, b64=False):
with open(self.get_filename(), 'rb') as f:
content = f.read()
if b64:
content = base64.b64encode(content)
return content.decode()
def get_ail_2_ail_payload(self):
payload = {'raw': self.get_gzip_content(b64=True),
'compress': 'gzip'}
return payload
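A minimal sketch of how a consumer could decode the payload built above, assuming it wants the plain text back (in AIL itself the base64/gzip content is forwarded as-is to the Mixer queue by Sync_importer.py):

    import base64
    import gzip

    def decode_ail_2_ail_payload(payload):
        # 'raw' is the base64-encoded, gzip-compressed item content
        raw = base64.b64decode(payload['raw'])
        if payload.get('compress') == 'gzip':
            raw = gzip.decompress(raw)
        return raw.decode()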
# # TODO:
def create(self):
pass
@@ -639,6 +653,22 @@ class Item(AbstractObject):
except FileNotFoundError:
return False
############################################################################
############################################################################
############################################################################
def exist_correlation(self):
pass
############################################################################
############################################################################
############################################################################
############################################################################
############################################################################
############################################################################
############################################################################
############################################################################
#if __name__ == '__main__':

View file

@ -63,11 +63,17 @@ def get_galaxy_from_tag(tag):
except IndexError:
return None
def get_active_taxonomies():
return r_serv_tags.smembers('active_taxonomies')
def get_active_taxonomies(r_set=False):
res = r_serv_tags.smembers('active_taxonomies')
if r_set:
return set(res)
return res
def get_active_galaxies():
return r_serv_tags.smembers('active_galaxies')
def get_active_galaxies(r_set=False):
res = r_serv_tags.smembers('active_galaxies')
if r_set:
return set(res)
return res
def get_all_taxonomies_tags(): # # TODO: add + REMOVE + Update
return r_serv_tags.smembers('active_taxonomies_tags')
@ -75,6 +81,44 @@ def get_all_taxonomies_tags(): # # TODO: add + REMOVE + Update
def get_all_galaxies_tags(): # # TODO: add + REMOVE + Update
return r_serv_tags.smembers('active_galaxies_tags')
def get_taxonomies_enabled_tags(r_list=False):
l_tag_keys = []
for taxonomie in get_active_taxonomies():
l_tag_keys.append(f'active_tag_{taxonomie}')
if len(l_tag_keys) > 1:
res = r_serv_tags.sunion(l_tag_keys[0], *l_tag_keys[1:])
elif l_tag_keys:
res = r_serv_tags.smembers(l_tag_keys[0])
else:
# no active taxonomy: avoid an undefined "res"
res = set()
if r_list:
return list(res)
else:
return res
def get_galaxies_enabled_tags():
l_tag_keys = []
for galaxy in get_active_galaxies():
l_tag_keys.append(f'active_tag_galaxies_{galaxy}')
if len(l_tag_keys) > 1:
return r_serv_tags.sunion(l_tag_keys[0], *l_tag_keys[1:])
elif l_tag_keys:
return r_serv_tags.smembers(l_tag_keys[0])
else:
return []
def get_taxonomie_enabled_tags(taxonomie, r_list=False):
res = r_serv_tags.smembers(f'active_tag_{taxonomie}')
if r_list:
return list(res)
else:
return res
def get_galaxy_enabled_tags(galaxy, r_list=False):
res = r_serv_tags.smembers(f'active_tag_galaxies_{galaxy}')
if r_list:
return list(res)
else:
return res
def is_taxonomie_tag_enabled(taxonomie, tag):
if tag in r_serv_tags.smembers('active_tag_' + taxonomie):
return True
@ -136,6 +180,67 @@ def is_valid_tags_taxonomies_galaxy(list_tags, list_tags_galaxy):
return False
return True
def is_taxonomie_tag(tag, namespace=None):
if not namespace:
namespace = tag.split(':')[0]
if namespace != 'misp-galaxy':
return True
else:
return False
def is_galaxy_tag(tag, namespace=None):
if not namespace:
namespace = tag.split(':')[0]
if namespace == 'misp-galaxy':
return True
else:
return False
# # TODO:
# def is_valid_tag(tag):
# pass
def is_enabled_tag(tag, enabled_namespace=None):
if is_taxonomie_tag(tag):
return is_enabled_taxonomie_tag(tag, enabled_taxonomies=enabled_namespace)
else:
return is_enabled_galaxy_tag(tag, enabled_galaxies=enabled_namespace)
def are_enabled_tags(tags):
enabled_taxonomies = get_active_taxonomies(r_set=True)
enabled_galaxies = get_active_galaxies(r_set=True)
for tag in tags:
if is_taxonomie_tag(tag):
res = is_enabled_taxonomie_tag(tag, enabled_taxonomies=enabled_taxonomies)
else:
res = is_enabled_galaxy_tag(tag, enabled_galaxies=enabled_galaxies)
if not res:
return False
return True
def is_enabled_taxonomie_tag(tag, enabled_taxonomies=None):
if not enabled_taxonomies:
enabled_taxonomies = get_active_taxonomies()
taxonomie = get_taxonomie_from_tag(tag)
if taxonomie is None:
return False
if taxonomie not in enabled_taxonomies:
return False
if not is_taxonomie_tag_enabled(taxonomie, tag):
return False
return True
def is_enabled_galaxy_tag(tag, enabled_galaxies=None):
if not enabled_galaxies:
enabled_galaxies = get_active_galaxies()
galaxy = get_galaxy_from_tag(tag)
if galaxy is None:
return False
if galaxy not in enabled_galaxies:
return False
if not is_galaxy_tag_enabled(galaxy, tag):
return False
return True
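These helpers let a caller validate a whole tag list against the currently enabled taxonomies and galaxies in one pass. A hypothetical caller (not in this diff) gating objects before they are queued for AIL SYNC might use them as follows:

# Editor's sketch, hypothetical caller: skip objects that carry a tag from a
# disabled taxonomy or galaxy before queuing them for sync.
def should_sync(obj_id):
    obj_tags = get_obj_tag(obj_id)   # existing helper in this module
    return are_enabled_tags(obj_tags)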
#### ####
def is_tag_in_all_tag(tag):
@ -144,6 +249,31 @@ def is_tag_in_all_tag(tag):
else:
return False
def get_tag_synonyms(tag):
return r_serv_tags.smembers(f'synonym_tag_{tag}')
def get_tag_dislay_name(tag):
tag_synonyms = get_tag_synonyms(tag)
if not tag_synonyms:
return tag
else:
return tag + ', ' + ', '.join(tag_synonyms)
def get_tags_selector_dict(tags):
list_tags = []
for tag in tags:
list_tags.append(get_tag_selector_dict(tag))
return list_tags
def get_tag_selector_dict(tag):
return {'name':get_tag_dislay_name(tag),'id':tag}
def get_tags_selector_data():
dict_selector = {}
dict_selector['active_taxonomies'] = get_active_taxonomies()
dict_selector['active_galaxies'] = get_active_galaxies()
return dict_selector
def get_min_tag(tag):
tag = tag.split('=')
if len(tag) > 1:
@ -421,7 +551,7 @@ def add_tag(object_type, tag, object_id, obj_date=None):
r_serv_tags.hincrby('daily_tags:{}'.format(datetime.date.today().strftime("%Y%m%d")), tag, 1)
def delete_obj_tag(object_type, object_id, tag, obj_date):
if object_type=="item": # # TODO: # FIXME: # REVIEW: rename me !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
if object_type=="item": # # TODO: # FIXME: # REVIEW: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
obj_date = get_obj_date(object_type, object_id)
r_serv_metadata.srem('tag:{}'.format(object_id), tag)
r_serv_tags.srem('{}:{}'.format(tag, obj_date), object_id)
@ -455,7 +585,7 @@ def api_delete_obj_tags(tags=[], object_id=None, object_type="item"):
if not tags:
return ({'status': 'error', 'reason': 'No Tag(s) specified'}, 400)
res = delete_obj_tags(object_id, object_type, tags=tags)
res = delete_obj_tags(object_id, object_type, tags)
if res:
return res
@ -464,13 +594,16 @@ def api_delete_obj_tags(tags=[], object_id=None, object_type="item"):
dict_res['id'] = object_id
return (dict_res, 200)
def delete_obj_tags(object_id, object_type, tags=[]):
def delete_obj_tags(object_id, object_type, tags):
obj_date = get_obj_date(object_type, object_id)
for tag in tags:
res = delete_tag(object_type, tag, object_id, obj_date=obj_date)
if res:
return res
def delete_obj_all_tags(obj_id, obj_type):
delete_obj_tags(obj_id, obj_type, get_obj_tag(obj_id))
def sanitise_tags_date_range(l_tags, date_from=None, date_to=None):
if date_from is None or date_to is None:
date_from = get_tags_min_last_seen(l_tags, r_int=False)

View file

@ -11,6 +11,9 @@ import ConfigLoader
from flask_login import UserMixin
def get_all_users():
return r_serv_db.hkeys('user:all')
class User(UserMixin):
def __init__(self, id):

View file

@ -2,6 +2,9 @@
subscribe = ZMQ_Global
publish = Redis_Mixer,Redis_preProcess1
[Sync_importer]
publish = Redis_Mixer,Redis_Tags
[Importer_Json]
publish = Redis_Mixer,Redis_Tags
@ -55,7 +58,7 @@ subscribe = Redis_Global
[Categ]
subscribe = Redis_Global
publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Urls,Redis_Credential,Redis_SourceCode,Redis_Cve,Redis_ApiKey
publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Urls,Redis_Credential,Redis_SourceCode,Redis_Cve,Redis_ApiKey,Redis_SyncModule
[CreditCards]
subscribe = Redis_CreditCards
@ -96,7 +99,11 @@ subscribe = Redis_ModuleStats
[Tags]
subscribe = Redis_Tags
publish = Redis_Tags_feed
publish = Redis_Tags_feed,Redis_SyncModule
# dirty fix
[Sync_module]
subscribe = Redis_SyncModule
[MISP_The_hive_feeder]
subscribe = Redis_Tags_feed

View file

@ -18,7 +18,7 @@ sudo apt-get install wget -qq
sudo apt-get install tor -qq
#Needed for bloom filters
sudo apt-get install libssl-dev libfreetype6-dev python-numpy -qq
sudo apt-get install libssl-dev libfreetype6-dev python3-numpy -qq
#pyMISP
#sudo apt-get -y install python3-pip
@ -43,8 +43,7 @@ sudo apt-get install build-essential libffi-dev automake autoconf libtool -qq
sudo apt-get install p7zip-full -qq
# SUBMODULES #
git submodule init
git submodule update
git submodule update --init
# REDIS #
test ! -d redis/ && git clone https://github.com/antirez/redis.git

View file

@ -9,6 +9,9 @@ redis==2.10.6
python-magic>0.4.15
yara-python>4.0.2
# AIL Sync
websockets>9.0
# Hashlib
crcmod
mmh3>2.5
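The new `websockets` requirement backs the AIL_2_AIL sync server and clients started from LAUNCH.sh. Purely as a hedged illustration of the library (the endpoint path, header name and message handling below are assumptions, not the actual ail_2_ail protocol):

# Editor's sketch only: hypothetical pull client built on the websockets package.
import asyncio
import websockets

async def pull(uri, api_key):
    # the Authorization header is an assumption, not the real ail_2_ail handshake
    async with websockets.connect(uri, extra_headers={'Authorization': api_key}) as ws:
        async for message in ws:
            print(message)  # a real client would hand this to the import pipeline

# asyncio.run(pull('wss://remote-ail:4443/ail-sync/pull', 'your-api-key'))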

update/v4.0/Update.py Executable file
View file

@ -0,0 +1,26 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import re
import sys
import time
import redis
import datetime
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_HOME'], 'update', 'bin'))
from ail_updater import AIL_Updater
class Updater(AIL_Updater):
"""default Updater."""
def __init__(self, version):
super(Updater, self).__init__(version)
if __name__ == '__main__':
updater = Updater('v4.0')
updater.run_update()

update/v4.0/Update.sh Executable file
View file

@ -0,0 +1,29 @@
#!/bin/bash
[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;
export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH
GREEN="\\033[1;32m"
DEFAULT="\\033[0;39m"
echo -e $GREEN"Shutting down AIL ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait
# SUBMODULES #
git submodule update
echo ""
echo -e $GREEN"Installing nose ..."$DEFAULT
pip3 install -U websockets
exit 0

View file

@ -45,6 +45,7 @@ from blueprints.import_export import import_export
from blueprints.objects_item import objects_item
from blueprints.hunters import hunters
from blueprints.old_endpoints import old_endpoints
from blueprints.ail_2_ail_sync import ail_2_ail_sync
Flask_dir = os.environ['AIL_FLASK']
@ -103,6 +104,7 @@ app.register_blueprint(import_export, url_prefix=baseUrl)
app.register_blueprint(objects_item, url_prefix=baseUrl)
app.register_blueprint(hunters, url_prefix=baseUrl)
app.register_blueprint(old_endpoints, url_prefix=baseUrl)
app.register_blueprint(ail_2_ail_sync, url_prefix=baseUrl)
# ========= =========#
# ========= Cookie name ========

View file

@ -0,0 +1,271 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
'''
Blueprint Flask: crawler splash endpoints: dashboard, onion crawler ...
'''
import os
import sys
import json
import random
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response, make_response
from flask_login import login_required, current_user, login_user, logout_user
sys.path.append('modules')
import Flask_config
# Import Role_Manager
from Role_Manager import create_user_db, check_password_strength, check_user_role_integrity
from Role_Manager import login_admin, login_analyst, login_read_only
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
import Tag
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import item_basic
import Tracker
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'core'))
import ail_2_ail
bootstrap_label = Flask_config.bootstrap_label
# ============ BLUEPRINT ============
ail_2_ail_sync = Blueprint('ail_2_ail_sync', __name__, template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates/ail_2_ail'))
# ============ VARIABLES ============
# ============ FUNCTIONS ============
def api_validator(api_response):
if api_response:
return Response(json.dumps(api_response[0], indent=2, sort_keys=True), mimetype='application/json'), api_response[1]
def create_json_response(data, status_code):
return Response(json.dumps(data, indent=2, sort_keys=True), mimetype='application/json'), status_code
# ============= ROUTES ==============
@ail_2_ail_sync.route('/settings/ail_2_ail', methods=['GET'])
@login_required
@login_admin
def ail_2_ail_dashboard():
ail_uuid = ail_2_ail.get_ail_uuid()
l_servers = ail_2_ail.get_all_running_sync_servers()
l_servers = ail_2_ail.get_ail_instances_metadata(l_servers)
return render_template("ail_2_ail_dashboard.html", ail_uuid=ail_uuid,
l_servers=l_servers)
######################
# #
#### AIL INSTANCE ####
# # TODO: add more metadata => queues + connections
@ail_2_ail_sync.route('/settings/ail_2_ail/servers', methods=['GET'])
@login_required
@login_admin
def ail_servers():
l_servers = ail_2_ail.get_all_ail_instances_metadata()
return render_template("ail_servers.html", l_servers=l_servers)
@ail_2_ail_sync.route('/settings/ail_2_ail/server/view', methods=['GET'])
@login_required
@login_admin
def ail_server_view():
ail_uuid = request.args.get('uuid')
server_metadata = ail_2_ail.get_ail_instance_metadata(ail_uuid,sync_queues=True)
server_metadata['sync_queues'] = ail_2_ail.get_queues_metadata(server_metadata['sync_queues'])
return render_template("view_ail_server.html", server_metadata=server_metadata,
bootstrap_label=bootstrap_label)
@ail_2_ail_sync.route('/settings/ail_2_ail/server/api/ping', methods=['GET'])
@login_required
@login_admin
def ail_server_api_ping():
ail_uuid = request.args.get('uuid')
input_dict = {"uuid": ail_uuid}
res = ail_2_ail.api_ping_remote_ail_server(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.ail_server_view', uuid=ail_uuid))
@ail_2_ail_sync.route('/settings/ail_2_ail/server/api/version', methods=['GET'])
@login_required
@login_admin
def ail_server_api_version():
ail_uuid = request.args.get('uuid')
input_dict = {"uuid": ail_uuid}
res = ail_2_ail.api_get_remote_ail_server_version(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.ail_server_view', uuid=ail_uuid))
@ail_2_ail_sync.route('/settings/ail_2_ail/server/add', methods=['GET', 'POST'])
@login_required
@login_admin
def ail_server_add():
if request.method == 'POST':
register_key = request.form.get("register_key")
ail_uuid = request.form.get("ail_uuid")
url = request.form.get("ail_url")
description = request.form.get("ail_description")
pull = request.form.get("ail_pull")
push = request.form.get("ail_push")
input_dict = {"uuid": ail_uuid, "url": url,
"description": description,
"pull": pull, "push": push}
if register_key:
input_dict['key'] = request.form.get("ail_key")
print(input_dict)
res = ail_2_ail.api_create_ail_instance(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.ail_server_view', uuid=res[0]))
else:
return render_template("add_ail_server.html")
@ail_2_ail_sync.route('/settings/ail_2_ail/server/edit', methods=['GET', 'POST'])
@login_required
@login_admin
def ail_server_edit():
ail_uuid = request.args.get('ail_uuid')
# stub view: return an empty response for now, like sync_queue_edit()
return ''
@ail_2_ail_sync.route('/settings/ail_2_ail/server/delete', methods=['GET'])
@login_required
@login_admin
def ail_server_delete():
ail_uuid = request.args.get('uuid')
input_dict = {"uuid": ail_uuid}
res = ail_2_ail.api_delete_ail_instance(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.ail_servers'))
@ail_2_ail_sync.route('/settings/ail_2_ail/server/sync_queues', methods=['GET'])
@login_required
@login_admin
def ail_server_sync_queues():
ail_uuid = request.args.get('uuid')
sync_queues = ail_2_ail.get_all_unregistred_queue_by_ail_instance(ail_uuid)
sync_queues = ail_2_ail.get_queues_metadata(sync_queues)
return render_template("register_queue.html", bootstrap_label=bootstrap_label,
ail_uuid=ail_uuid, sync_queues=sync_queues)
@ail_2_ail_sync.route('/settings/ail_2_ail/server/sync_queues/register', methods=['GET'])
@login_required
@login_admin
def ail_server_sync_queues_register():
ail_uuid = request.args.get('ail_uuid')
queue_uuid = request.args.get('queue_uuid')
input_dict = {"ail_uuid": ail_uuid, "queue_uuid": queue_uuid}
res = ail_2_ail.api_register_ail_to_sync_queue(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.ail_server_view', uuid=ail_uuid))
@ail_2_ail_sync.route('/settings/ail_2_ail/server/sync_queues/unregister', methods=['GET'])
@login_required
@login_admin
def ail_server_sync_queues_unregister():
ail_uuid = request.args.get('ail_uuid')
queue_uuid = request.args.get('queue_uuid')
input_dict = {"ail_uuid": ail_uuid, "queue_uuid": queue_uuid}
res = ail_2_ail.api_unregister_ail_to_sync_queue(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.ail_server_view', uuid=ail_uuid))
####################
# #
#### SYNC QUEUE ####
@ail_2_ail_sync.route('/settings/ail_2_ail/sync_queues', methods=['GET'])
# @login_required
# @login_admin
def sync_queues():
ail_uuid = request.args.get('ail_uuid')
l_queues = ail_2_ail.get_all_queues_metadata()
return render_template("sync_queues.html", bootstrap_label=bootstrap_label,
ail_uuid=ail_uuid, l_queues=l_queues)
@ail_2_ail_sync.route('/settings/ail_2_ail/sync_queue/view', methods=['GET'])
# @login_required
# @login_admin
def sync_queue_view():
queue_uuid = request.args.get('uuid')
queue_metadata = ail_2_ail.get_sync_queue_metadata(queue_uuid)
ail_servers = ail_2_ail.get_sync_queue_all_ail_instance(queue_uuid)
queue_metadata['ail_servers'] = ail_2_ail.get_ail_instances_metadata(ail_servers)
return render_template("view_sync_queue.html", queue_metadata=queue_metadata,
bootstrap_label=bootstrap_label)
@ail_2_ail_sync.route('/settings/ail_2_ail/sync_queue/add', methods=['GET', 'POST'])
@login_required
@login_admin
def sync_queue_add():
if request.method == 'POST':
queue_name = request.form.get("queue_name")
description = request.form.get("queue_description")
max_size = request.form.get("queue_max_size")
taxonomies_tags = request.form.get('taxonomies_tags')
if taxonomies_tags:
try:
taxonomies_tags = json.loads(taxonomies_tags)
except Exception:
taxonomies_tags = []
else:
taxonomies_tags = []
galaxies_tags = request.form.get('galaxies_tags')
if galaxies_tags:
try:
galaxies_tags = json.loads(galaxies_tags)
except Exception:
galaxies_tags = []
else:
galaxies_tags = []
tags = taxonomies_tags + galaxies_tags
input_dict = {"name": queue_name, "tags": tags,
"description": description,
"max_size": max_size}
res = ail_2_ail.api_create_sync_queue(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.sync_queue_view', uuid=res[0]))
else:
return render_template("add_sync_queue.html", tags_selector_data=Tag.get_tags_selector_data())
@ail_2_ail_sync.route('/settings/ail_2_ail/sync_queue/edit', methods=['GET', 'POST'])
# @login_required
# @login_admin
def sync_queue_edit():
return ''
@ail_2_ail_sync.route('/settings/ail_2_ail/sync_queue/delete', methods=['GET'])
# @login_required
# @login_admin
def sync_queue_delete():
queue_uuid = request.args.get('uuid')
input_dict = {"uuid": queue_uuid}
res = ail_2_ail.api_delete_sync_queue(input_dict)
if res[1] != 200:
return create_json_response(res[0], res[1])
return redirect(url_for('ail_2_ail_sync.sync_queues'))
#### JSON ####
## - - ##

View file

@ -94,6 +94,34 @@ def get_all_obj_tags():
return jsonify(res)
return jsonify(Tag.get_all_obj_tags(object_type))
@tags_ui.route('/tag/taxonomies/tags/enabled/json')
@login_required
@login_read_only
def tag_taxonomies_tags_enabled_json():
return jsonify(Tag.get_taxonomies_enabled_tags(r_list=True))
@tags_ui.route('/tag/galaxies/tags/enabled/json')
@login_required
@login_read_only
def tag_galaxies_tags_enabled_json():
tags = Tag.get_galaxies_enabled_tags()
return jsonify(Tag.get_tags_selector_dict(tags))
@tags_ui.route('/tag/taxonomie/tags/enabled/json')
@login_required
@login_read_only
def tag_taxonomie_tags_enabled_json():
taxonomie = request.args.get('taxonomie')
return jsonify(Tag.get_taxonomie_enabled_tags(taxonomie, r_list=True))
@tags_ui.route('/tag/galaxy/tags/enabled/json')
@login_required
@login_read_only
def tag_galaxy_tags_enabled_json():
galaxy = request.args.get('galaxy')
tags = Tag.get_galaxy_enabled_tags(galaxy, r_list=True)
return jsonify(Tag.get_tags_selector_dict(tags))
@tags_ui.route('/tag/search/item')
@login_required
@login_read_only

View file

@ -59,6 +59,13 @@ for name, tags in clusters.items(): #galaxie name + tags
def one():
return 1
# # TODO: replace me with get_tag_selector_dict()
def get_tags_with_synonyms(tag):
str_synonyms = ' - synonyms: '
synonyms = r_serv_tags.smembers('synonym_tag_' + tag)

Binary file not shown (new image, 12 KiB).

View file

@ -0,0 +1,170 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-Framework</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="card my-3">
<div class="card-header bg-dark text-white">
<h5 class="card-title">Create AIL Server</h5>
</div>
<div class="card-body">
<form action="{{ url_for('ail_2_ail_sync.ail_server_add') }}" method='post'>
<div class="row">
<div class="col-12 col-xl-9">
<div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend">
<div class="input-group-text bg-dark text-white"><i class="fas fa-quote-right"></i></div>
</div>
<input id="ail_uuid" name="ail_uuid" class="form-control" placeholder="AIL Server UUID" type="text" required>
</div>
<div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend">
<div class="input-group-text bg-secondary text-white"><i class="fas fa-server"></i></div>
</div>
<input id="ail_url" class="form-control" type="text" name="ail_url" placeholder="AIL Server URL">
</div>
<div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend">
<div class="input-group-text bg-info text-white"><i class="fas fa-pencil-alt"></i></div>
</div>
<textarea id="ail_description" name="ail_description" class="form-control" placeholder="AIL Server Description (optional)" rows="3"></textarea>
</div>
<h5 class="mt-2 text-secondary">
SYNC Modes:
</h5>
<div class="row">
<div class="col-12 col-xl-6">
<div class="custom-control custom-checkbox">
<input class="custom-control-input" type="checkbox" value="True" id="pull" name="ail_pull" checked>
<label class="custom-control-label" for="ail_pull"><h5>PULL</h5></label>
</div>
</div>
<div class="col-12 col-xl-6">
<div class="custom-control custom-checkbox">
<input class="custom-control-input" type="checkbox" value="True" id="ail_push" name="ail_push" checked>
<label class="custom-control-label" for="ail_push"><h5 class="">PUSH</h5></label>
</div>
</div>
</div>
<h5 class="mt-2 text-secondary">
Server Key <i class="fas fa-key"></i>:
</h5>
<div class="d-flex mt-2">
&nbsp;Generate Server Key&nbsp;&nbsp;
<div class="custom-control custom-switch">
<input class="custom-control-input" type="checkbox" name="register_key" value="True" id="register_key">
<label class="custom-control-label" for="register_key">
&nbsp;Register Server Key
</label>
</div>
</div>
<div class="mt-3" id="div_generate_key">
<b>A new key will be generated for this AIL Server</b>
</div>
<div id="div_register_key">
<div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend">
<div class="input-group-text bg-danger text-white"><i class="fas fa-key"></i></div>
</div>
<input id="ail_key" class="form-control" type="text" minlength="56" maxlength="56" name="ail_key" placeholder="AIL key">
</div>
</div>
</div>
<div class="col-12 col-xl-3">
SYNC: create a new AIL Server
</div>
</div>
<br>
<button class="btn btn-primary mt-2">
<i class="fas fa-plus"></i> Create AIL Server
</button>
</form>
</div>
</div>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$("#nav_ail_servers").addClass("active");
$("#div_register_key").hide();
$('#register_key').on("change", function () {
register_key_input_controler();
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
function register_key_input_controler() {
if($('#register_key').is(':checked')){
$("#div_generate_key").hide();
$("#div_register_key").show();
}else{
$("#div_generate_key").show();
$("#div_register_key").hide();
}
}
</script>

View file

@ -0,0 +1,139 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-Framework</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/tags.css') }}" rel="stylesheet" type="text/css" />
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/tags.js') }}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="card my-3">
<div class="card-header bg-dark text-white">
<h5 class="card-title">Create SYNC Queue</h5>
</div>
<div class="card-body">
<form action="{{ url_for('ail_2_ail_sync.sync_queue_add') }}" method='post' onsubmit="SubmitCreateQueue();">
<div class="row">
<div class="col-12 col-xl-9">
<div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend">
<div class="input-group-text bg-dark text-white"><i class="fas fa-quote-right"></i></div>
</div>
<input id="queue_name" name="queue_name" class="form-control" placeholder="Queue Name" type="text" required>
</div>
<div class="input-group form-group mb-2">
<div class="input-group-prepend">
<span class="input-group-text bg-success"><i class="fas fa-water"></i></span>
</div>
<input class="form-control" type="number" id="queue_max_size" name="queue_max_size" min="1" value="100" required>
<div class="input-group-append">
<span class="input-group-text">Queue Max Size</span>
</div>
</div>
<div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend">
<div class="input-group-text bg-info text-white"><i class="fas fa-pencil-alt"></i></div>
</div>
<textarea id="queue_description" name="queue_description" class="form-control" placeholder="Sync Queue Description (optional)" rows="3"></textarea>
</div>
<div class="card my-4">
<div class="card-header bg-secondary text-white">
<b>Tags Filter</b>
</div>
<div class="card-body">
{% include 'tags/block_tags_selector.html' %}
</div>
</div>
</div>
<div class="col-12 col-xl-3">
SYNC: create a new Sync Queue
</div>
</div>
<br>
<button class="btn btn-primary mt-2">
<i class="fas fa-plus"></i> Create Sync Queue
</button>
</form>
</div>
</div>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$("#nav_ail_servers").addClass("active");
$("#div_register_key").hide();
$('#register_key').on("change", function () {
register_key_input_controler();
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
function SubmitCreateQueue() {
var tags = ltags.getValue();
var tagsgalaxy = ltagsgalaxies.getValue();
console.log(tags);
console.log(tagsgalaxy);
$('#ltags').val(tags);
$('#ltagsgalaxies').val(tagsgalaxy);
return true;
}
</script>

View file

@ -0,0 +1,114 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-SYNC</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="d-flex justify-content-center my-4">
<div class="card border-secondary" style="max-width: 40rem;">
<div class="card-header bg-inf">
<h5 class="card-title">AIL UUID:</h5>
</div>
<div class="card-body text-dark">
<div class="">
{{ ail_uuid }}
</div>
</div>
</div>
</div>
<h1>Connected Servers:</h1>
<table id="table_servers" class="table table-striped border-primary">
<thead class="bg-dark text-white">
<tr>
<th>uuid</th>
<th>url</th>
<th>description</th>
<th>sync queues</th>
</tr>
</thead>
<tbody style="font-size: 15px;">
{% for dict_server in l_servers %}
<tr class="border-color: blue;">
<td>
<a href="{{ url_for('ail_2_ail_sync.ail_server_view') }}?uuid={{ dict_server['uuid'] }}">
{{ dict_server['uuid']}}
</a>
</td>
<td>{{ dict_server['url']}}</td>
<td>{{ dict_server['description']}}</td>
<td class="text-center">
{% for queue_uuid in dict_server['sync_queues'] %}
<a href="{{ url_for('ail_2_ail_sync.sync_queue_view') }}?uuid={{queue_uuid}}">
{{queue_uuid}}<br>
</a>
{% endfor %}
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$("#nav_ail_sync").addClass("active");
$('#table_servers').DataTable({
"aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
"iDisplayLength": 10,
"order": [[ 1, "desc" ]]
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>

View file

@ -0,0 +1,118 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-SYNC</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="my-4">
<a href="{{ url_for('ail_2_ail_sync.ail_server_add') }}">
<button type="button" class="btn btn-primary">
<i class="fas fa-plus"></i> Add AIL Server
</button>
</a>
</div>
<table id="table_servers" class="table table-striped border-primary">
<thead class="bg-dark text-white">
<tr>
<th>url</th>
<th></th>
<th>uuid</th>
<th>description</th>
<th>sync queues</th>
</tr>
</thead>
<tbody style="font-size: 15px;">
{% for dict_server in l_servers %}
<tr class="border-color: blue;">
<td>{{ dict_server['url']}}</td>
<td>
{% if dict_server['ping'] %}
<div style="color:Green;">
<i class="fas fa-check-circle fa-2x"></i>
</div>
{% else %}
<div style="color:Red;">
<i class="fas fa-times-circle fa-2x"></i>
</div>
{% endif %}
</td>
<td>
<a href="{{ url_for('ail_2_ail_sync.ail_server_view') }}?uuid={{ dict_server['uuid'] }}">
{{ dict_server['uuid']}}
</a>
</td>
<td>{{ dict_server['description']}}</td>
<td class="text-center">
{% for queue_uuid in dict_server['sync_queues'] %}
<a href="{{ url_for('ail_2_ail_sync.sync_queue_view') }}?uuid={{queue_uuid}}">
{{queue_uuid}}<br>
</a>
{% endfor %}
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$("#nav_ail_servers").addClass("active");
$('#table_servers').DataTable({
"aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
"iDisplayLength": 10,
"order": [[ 1, "desc" ]]
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>

View file

@ -0,0 +1,110 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-Framework</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="card">
<div class="card-header bg-dark text-white">
<h5 class="card-title">{{ail_uuid}} Register a SYNC Queue</h5>
</div>
<div class="card-body">
<table id="table_sync_queues" class="table table-striped border-primary">
<thead class="bg-dark text-white">
<tr>
<th>name</th>
<th>uuid</th>
<th>description</th>
<th>max size</th>
<th>Register Sync Queue</th>
</tr>
</thead>
<tbody style="font-size: 15px;">
{% for dict_queue in sync_queues %}
<tr class="border-color: blue;">
<td>{{ dict_queue['name']}}</td>
<td>
<a href="{{ url_for('ail_2_ail_sync.sync_queue_view') }}?uuid={{ dict_queue['uuid'] }}">
{{ dict_queue['uuid']}}
</a>
<div>
{% for tag in dict_queue['tags'] %}
<span class="badge badge-{{ bootstrap_label[loop.index0 % 5] }} pull-left">{{ tag }}</span>
{% endfor %}
</div>
</td>
<td>{{ dict_queue['description']}}</td>
<td>{{ dict_queue['max_size']}}</td>
<td class="text-right">
<a href="{{ url_for('ail_2_ail_sync.ail_server_sync_queues_register') }}?ail_uuid={{ ail_uuid }}&queue_uuid={{ dict_queue['uuid'] }}">
<button type="button" class="btn btn-primary"><i class="fas fa-plus"></i></button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$('#table_sync_queues').DataTable({
"aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
"iDisplayLength": 10,
"order": [[ 0, "desc" ]]
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>

View file

@ -0,0 +1,105 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-SYNC</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="my-4">
<a href="{{ url_for('ail_2_ail_sync.sync_queue_add') }}">
<button type="button" class="btn btn-primary">
<i class="fas fa-plus"></i> Create Sync Queue
</button>
</a>
</div>
<table id="table_servers" class="table table-striped border-primary">
<thead class="bg-dark text-white">
<tr>
<th>name</th>
<th>uuid</th>
<th>description</th>
<th>max size</th>
</tr>
</thead>
<tbody style="font-size: 15px;">
{% for dict_queue in l_queues %}
<tr class="border-color: blue;">
<td>{{ dict_queue['name']}}</td>
<td>
<a href="{{ url_for('ail_2_ail_sync.sync_queue_view') }}?uuid={{ dict_queue['uuid'] }}">
{{ dict_queue['uuid']}}
<div>
{% for tag in dict_queue['tags'] %}
<span class="badge badge-{{ bootstrap_label[loop.index0 % 5] }} pull-left">{{ tag }}</span>
{% endfor %}
</div>
</a>
</td>
<td>{{ dict_queue['description']}}</td>
<td>{{ dict_queue['max_size']}}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$("#navsync_queues").addClass("active");
$('#table_servers').DataTable({
"aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
"iDisplayLength": 10,
"order": [[ 1, "desc" ]]
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>

View file

@ -0,0 +1,217 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-Framework</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="card">
<div class="card-header bg-dark text-white">
<span class="badge badge-pill badge-light flex-row-reverse float-right">
{% if server_metadata['ping'] %}
<div style="color:Green;">
<i class="fas fa-check-circle fa-2x"></i>
PING
</div>
{% else %}
<div style="color:Red;">
<i class="fas fa-times-circle fa-2x"></i>
ERROR
</div>
{% endif %}
</span>
<h5 class="card-title">{{server_metadata['uuid']}}</h5>
</div>
<div class="card-body">
<table class="table table-borderless">
<tbody>
<tr>
<td class="text-right"><b>URL</b></td>
<td>
{{server_metadata['url']}}
{% if server_metadata['ping'] %}
<span style="color:Green;">
<i class="fas fa-check-circle"></i>
</span>
{% else %}
<span style="color:Red;">
<i class="fas fa-times-circle"></i>
</span>
{% endif %}
<div>
<a href="{{ url_for('ail_2_ail_sync.ail_server_api_ping') }}?uuid={{server_metadata['uuid']}}">
<button type="button" class="btn btn-primary px-1 py-0">
<i class="fas fa-redo-alt"></i> ping
</button>
</a>
</div>
</td>
</tr>
<tr>
<td class="text-right"><b>Api Key</b></td>
<td>
{{server_metadata['api_key']}}
</td>
</tr>
<tr>
<td class="text-right"><b>Description</b></td>
<td>
{{server_metadata['description']}}
</td>
</tr>
<tr>
<td class="text-right"><b>Pull</b></td>
<td>
{{server_metadata['pull']}}
</td>
</tr>
<tr>
<td class="text-right"><b>Push</b></td>
<td>
{{server_metadata['push']}}
</td>
</tr>
<tr>
<td class="text-right"><b>Version</b></td>
<td>
{{server_metadata['version']}}
<a href="{{ url_for('ail_2_ail_sync.ail_server_api_version') }}?uuid={{server_metadata['uuid']}}">
<button type="button" class="btn btn-info px-1 py-0">
<i class="fas fa-redo-alt"></i>
</button>
</a>
</td>
</tr>
</tbody>
</table>
{% if server_metadata['error']%}
<pre class="bg-dark text-white">
----------------------------
- ERROR -
----------------------------
{{server_metadata['error']}}
</pre>
{% endif %}
<div class="my-4">
<a href="{{ url_for('ail_2_ail_sync.ail_server_delete') }}?uuid={{server_metadata['uuid']}}">
<button type="button" class="btn btn-danger">
<i class="fas fa-trash-alt"></i> <b>Delete AIL Server</b>
</button>
</a>
</div>
<div class="card">
<div class="card-header bg-secondary text-white">
<h6 class="card-title"><b>SYNC QUEUES:</b></h6>
</div>
<div class="card-body">
<a href="{{ url_for('ail_2_ail_sync.ail_server_sync_queues') }}?uuid={{ server_metadata['uuid'] }}">
<button type="button" class="btn btn-primary my-4">
<i class="fas fa-plus"></i> <b>Register Sync Queue</b>
</button>
</a>
<table id="table_sync_queues" class="table table-striped border-primary">
<thead class="bg-dark text-white">
<tr>
<th>name</th>
<th>uuid</th>
<th>description</th>
<th>max size</th>
<th>Unregister Queue</th>
</tr>
</thead>
<tbody style="font-size: 15px;">
{% for dict_queue in server_metadata['sync_queues'] %}
<tr class="border-color: blue;">
<td>{{ dict_queue['name']}}</td>
<td>
<a href="{{ url_for('ail_2_ail_sync.sync_queue_view') }}?uuid={{ dict_queue['uuid'] }}">
{{ dict_queue['uuid']}}
</a>
<div>
{% for tag in dict_queue['tags'] %}
<span class="badge badge-{{ bootstrap_label[loop.index0 % 5] }} pull-left">{{ tag }}</span>
{% endfor %}
</div>
</td>
<td>{{ dict_queue['description']}}</td>
<td>{{ dict_queue['max_size']}}</td>
<td class="text-right">
<a href="{{ url_for('ail_2_ail_sync.ail_server_sync_queues_unregister') }}?ail_uuid={{ server_metadata['uuid'] }}&queue_uuid={{ dict_queue['uuid'] }}">
<button type="button" class="btn btn-danger"><i class="fas fa-trash-alt"></i></button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$('#table_sync_queues').DataTable({
"aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
"iDisplayLength": 10,
"order": [[ 0, "asc" ]]
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>

View file

@ -0,0 +1,159 @@
<!DOCTYPE html>
<html>
<head>
<title>AIL-Framework</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'settings/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="card">
<div class="card-header bg-dark text-white">
<h4 class="card-title">SYNC Queue: {{queue_metadata['uuid']}}</h4>
</div>
<div class="card-body">
<table class="table table-borderless">
<tbody>
<tr>
<td class="text-right"><b>Name</b></td>
<td>
{{queue_metadata['name']}}
</td>
</tr>
<tr>
<td class="text-right"><b>Tags Filter</b></td>
<td>
<div>
{% for tag in queue_metadata['tags'] %}
<span class="badge badge-{{ bootstrap_label[loop.index0 % 5] }} pull-left">{{ tag }}</span>
{% endfor %}
</div>
</td>
</tr>
<tr>
<td class="text-right"><b>Description</b></td>
<td>
{{queue_metadata['description']}}
</td>
</tr>
<tr>
<td class="text-right"><b>Max Size</b></td>
<td>
{{queue_metadata['max_size']}}
</td>
</tr>
</tbody>
</table>
<div class="my-4">
<a href="{{ url_for('ail_2_ail_sync.sync_queue_delete') }}?uuid={{queue_metadata['uuid']}}">
<button type="button" class="btn btn-danger">
<i class="fas fa-trash-alt"></i> <b>Delete Sync Queue</b>
</button>
</a>
</div>
<div class="card">
<div class="card-header bg-secondary text-white">
<h5 class="card-title"><b>AIL Server:</b></h5>
</div>
<div class="card-body">
<a href="{{ url_for('ail_2_ail_sync.ail_server_add') }}">
<button type="button" class="btn btn-primary my-4">
<i class="fas fa-plus"></i> <b>Create AIL Server</b>
</button>
</a>
<table id="table_sync_queues" class="table table-striped border-primary">
<thead class="bg-dark text-white">
<tr>
<th>uuid</th>
<th>url</th>
<th>description</th>
<th>pull</th>
<th>push</th>
<th></th>
</tr>
</thead>
<tbody style="font-size: 15px;">
{% for dict_server in queue_metadata['ail_servers'] %}
<tr class="border-color: blue;">
<td>{{ dict_server['uuid']}}</td>
<td>{{ dict_server['url']}}</td>
<td>{{ dict_server['description']}}</td>
<td>{{ dict_server['pull']}}</td>
<td>{{ dict_server['push']}}</td>
<td class="text-right">
<a href="{{ url_for('ail_2_ail_sync.ail_server_sync_queues_unregister') }}?ail_uuid={{ dict_server['uuid'] }}&queue_uuid={{ queue_metadata['uuid'] }}">
<button type="button" class="btn btn-danger"><i class="fas fa-trash-alt"></i></button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_sync').removeClass("text-muted");
$('#table_sync_queues').DataTable({
"aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
"iDisplayLength": 10,
"order": [[ 0, "asc" ]]
});
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>

View file

@ -17,6 +17,29 @@
</a>
</li>
</ul>
<h5 class="d-flex text-muted w-100" id="nav_sync">
<span>AIL SYNC</span>
</h5>
<ul class="nav flex-md-column flex-row navbar-nav justify-content-between w-100"> <!--nav-pills-->
<li class="nav-item">
<a class="nav-link" href="{{url_for('ail_2_ail_sync.ail_2_ail_dashboard')}}" id="nav_ail_sync">
<img src="{{ url_for('static', filename='image/ail.png')}}" alt="AIL servers" style="width:25px;">
<span>AIL SYNC</span>
</a>
</li>
<li class="nav-item">
<a class="nav-link" href="{{url_for('ail_2_ail_sync.ail_servers')}}" id="nav_ail_servers">
<i class="fas fa-server"></i>
<span>Servers</span>
</a>
</li>
<li class="nav-item">
<a class="nav-link" href="{{url_for('ail_2_ail_sync.sync_queues')}}" id="navsync_queues">
<i class="fas fa-stream"></i>
<span>Sync queues</span>
</a>
</li>
</ul>
<h5 class="d-flex text-muted w-100" id="nav_settings">
<span>Settings</span>
</h5>

View file

@ -0,0 +1,108 @@
<div class="input-group" >
<input id="ltags" type="text" class="form-control" autocomplete="off" style="width: 760px" name="taxonomies_tags">
</div>
<div class="dropdown">
<button type="button" class="btn btn-info dropdown-toggle mt-1 mb-3" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" id="dropdown-taxonomie">
Taxonomie Selected
</button>
<div class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdown-taxonomie"> <!-- TODO: make dropdown-scrollable -->
<h6 class="dropdown-header">Taxonomie Tags</h6>
<button class="dropdown-item" type="button" id="all-tags-taxonomies">All Tags <i class="fas fa-tags"></i></button>
<div class="dropdown-divider"></div>
{% for taxo in tags_selector_data['active_taxonomies'] %}
<button class="dropdown-item" type="button" id="{{ taxo }}-id{{ loop.index0 }}">{{ taxo }}</button>
{% endfor %}
</div>
</div>
<div class="input-group">
<input id="ltagsgalaxies" type="text" class="form-control" autocomplete="off" style="width: 760px" name="galaxies_tags">
</div>
<div class="dropdown">
<button type="button" class="btn btn-info dropdown-toggle mt-1" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" id="dropdown-galaxy">
Galaxy Selected
</button>
<div class="dropdown-menu dropdown-menu-right" aria-labelledby="dropdown-galaxy"> <!-- TODO: make dropdown-scrollable -->
<h6 class="dropdown-header">Galaxy Tags</h6>
<button class="dropdown-item" type="button" id="all-tags-galaxies">All Tags <i class="fas fa-tags"></i></button>
<div class="dropdown-divider"></div>
{% for galaxy in tags_selector_data['active_galaxies'] %}
<button class="dropdown-item" type="button" id="{{ galaxy }}-idgalax{{ loop.index0 }}">{{ galaxy }}</button>
{% endfor %}
</div>
</div>
<!--
<button class="btn btn-primary" onclick="tagsSelector()">
<i class="fas fa-plus"></i>
Add Tags
</button>
-->
<script>
var ltags;
var ltagsgalaxies;
$.getJSON("{{ url_for('tags_ui.tag_taxonomies_tags_enabled_json') }}",
function(data) {
ltags = $('#ltags').tagSuggest({
data: data,
maxDropHeight: 200,
name: 'ltags'
});
});
$.getJSON("{{ url_for('tags_ui.tag_galaxies_tags_enabled_json') }}",
function(data) {
ltagsgalaxies = $('#ltagsgalaxies').tagSuggest({
data: data,
maxDropHeight: 200,
name: 'ltagsgalaxies'
});
});
jQuery("#all-tags-taxonomies").click(function(e){
//change input tags list
$.getJSON("{{ url_for('tags_ui.tag_taxonomies_tags_enabled_json') }}",
function(data) {
ltags.setData(data)
});
});
jQuery("#all-tags-galaxies").click(function(e){
$.getJSON("{{ url_for('tags_ui.tag_galaxies_tags_enabled_json') }}",
function(data) {
ltagsgalaxies.setData(data)
});
});
{% for taxo in tags_selector_data['active_taxonomies'] %}
jQuery("#{{ taxo }}-id{{ loop.index0 }}").click(function(e){
$.getJSON("{{ url_for('tags_ui.tag_taxonomie_tags_enabled_json') }}?taxonomie={{ taxo }}",
function(data) {
ltags.setData(data)
});
});
{% endfor %}
{% for galaxy in tags_selector_data['active_galaxies'] %}
jQuery("#{{ galaxy }}-idgalax{{ loop.index0 }}").click(function(e){
$.getJSON("{{ url_for('tags_ui.tag_galaxy_tags_enabled_json') }}?galaxy={{ galaxy }}",
function(data) {
ltagsgalaxies.setData(data)
});
});
{% endfor %}
// function tagsSelector() {
// var tags = ltags.getValue()
// var tagsgalaxy = ltagsgalaxies.getValue()
// window.location.replace("myurl?tags=" + tags + "&tagsgalaxies=" + tagsgalaxy);
//}
</script>