chg: [logs] add new logger

Terrtia 2023-05-12 15:29:53 +02:00
parent 98cbaad1ad
commit 9b2e8718d7
50 changed files with 259 additions and 282 deletions


@@ -142,13 +142,7 @@ function launching_logs {
     screen -dmS "Logging_AIL"
     sleep 0.1
     echo -e $GREEN"\t* Launching logging process"$DEFAULT
-    screen -S "Logging_AIL" -X screen -t "LogQueue" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Queuing -l ../logs/ ${syslog_cmd}; read x"
-    sleep 0.1
     screen -S "Logging_AIL" -X screen -t "LogScript" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Script -l ../logs/ ${syslog_cmd}; read x"
-    sleep 0.1
-    screen -S "Logging_AIL" -X screen -t "LogSync" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Sync -l ../logs/ ${syslog_cmd}; read x"
-    sleep 0.1
-    screen -S "Logging_AIL" -X screen -t "LogCrawler" bash -c "cd ${AIL_BIN}; ${AIL_VENV}/bin/log_subscriber -p 6380 -c Crawler -l ../logs/ ${syslog_cmd}; read x"
 }

 function checking_configuration {
@@ -197,8 +191,6 @@ function launching_scripts {
     sleep 0.1
     screen -S "Script_AIL" -X screen -t "D4_client" bash -c "cd ${AIL_BIN}/core; ${ENV_PY} ./D4_client.py; read x"
     sleep 0.1
-    screen -S "Script_AIL" -X screen -t "DbCleaner" bash -c "cd ${AIL_BIN}/core; ${ENV_PY} ./DbCleaner.py; read x"
-    sleep 0.1
     screen -S "Script_AIL" -X screen -t "UpdateBackground" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./update-background.py; read x"
     sleep 0.1


@@ -5,13 +5,14 @@ import os
 import sys
 import argparse
+import logging.config
 import traceback
 import smtplib
-from pubsublogger import publisher
 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText

 sys.path.append(os.environ['AIL_BIN'])
+from lib import ail_logger
 from lib import ConfigLoader

 """
@@ -20,8 +21,8 @@ This module allows the global configuration and management of notification settings
 config_loader = ConfigLoader.ConfigLoader()

-publisher.port = 6380
-publisher.channel = "Script"
+logging.config.dictConfig(ail_logger.get_config())
+logger = logging.getLogger()

 def sendEmailNotification(recipient, mail_subject, mail_body):
@@ -34,10 +35,7 @@ def sendEmailNotification(recipient, mail_subject, mail_body):
     sender_pw = None

     # raise an exception if any of these is None
-    if (sender is None or
-        sender_host is None or
-        sender_port is None
-        ):
+    if sender is None or sender_host is None or sender_port is None:
         raise Exception('SMTP configuration (host, port, sender) is missing or incomplete!')

     try:
@@ -70,7 +68,7 @@ def sendEmailNotification(recipient, mail_subject, mail_body):
     except Exception as err:
         traceback.print_tb(err.__traceback__)
-        publisher.warning(err)
+        logger.warning(err)

 if __name__ == '__main__':


@@ -36,7 +36,7 @@ class D4Client(AbstractModule):
         self.last_refresh = time.time()

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def compute(self, dns_record):
         # Refresh D4 Client


@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-"""
-The DbCleaner Module
-===================
-"""
-import os
-import sys
-import time
-import datetime
-
-sys.path.append(os.environ['AIL_BIN'])
-##################################
-# Import Project packages
-##################################
-from pubsublogger import publisher
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-    publisher.info("DbCleaner started")
-
-    # low priority
-    time.sleep(180)
-
-    daily_cleaner = True
-    current_date = datetime.datetime.now().strftime("%Y%m%d")
-
-    while True:
-        if daily_cleaner:
-            daily_cleaner = False
-        else:
-            sys.exit(0)
-
-        time.sleep(600)
-        new_date = datetime.datetime.now().strftime("%Y%m%d")
-        if new_date != current_date:
-            current_date = new_date
-            daily_cleaner = True


@@ -46,7 +46,7 @@ class Sync_importer(AbstractModule):
         # self.last_refresh = time.time()

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} Launched')
+        self.logger.info(f'Module {self.module_name} Launched')

     def run(self):
         while self.proceed:
@@ -63,7 +63,7 @@ class Sync_importer(AbstractModule):
             else:
                 self.computeNone()
                 # Wait before next process
-                self.redis_logger.debug(f"{self.module_name}, waiting for new message, Idling {self.pending_seconds}s")
+                self.logger.debug(f"{self.module_name}, waiting for new message, Idling {self.pending_seconds}s")
                 time.sleep(self.pending_seconds)

     def compute(self, ail_stream):


@@ -42,7 +42,7 @@ class Sync_module(AbstractModule):
         print(self.dict_sync_queues)

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} Launched')
+        self.logger.info(f'Module {self.module_name} Launched')

     def compute(self, message):


@@ -12,7 +12,6 @@ import uuid
 import subprocess

 from flask import escape
-from pubsublogger import publisher

 sys.path.append(os.environ['AIL_BIN'])
 ##################################
@@ -47,11 +46,6 @@ WEBSOCKETS_CLOSE_CODES = {
     1015: 'TLS Handshake',
 }

-#### LOGS ####
-# redis_logger = publisher
-# redis_logger.port = 6380
-# redis_logger.channel = 'Sync'

 def get_websockets_close_message(code):
     if code in WEBSOCKETS_CLOSE_CODES:
         msg = f'{code} {WEBSOCKETS_CLOSE_CODES[code]}'


@@ -4,10 +4,10 @@
 import argparse
 import json
 import os
+import logging.config
 import sys
 import time
 import traceback
-from pubsublogger import publisher
 from urllib.parse import urljoin

 import asyncio
@@ -19,9 +19,15 @@ sys.path.append(os.environ['AIL_BIN'])
 ##################################
 # Import Project packages
 ##################################
+from lib import ail_logger
 from core import ail_2_ail
 from lib.ConfigLoader import ConfigLoader

+#### LOGS ####
+logging.config.dictConfig(ail_logger.get_config(name='syncs'))
+logger = logging.getLogger()
+
 config_loader = ConfigLoader()
 local_addr = config_loader.get_config_str('AIL_2_AIL', 'local_addr')
 if not local_addr or local_addr == None:
@@ -30,13 +36,6 @@ else:
     local_addr = (local_addr, 0)
 config_loader = None

-#### LOGS ####
-redis_logger = publisher
-redis_logger.port = 6380
-redis_logger.channel = 'Sync'
-##-- LOGS --##

 ####################################################################

 class AIL2AILClient(object):
@@ -154,29 +153,29 @@ async def ail_to_ail_client(ail_uuid, sync_mode, api, ail_key=None, client_id=None):
         error_message = str(e)
         if error_message:
             sys.stderr.write(error_message)
-            redis_logger.warning(f'{ail_uuid}: {error_message}')
+            logger.warning(f'{ail_uuid}: {error_message}')
             ail_2_ail.save_ail_server_error(ail_uuid, error_message)
     except websockets.exceptions.ConnectionClosedError as e:
         error_message = ail_2_ail.get_websockets_close_message(e.code)
         sys.stderr.write(error_message)
-        redis_logger.info(f'{ail_uuid}: {error_message}')
+        logger.info(f'{ail_uuid}: {error_message}')
         ail_2_ail.save_ail_server_error(ail_uuid, error_message)
     except websockets.exceptions.InvalidURI as e:
         error_message = f'Invalid AIL url: {e.uri}'
         sys.stderr.write(error_message)
-        redis_logger.warning(f'{ail_uuid}: {error_message}')
+        logger.warning(f'{ail_uuid}: {error_message}')
         ail_2_ail.save_ail_server_error(ail_uuid, error_message)
     except ConnectionError as e:
         error_message = str(e)
         sys.stderr.write(error_message)
-        redis_logger.info(f'{ail_uuid}: {error_message}')
+        logger.info(f'{ail_uuid}: {error_message}')
         ail_2_ail.save_ail_server_error(ail_uuid, error_message)
     # OSError: Multiple exceptions
     except OSError as e:  # # TODO: check if we need to check if is connection error
         error_message = str(e)
         sys.stderr.write(error_message)
-        redis_logger.info(f'{ail_uuid}: {error_message}')
+        logger.info(f'{ail_uuid}: {error_message}')
         ail_2_ail.save_ail_server_error(ail_uuid, error_message)
     except websockets.exceptions.ConnectionClosedOK as e:
         print('connection closed')
@@ -186,7 +185,7 @@ async def ail_to_ail_client(ail_uuid, sync_mode, api, ail_key=None, client_id=None):
         trace = str(trace)
         error_message = f'{trace}\n{str(err)}'
         sys.stderr.write(error_message)
-        redis_logger.critical(f'{ail_uuid}: {error_message}')
+        logger.critical(f'{ail_uuid}: {error_message}')
         ail_2_ail.save_ail_server_error(ail_uuid, error_message)

     ail_2_ail.delete_sync_client_cache(client_id)


@@ -3,6 +3,7 @@

 import json
 import os
+import logging.config
 import sys
 import uuid
@@ -15,21 +16,20 @@ sys.path.append(os.environ['AIL_BIN'])
 ##################################
 # Import Project packages
 ##################################
-from pubsublogger import publisher
+from lib import ail_logger
 from core import ail_2_ail
 from lib.ConfigLoader import ConfigLoader

+logging.config.dictConfig(ail_logger.get_config(name='syncs'))
+logger = logging.getLogger()
+
 config_loader = ConfigLoader()
 host = config_loader.get_config_str('AIL_2_AIL', 'server_host')
 port = config_loader.get_config_int('AIL_2_AIL', 'server_port')
 config_loader = None

-# # TODO: refactor logging
-#### LOGS ####
-redis_logger = publisher
-redis_logger.port = 6380
-redis_logger.channel = 'Sync'

 #############################

 CONNECTED_CLIENTS = {}
@@ -80,7 +80,7 @@ async def server_controller():
             connected_clients = CONNECTED_CLIENTS[ail_uuid].copy()
             for c_websocket in connected_clients:
                 await c_websocket.close(code=1000)
-            redis_logger.info(f'Server Command Connection closed: {ail_uuid}')
+            logger.info(f'Server Command Connection closed: {ail_uuid}')
             print(f'Server Command Connection closed: {ail_uuid}')

         await asyncio.sleep(10)
@@ -91,7 +91,7 @@ async def register(websocket):
     ail_uuid = websocket.ail_uuid
     remote_address = websocket.remote_address
     sync_mode = websocket.sync_mode
-    redis_logger.info(f'Client Connected: {ail_uuid} {remote_address}')
+    logger.info(f'Client Connected: {ail_uuid} {remote_address}')
     print(f'Client Connected: {ail_uuid} {remote_address}')

     if not ail_uuid in CONNECTED_CLIENTS:
@@ -201,7 +201,7 @@ async def ail_to_ail_serv(websocket, path):
             if sync_mode == 'pull':
                 await pull(websocket, websocket.ail_uuid)
                 await websocket.close()
-                redis_logger.info(f'Connection closed: {ail_uuid} {remote_address}')
+                logger.info(f'Connection closed: {ail_uuid} {remote_address}')
                 print(f'Connection closed: {ail_uuid} {remote_address}')

             elif sync_mode == 'push':
@@ -210,7 +210,7 @@ async def ail_to_ail_serv(websocket, path):
             elif sync_mode == 'api':
                 await api(websocket, websocket.ail_uuid, path['api'])
                 await websocket.close()
-                redis_logger.info(f'Connection closed: {ail_uuid} {remote_address}')
+                logger.info(f'Connection closed: {ail_uuid} {remote_address}')
                 print(f'Connection closed: {ail_uuid} {remote_address}')

     finally:
@@ -234,12 +234,12 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):
         # API TOKEN
         api_key = request_headers.get('Authorization', '')
         if api_key is None:
-            redis_logger.warning(f'Missing token: {self.remote_address}')
+            logger.warning(f'Missing token: {self.remote_address}')
             print(f'Missing token: {self.remote_address}')
             return http.HTTPStatus.UNAUTHORIZED, [], b"Missing token\n"

         if not ail_2_ail.is_allowed_ail_instance_key(api_key):
-            redis_logger.warning(f'Invalid token: {self.remote_address}')
+            logger.warning(f'Invalid token: {self.remote_address}')
             print(f'Invalid token: {self.remote_address}')
             return http.HTTPStatus.UNAUTHORIZED, [], b"Invalid token\n"
@@ -247,20 +247,20 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):
         try:
             dict_path = unpack_path(path)
         except Exception as e:
-            redis_logger.warning(f'Invalid path: {self.remote_address}')
+            logger.warning(f'Invalid path: {self.remote_address}')
             print(f'Invalid path: {self.remote_address}')
             return http.HTTPStatus.BAD_REQUEST, [], b"Invalid path\n"

         ail_uuid = ail_2_ail.get_ail_instance_by_key(api_key)
         if ail_uuid != dict_path['ail_uuid']:
-            redis_logger.warning(f'Invalid token: {self.remote_address} {ail_uuid}')
+            logger.warning(f'Invalid token: {self.remote_address} {ail_uuid}')
             print(f'Invalid token: {self.remote_address} {ail_uuid}')
             return http.HTTPStatus.UNAUTHORIZED, [], b"Invalid token\n"

         if not api_key != ail_2_ail.get_ail_instance_key(api_key):
-            redis_logger.warning(f'Invalid token: {self.remote_address} {ail_uuid}')
+            logger.warning(f'Invalid token: {self.remote_address} {ail_uuid}')
             print(f'Invalid token: {self.remote_address} {ail_uuid}')
             return http.HTTPStatus.UNAUTHORIZED, [], b"Invalid token\n"
@@ -288,7 +288,7 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):
         # SYNC MODE
         if not ail_2_ail.is_ail_instance_sync_enabled(self.ail_uuid, sync_mode=self.sync_mode):
             sync_mode = self.sync_mode
-            redis_logger.warning(f'SYNC mode disabled: {self.remote_address} {ail_uuid} {sync_mode}')
+            logger.warning(f'SYNC mode disabled: {self.remote_address} {ail_uuid} {sync_mode}')
             print(f'SYNC mode disabled: {self.remote_address} {ail_uuid} {sync_mode}')
             return http.HTTPStatus.FORBIDDEN, [], b"SYNC mode disabled\n"
@@ -298,7 +298,7 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):
         else:
             print(f'Invalid path: {self.remote_address}')
-            redis_logger.info(f'Invalid path: {self.remote_address}')
+            logger.info(f'Invalid path: {self.remote_address}')
             return http.HTTPStatus.BAD_REQUEST, [], b"Invalid path\n"

 ###########################################
@@ -310,7 +310,7 @@ class AIL_2_AIL_Protocol(websockets.WebSocketServerProtocol):

 if __name__ == '__main__':
     print('Launching Server...')
-    redis_logger.info('Launching Server...')
+    logger.info('Launching Server...')

     ail_2_ail.clear_server_connected_clients()
@@ -321,7 +321,7 @@ if __name__ == '__main__':
     start_server = websockets.serve(ail_to_ail_serv, host, port, ssl=ssl_context, create_protocol=AIL_2_AIL_Protocol, max_size=None)

     print(f'Server Launched: wss://{host}:{port}')
-    redis_logger.info(f'Server Launched: wss://{host}:{port}')
+    logger.info(f'Server Launched: wss://{host}:{port}')

     loop = asyncio.get_event_loop()
     # server command


@@ -2,6 +2,7 @@
 # -*-coding:UTF-8 -*

 import os
+import logging.config
 import sys
 import time
@@ -12,17 +13,21 @@ sys.path.append(os.environ['AIL_BIN'])
 # Import Project packages
 ##################################
 from modules.abstract_module import AbstractModule
+from lib import ail_logger
 from lib import crawlers
 from lib.ConfigLoader import ConfigLoader
 from lib.objects.Domains import Domain
 from lib.objects.Items import Item
 from lib.objects import Screenshots

+logging.config.dictConfig(ail_logger.get_config(name='crawlers'))

 class Crawler(AbstractModule):

     def __init__(self):
-        super(Crawler, self, ).__init__(logger_channel='Crawler')
+        super(Crawler, self, ).__init__()
+        self.logger = logging.getLogger(f'{self.__class__.__name__}')

         # Waiting time in seconds between to message processed
         self.pending_seconds = 1
@@ -58,7 +63,7 @@ class Crawler(AbstractModule):
         self.placeholder_screenshots = {'27e14ace10b0f96acd2bd919aaa98a964597532c35b6409dff6cc8eec8214748'}

         # Send module state to logs
-        self.redis_logger.info('Crawler initialized')
+        self.logger.info('Crawler initialized')

     def refresh_lacus_status(self):
         try:
@@ -209,7 +214,7 @@ class Crawler(AbstractModule):
             print(entries.keys())
             if 'error' in entries:
                 # TODO IMPROVE ERROR MESSAGE
-                self.redis_logger.warning(str(entries['error']))
+                self.logger.warning(str(entries['error']))
                 print(entries['error'])
             if entries.get('html'):
                 print('retrieved content')
@@ -221,7 +226,7 @@ class Crawler(AbstractModule):
             current_domain = unpacked_last_url['domain']
             # REDIRECTION TODO CHECK IF TYPE CHANGE
             if current_domain != self.domain.id and not self.root_item:
-                self.redis_logger.warning(f'External redirection {self.domain.id} -> {current_domain}')
+                self.logger.warning(f'External redirection {self.domain.id} -> {current_domain}')
                 print(f'External redirection {self.domain.id} -> {current_domain}')
                 if not self.root_item:
                     self.domain = Domain(current_domain)


@@ -80,7 +80,7 @@ class MailExporter(AbstractExporter, ABC):
             return smtp_server
         # except Exception as err:
         #     traceback.print_tb(err.__traceback__)
-        #     publisher.warning(err)
+        #     logger.warning(err)

     def _export(self, recipient, subject, body):
         mime_msg = MIMEMultipart()
@@ -95,7 +95,7 @@ class MailExporter(AbstractExporter, ABC):
             smtp_client.quit()
         # except Exception as err:
         #     traceback.print_tb(err.__traceback__)
-        #     publisher.warning(err)
+        #     logger.warning(err)
         print(f'Send notification: {subject} to {recipient}')

 class MailExporterTracker(MailExporter):

bin/lib/ail_logger.py (new executable file, 33 additions)

@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import os
+import json
+import sys
+import logging
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from lib.ConfigLoader import ConfigLoader
+
+config_loader = ConfigLoader()
+r_db = config_loader.get_db_conn("Kvrocks_DB")
+config_loader = None
+
+LOGGING_CONFIG = os.path.join(os.environ['AIL_HOME'], 'configs', 'logging.json')
+
+def get_config(name=None):
+    if not name:
+        name = 'ail_warning.log'
+    else:
+        name = f'{name}.log'
+    with open(LOGGING_CONFIG, 'r') as f:
+        config = json.load(f)
+    config['handlers']['file']['filename'] = os.path.join(os.environ['AIL_HOME'], 'logs', name)
+    return config
+
+if __name__ == '__main__':
+    pass
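The new helper only rewrites handlers['file']['filename'] inside configs/logging.json, so that file must define at least a handler named "file". The JSON config itself is not part of this diff; the following is a hypothetical sketch of a dictConfig-schema document that would satisfy get_config(), written as the Python dict json.load() would return (handler classes, levels and format strings are assumptions, not the committed values):

# Hypothetical contents of configs/logging.json (not in this commit),
# expressed as the dict passed to logging.config.dictConfig().
LOGGING_SKETCH = {
    'version': 1,
    'formatters': {
        'simple': {'format': '%(asctime)s %(name)s %(levelname)s %(message)s'},
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'INFO',
            'formatter': 'simple',
        },
        # get_config() overwrites this handler's 'filename' with
        # $AIL_HOME/logs/<name>.log (ail_warning.log when name is None).
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'level': 'WARNING',
            'formatter': 'simple',
            'filename': 'placeholder.log',  # replaced at load time by get_config()
            'maxBytes': 1000000,
            'backupCount': 1,
        },
    },
    'root': {'level': 'INFO', 'handlers': ['console', 'file']},
}

Callers then follow the pattern this commit introduces everywhere: logging.config.dictConfig(ail_logger.get_config(name='syncs')) once at import time, and logging.getLogger() wherever a logger is needed.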


@@ -116,15 +116,20 @@ class Decoded(AbstractDaterangeObject):
     def get_filepath(self, mimetype=None):
         return os.path.join(os.environ['AIL_HOME'], self.get_rel_path(mimetype=mimetype))

-    def get_content(self, mimetype=None, r_str=False):
+    def get_content(self, mimetype=None, r_type='str'):
         filepath = self.get_filepath(mimetype=mimetype)
-        if r_str:
+        if r_type == 'str':
             with open(filepath, 'r') as f:
                 content = f.read()
-        else:
+            return content
+        elif r_type == 'bytes':
+            with open(filepath, 'rb') as f:
+                content = f.read()
+            return content
+        elif r_str == 'bytesio':
             with open(filepath, 'rb') as f:
                 content = BytesIO(f.read())
-        return content
+            return content

     def get_zip_content(self):
         # mimetype = self.get_estimated_type()


@@ -80,19 +80,19 @@ class Item(AbstractObject):
         else:
             return filename

-    def get_content(self, r_str=True, binary=False):
+    def get_content(self, r_type='str'):
         """
         Returns Item content
         """
-        if binary:
-            return item_basic.get_item_content_binary(self.id)
-        else:
+        if r_type == 'str':
             return item_basic.get_item_content(self.id)
+        elif r_type == 'bytes':
+            return item_basic.get_item_content_binary(self.id)

     def get_raw_content(self, decompress=False):
         filepath = self.get_filename()
         if decompress:
-            raw_content = BytesIO(self.get_content(binary=True))
+            raw_content = BytesIO(self.get_content(r_type='bytes'))
         else:
             with open(filepath, 'rb') as f:
                 raw_content = BytesIO(f.read())
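Both get_content() rewrites replace boolean flags with a single r_type selector, an API break this commit also propagates to callers such as the Duplicates module (binary=True becomes r_type='bytes'). A minimal migration sketch, assuming an AIL environment (the item id below is made up for illustration):

# Hypothetical usage sketch; the item id is illustrative only.
from lib.objects.Items import Item

item = Item('submitted/2023/05/12/example.gz')  # hypothetical id
text = item.get_content()                       # default r_type='str', as before
raw = item.get_content(r_type='bytes')          # was: item.get_content(binary=True)

Decoded.get_content() follows the same pattern with an extra BytesIO branch; note that, as committed, that branch still tests the removed parameter name (elif r_str == 'bytesio'), so it would raise a NameError if it were ever reached.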


@@ -6,6 +6,7 @@ Regex Helper
 """
 import os
+import logging.config
 import re
 import sys
 import uuid
@@ -13,24 +14,24 @@ import uuid
 from multiprocessing import Process as Proc

 sys.path.append(os.environ['AIL_BIN'])
-from pubsublogger import publisher

 sys.path.append(os.environ['AIL_BIN'])
 ##################################
 # Import Project packages
 ##################################
+from lib import ail_logger
 from lib import ConfigLoader
 # from lib import Statistics

+logging.config.dictConfig(ail_logger.get_config())
+logger = logging.getLogger()

 ## LOAD CONFIG ##
 config_loader = ConfigLoader.ConfigLoader()
 r_serv_cache = config_loader.get_redis_conn("Redis_Cache")
 config_loader = None
 ## -- ##

-publisher.port = 6380
-publisher.channel = "Script"

 def generate_redis_cache_key(module_name):
     new_uuid = str(uuid.uuid4())
     return f'{module_name}_extracted:{new_uuid}'
@@ -65,7 +66,7 @@ def regex_findall(module_name, redis_key, regex, item_id, item_content, max_time=30):
         # Statistics.incr_module_timeout_statistic(module_name)
         err_mess = f"{module_name}: processing timeout: {item_id}"
         print(err_mess)
-        publisher.info(err_mess)
+        logger.info(err_mess)
         return []
     else:
         if r_set:
@@ -99,7 +100,7 @@ def regex_finditer(r_key, regex, item_id, content, max_time=30):
         # Statistics.incr_module_timeout_statistic(r_key)
         err_mess = f"{r_key}: processing timeout: {item_id}"
         print(err_mess)
-        publisher.info(err_mess)
+        logger.info(err_mess)
         return []
     else:
         res = r_serv_cache.lrange(r_key, 0, -1)
@@ -130,7 +131,7 @@ def regex_search(r_key, regex, item_id, content, max_time=30):
         # Statistics.incr_module_timeout_statistic(r_key)
         err_mess = f"{r_key}: processing timeout: {item_id}"
         print(err_mess)
-        publisher.info(err_mess)
+        logger.info(err_mess)
         return False
     else:
         if r_serv_cache.exists(r_key):


@@ -44,7 +44,7 @@ class ApiKey(AbstractModule):
         re.compile(self.re_google_api_key)

         # Send module state to logs
-        self.redis_logger.info(f"Module {self.module_name} initialized")
+        self.logger.info(f"Module {self.module_name} initialized")

     def compute(self, message, r_result=False):
         item_id, score = message.split()


@@ -148,18 +148,18 @@ class Credential(AbstractModule):
                 discovered_sites = ', '.join(all_sites)
                 print(f"=======> Probably on : {discovered_sites}")

-            date = datetime.now().strftime("%Y%m")
-            nb_tlds = {}
-            for cred in all_credentials:
-                maildomains = re.findall(r"@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
-                self.faup.decode(maildomains)
-                tld = self.faup.get()['tld']
-                # # TODO: # FIXME: remove me
-                try:
-                    tld = tld.decode()
-                except:
-                    pass
-                nb_tlds[tld] = nb_tlds.get(tld, 0) + 1
+            # date = datetime.now().strftime("%Y%m")
+            # nb_tlds = {}
+            # for cred in all_credentials:
+            #     maildomains = re.findall(r"@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,20}", cred.lower())[0]
+            #     self.faup.decode(maildomains)
+            #     tld = self.faup.get()['tld']
+            #     # # TODO: # FIXME: remove me
+            #     try:
+            #         tld = tld.decode()
+            #     except:
+            #         pass
+            #     nb_tlds[tld] = nb_tlds.get(tld, 0) + 1
             # for tld in nb_tlds:
             #     Statistics.add_module_tld_stats_by_date('credential', date, tld, nb_tlds[tld])
         else:


@@ -51,7 +51,7 @@ class CreditCards(AbstractModule):
         self.pending_seconds = 10

         # Send module state to logs
-        self.redis_logger.info(f"Module {self.module_name} initialized")
+        self.logger.info(f"Module {self.module_name} initialized")

     def get_valid_card(self, card):
         clean_card = re.sub(self.re_clean_card, '', card)
@@ -74,7 +74,7 @@ class CreditCards(AbstractModule):
         all_cards = self.regex_findall(self.regex, item.id, content)

         if len(all_cards) > 0:
-            # self.redis_logger.debug(f'All matching {all_cards}')
+            # self.logger.debug(f'All matching {all_cards}')
             creditcard_set = set()

             for card in all_cards:
                 print(card)


@@ -111,7 +111,7 @@ class Cryptocurrencies(AbstractModule, ABC):
         self.pending_seconds = 1

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def compute(self, message):
         item = Item(message)


@@ -41,7 +41,7 @@ class CveModule(AbstractModule):
         self.pending_seconds = 1

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def compute(self, message):


@@ -84,7 +84,7 @@ class Decoder(AbstractModule):
         self.tracker_yara = Tracker_Yara(queue=False)

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def compute(self, message):
@@ -122,13 +122,11 @@ class Decoder(AbstractModule):
                     mimetype = decoded.get_mimetype()
                     decoded.add(dname, date, item.id, mimetype=mimetype)

-                    # DEBUG
-                    self.redis_logger.debug(f'{item.id} : {dname} - {decoded.id} - {mimetype}')
-                    print(f'{item.id} : {dname} - {decoded.id} - {mimetype}')
+                    # new_decodeds.append(decoded.id)
+                    self.logger.info(f'{item.id} : {dname} - {decoded.id} - {mimetype}')

         if find:
-            self.redis_logger.info(f'{item.id} - {dname}')
-            print(f'{item.id} - {dname}')
+            self.logger.info(f'{item.id} - {dname}')

             # Send to Tags
             msg = f'infoleak:automatic-detection="{dname}";{item.id}'
@@ -137,8 +135,11 @@ class Decoder(AbstractModule):
         ####################
         # TRACKERS DECODED
         for decoded_id in new_decodeds:
-            self.tracker_term.compute(decoded_id, obj_type='decoded')
-            self.tracker_regex.compute(decoded_id, obj_type='decoded')
+            try:
+                self.tracker_term.compute(decoded_id, obj_type='decoded')
+                self.tracker_regex.compute(decoded_id, obj_type='decoded')
+            except UnicodeDecodeError:
+                pass
             self.tracker_yara.compute(decoded_id, obj_type='decoded')


@@ -48,7 +48,7 @@ class DomClassifier(AbstractModule):
         self.cc_tld = config_loader.get_config_str("DomClassifier", "cc_tld")

         # Send module state to logs
-        self.redis_logger.info(f"Module: {self.module_name} Launched")
+        self.logger.info(f"Module: {self.module_name} Launched")

     def compute(self, message, r_result=False):
         host, item_id = message.split()
@@ -62,7 +62,7 @@ class DomClassifier(AbstractModule):
             self.c.text(rawtext=host)
             print(self.c.domain)
             self.c.validdomain(passive_dns=True, extended=False)
-            # self.redis_logger.debug(self.c.vdomain)
+            # self.logger.debug(self.c.vdomain)
             print(self.c.vdomain)
             print()


@@ -47,7 +47,7 @@ class Duplicates(AbstractModule):
             "tlsh": {"threshold": THRESHOLD_TLSH}
         }

-        self.redis_logger.info(f"Module: {self.module_name} Launched")
+        self.logger.info(f"Module: {self.module_name} Launched")

     def compute(self, message):
         # IOError: "CRC Checksum Failed on : {id}"
@@ -65,7 +65,7 @@ class Duplicates(AbstractModule):
         x = time.time()

         # Get Hashs
-        content = item.get_content(binary=True)
+        content = item.get_content(r_type='bytes')
         self.algos['ssdeep']['hash'] = Duplicate.get_ssdeep_hash(content)
         self.algos['tlsh']['hash'] = Duplicate.get_tlsh_hash(content)


@@ -67,8 +67,8 @@ class Global(AbstractModule):
         self.pending_seconds = 0.5

         # Send module state to logs
-        self.redis_logger.info(f"Module {self.module_name} initialized")
-        # Send module state to logs
+        self.logger.info(f"Module {self.module_name} initialized")
+        # Send module state to logs # TODO MOVE ME IN INIT SCRIPT
         self.redis_logger.critical(f"AIL {get_ail_uuid()} started")

     def computeNone(self):
@@ -103,7 +103,7 @@ class Global(AbstractModule):
             # Incorrect filename
             if not os.path.commonprefix([filename, self.ITEMS_FOLDER]) == self.ITEMS_FOLDER:
-                self.redis_logger.warning(f'Global; Path traversal detected {filename}')
+                self.logger.warning(f'Global; Path traversal detected {filename}')
                 print(f'Global; Path traversal detected {filename}')

             else:
@@ -146,7 +146,7 @@ class Global(AbstractModule):
             return item_id

         else:
-            self.redis_logger.debug(f"Empty Item: {message} not processed")
+            self.logger.debug(f"Empty Item: {message} not processed")
             print(f"Empty Item: {message} not processed")

     def check_filename(self, filename, new_file_content):
@@ -157,7 +157,7 @@ class Global(AbstractModule):
         # check if file exist
         if os.path.isfile(filename):
-            self.redis_logger.warning(f'File already exist {filename}')
+            self.logger.warning(f'File already exist {filename}')
             print(f'File already exist {filename}')

             # Check that file already exists but content differs
@@ -174,17 +174,17 @@ class Global(AbstractModule):
                         filename = f'{filename[:-3]}_{new_file_md5}.gz'
                     else:
                         filename = f'{filename}_{new_file_md5}'
-                    self.redis_logger.debug(f'new file to check: {filename}')
+                    self.logger.debug(f'new file to check: {filename}')

                     if os.path.isfile(filename):
                         # Ignore duplicate
-                        self.redis_logger.debug(f'ignore duplicated file {filename}')
+                        self.logger.debug(f'ignore duplicated file {filename}')
                         print(f'ignore duplicated file {filename}')
                         filename = None
                 else:
                     # Ignore duplicate checksum equals
-                    self.redis_logger.debug(f'ignore duplicated file {filename}')
+                    self.logger.debug(f'ignore duplicated file {filename}')
                     print(f'ignore duplicated file {filename}')
                     filename = None
@@ -205,13 +205,13 @@ class Global(AbstractModule):
             with gzip.open(filename, 'rb') as f:
                 curr_file_content = f.read()
         except EOFError:
-            self.redis_logger.warning(f'Global; Incomplete file: {filename}')
+            self.logger.warning(f'Global; Incomplete file: {filename}')
             print(f'Global; Incomplete file: {filename}')
             # save daily stats
             # self.r_stats.zincrby('module:Global:incomplete_file', 1, datetime.datetime.now().strftime('%Y%m%d'))
             # Statistics.
         except OSError:
-            self.redis_logger.warning(f'Global; Not a gzipped file: {filename}')
+            self.logger.warning(f'Global; Not a gzipped file: {filename}')
             print(f'Global; Not a gzipped file: {filename}')
             # save daily stats
             # self.r_stats.zincrby('module:Global:invalid_file', 1, datetime.datetime.now().strftime('%Y%m%d'))
@@ -229,7 +229,7 @@ class Global(AbstractModule):
                 with gzip.GzipFile(fileobj=in_, mode='rb') as fo:
                     gunzipped_bytes_obj = fo.read()
         except Exception as e:
-            self.redis_logger.warning(f'Global; Invalid Gzip file: {filename}, {e}')
+            self.logger.warning(f'Global; Invalid Gzip file: {filename}, {e}')
             print(f'Global; Invalid Gzip file: {filename}, {e}')

         return gunzipped_bytes_obj


@@ -46,7 +46,7 @@ class Hosts(AbstractModule):
         self.host_regex = r'\b([a-zA-Z\d-]{,63}(?:\.[a-zA-Z\d-]{,63})+)\b'
         re.compile(self.host_regex)

-        self.redis_logger.info(f"Module: {self.module_name} Launched")
+        self.logger.info(f"Module: {self.module_name} Launched")

     def compute(self, message):
         item = Item(message)


@@ -63,12 +63,11 @@ class Indexer(AbstractModule):
         # create the index register if not present
         time_now = int(time.time())
         if not os.path.isfile(self.indexRegister_path):  # index are not organised
-            self.redis_logger.debug("Indexes are not organized")
-            self.redis_logger.debug(
-                "moving all files in folder 'old_index' ")
+            self.logger.debug("Indexes are not organized")
+            self.logger.debug("moving all files in folder 'old_index' ")
             # move all files to old_index folder
             self.move_index_into_old_index_folder()
-            self.redis_logger.debug("Creating new index")
+            self.logger.debug("Creating new index")
             # create all_index.txt
             with open(self.indexRegister_path, 'w') as f:
                 f.write(str(time_now))
@@ -100,7 +99,7 @@ class Indexer(AbstractModule):
         item_id = item.get_id()
         item_content = item.get_content()

-        self.redis_logger.debug(f"Indexing - {self.indexname}: {docpath}")
+        self.logger.debug(f"Indexing - {self.indexname}: {docpath}")
         print(f"Indexing - {self.indexname}: {docpath}")

         try:
@@ -109,7 +108,7 @@ class Indexer(AbstractModule):
             self.last_refresh = time.time()
             if self.check_index_size() >= self.INDEX_SIZE_THRESHOLD*(1000*1000):
                 timestamp = int(time.time())
-                self.redis_logger.debug(f"Creating new index {timestamp}")
+                self.logger.debug(f"Creating new index {timestamp}")
                 print(f"Creating new index {timestamp}")
                 self.indexpath = join(self.baseindexpath, str(timestamp))
                 self.indexname = str(timestamp)
@@ -129,9 +128,9 @@ class Indexer(AbstractModule):
                 indexwriter.commit()
         except IOError:
-            self.redis_logger.debug(f"CRC Checksum Failed on: {item_id}")
+            self.logger.debug(f"CRC Checksum Failed on: {item_id}")
             print(f"CRC Checksum Failed on: {item_id}")
-            self.redis_logger.error(f'Duplicate;{item.get_source()};{item.get_date()};{item.get_basename()};CRC Checksum Failed')
+            self.logger.error(f'{item_id} CRC Checksum Failed')

     def check_index_size(self):
         """


@@ -170,7 +170,7 @@ class Keys(AbstractModule):
         # if find :
         #     # Send to duplicate
         #     self.add_message_to_queue(item.get_id(), 'Duplicate')
-        #     self.redis_logger.debug(f'{item.get_id()} has key(s)')
+        #     self.logger.debug(f'{item.get_id()} has key(s)')
         #     print(f'{item.get_id()} has key(s)')


@@ -22,7 +22,7 @@ class Languages(AbstractModule):
         super(Languages, self).__init__()

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def compute(self, message):
         item = Item(message)


@@ -71,7 +71,7 @@ class Mixer(AbstractModule):
         self.feeders_processed = {}
         self.feeders_duplicate = {}

-        self.redis_logger.info(f"Module: {self.module_name} Launched")
+        self.logger.info(f"Module: {self.module_name} Launched")

     # TODO Save stats in cache
     # def get_feeders(self):
@@ -154,7 +154,7 @@ class Mixer(AbstractModule):
             feeder_name, item_id, gzip64encoded = splitted
         else:
             print('Invalid message: not processed')
-            self.redis_logger.debug('Invalid Item: {message} not processed')
+            self.logger.debug(f'Invalid Item: {item_id} not processed')
             return None

         # remove absolute path


@@ -49,7 +49,7 @@ class Onion(AbstractModule):
         re.compile(self.onion_regex)
         # re.compile(self.i2p_regex)

-        self.redis_logger.info(f"Module: {self.module_name} Launched")
+        self.logger.info(f"Module: {self.module_name} Launched")

         # TEMP var: SAVE I2P Domain (future I2P crawler)
         # self.save_i2p = config_loader.get_config_boolean("Onion", "save_i2p")


@@ -69,7 +69,7 @@ class PgpDump(AbstractModule):
         self.symmetrically_encrypted = False

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def remove_html(self, pgp_block):
         try:
@@ -130,7 +130,7 @@ class PgpDump(AbstractModule):
             try:
                 output = output.decode()
             except UnicodeDecodeError:
-                self.redis_logger.error(f'Error PgpDump UnicodeDecodeError: {self.item_id}')
+                self.logger.error(f'Error PgpDump UnicodeDecodeError: {self.item_id}')
                 output = ''
         return output


@@ -50,7 +50,7 @@ class Phone(AbstractModule):
         # If the list is greater than 4, we consider the Item may contain a list of phone numbers
         if len(results) > 4:
-            self.redis_logger.debug(results)
+            self.logger.debug(results)
             self.redis_logger.warning(f'{item.get_id()} contains PID (phone numbers)')

             msg = f'infoleak:automatic-detection="phone-number";{item.get_id()}'


@@ -41,7 +41,7 @@ class SQLInjectionDetection(AbstractModule):
         self.faup = Faup()

-        self.redis_logger.info(f"Module: {self.module_name} Launched")
+        self.logger.info(f"Module: {self.module_name} Launched")

     def compute(self, message):
         url, item_id = message.split()


@@ -70,7 +70,7 @@ class SentimentAnalysis(AbstractModule):
         self.pending_seconds = 1

         # Send module state to logs
-        self.redis_logger.info(f"Module {self.module_name} initialized")
+        self.logger.info(f"Module {self.module_name} initialized")

     def compute(self, message):
         # Max time to compute one entry
@@ -78,7 +78,7 @@ class SentimentAnalysis(AbstractModule):
         try:
             self.analyse(message)
         except TimeoutException:
-            self.redis_logger.debug(f"{message} processing timeout")
+            self.logger.debug(f"{message} processing timeout")
         else:
             signal.alarm(0)
@@ -114,7 +114,7 @@ class SentimentAnalysis(AbstractModule):
             p_MimeType = "JSON"

         if p_MimeType in SentimentAnalysis.accepted_Mime_type:
-            self.redis_logger.debug(f'Accepted :{p_MimeType}')
+            self.logger.debug(f'Accepted :{p_MimeType}')

             the_date = datetime.date(int(p_date[0:4]), int(p_date[4:6]), int(p_date[6:8]))
             the_time = datetime.datetime.now()


@@ -60,7 +60,7 @@ class SubmitPaste(AbstractModule):
         """
         Main method of the Module to implement
         """
-        self.redis_logger.debug(f'compute UUID {uuid}')
+        self.logger.debug(f'compute UUID {uuid}')

         # get temp value save on disk
         ltags = self.r_serv_db.smembers(f'{uuid}:ltags')
@@ -73,9 +73,9 @@ class SubmitPaste(AbstractModule):
         if source in ['crawled', 'tests']:
             source = 'submitted'

-        self.redis_logger.debug(f'isfile UUID {isfile}')
-        self.redis_logger.debug(f'source UUID {source}')
-        self.redis_logger.debug(f'paste_content UUID {paste_content}')
+        self.logger.debug(f'isfile UUID {isfile}')
+        self.logger.debug(f'source UUID {source}')
+        self.logger.debug(f'paste_content UUID {paste_content}')

         # needed if redis is restarted
         self.r_serv_log_submit.set(f'{uuid}:end', 0)
@@ -114,15 +114,15 @@ class SubmitPaste(AbstractModule):
                 if isinstance(uuid, list):
                     uuid = uuid[0]
                 # Module processing with the message from the queue
-                self.redis_logger.debug(uuid)
+                self.logger.debug(uuid)
                 self.compute(uuid)
             except Exception as err:
-                self.redis_logger.error(f'Error in module {self.module_name}: {err}')
+                self.logger.critical(err)
                 # Remove uuid ref
                 self.remove_submit_uuid(uuid)
         else:
             # Wait before next process
-            self.redis_logger.debug(f'{self.module_name}, waiting for new message, Idling {self.pending_seconds}s')
+            self.logger.debug(f'{self.module_name}, waiting for new message, Idling {self.pending_seconds}s')
             time.sleep(self.pending_seconds)

     def _manage_text(self, uuid, paste_content, ltags, ltagsgalaxies, source):
def _manage_text(self, uuid, paste_content, ltags, ltagsgalaxies, source): def _manage_text(self, uuid, paste_content, ltags, ltagsgalaxies, source):


@@ -36,7 +36,7 @@ class Tags(AbstractModule):
         self.pending_seconds = 10

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def compute(self, message):
         # Extract item ID and tag from message


@@ -38,7 +38,7 @@ class Telegram(AbstractModule):
         self.max_execution_time = 60

         # Send module state to logs
-        self.redis_logger.info(f"Module {self.module_name} initialized")
+        self.logger.info(f"Module {self.module_name} initialized")

     def compute(self, message, r_result=False):
         item = Item(message)


@@ -402,7 +402,7 @@ class Tools(AbstractModule):
         # Waiting time in seconds between to message processed
         self.pending_seconds = 10

         # Send module state to logs
-        self.redis_logger.info(f"Module {self.module_name} initialized")
+        self.logger.info(f"Module {self.module_name} initialized")

     def get_tools(self):
         return TOOLS.keys()


@@ -56,7 +56,7 @@ class Urls(AbstractModule):
             ")\://(?:[a-zA-Z0-9\.\-]+(?:\:[a-zA-Z0-9\.&%\$\-]+)*@)*(?:(?:25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(?:25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(?:25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(?:25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|(?:[a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.(?:[a-zA-Z]{2,15}))(?:\:[0-9]+)*(?:/?(?:[a-zA-Z0-9\.\,\?'\\+&%\$#\=~_\-]+))*)"

         # Send module state to logs
-        self.redis_logger.info(f"Module {self.module_name} initialized")
+        self.logger.info(f"Module {self.module_name} initialized")

     def compute(self, message):
         """
@@ -82,7 +82,7 @@ class Urls(AbstractModule):
             to_send = f"{url} {item.get_id()}"
             print(to_send)
             self.add_message_to_queue(to_send, 'Url')
-            self.redis_logger.debug(f"url_parsed: {to_send}")
+            self.logger.debug(f"url_parsed: {to_send}")

         if len(l_urls) > 0:
             to_print = f'Urls;{item.get_source()};{item.get_date()};{item.get_basename()};'


@@ -39,13 +39,13 @@ class Zerobins(AbstractModule):
         self.pending_seconds = 10

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     def computeNone(self):
         """
         Compute when no message in queue
         """
-        self.redis_logger.debug("No message in queue")
+        self.logger.debug("No message in queue")

     def compute(self, message):
         """
@@ -63,7 +63,7 @@ class Zerobins(AbstractModule):
             crawlers.create_task(bin_url, depth=0, har=False, screenshot=False, proxy='force_tor',
                                  parent='manual', priority=60)

-        self.redis_logger.debug("Compute message in queue")
+        self.logger.debug("Compute message in queue")

 if __name__ == '__main__':


@@ -8,6 +8,8 @@ Base Class for AIL Modules
 ##################################
 from abc import ABC, abstractmethod
 import os
+import logging
+import logging.config
 import sys
 import time
 import traceback
@@ -17,22 +19,27 @@ sys.path.append(os.environ['AIL_BIN'])
 # Import Project packages
 ##################################
 from pubsublogger import publisher
+from lib import ail_logger
 from lib.ail_queues import AILQueue
 from lib import regex_helper
 from lib.exceptions import ModuleQueueError

+logging.config.dictConfig(ail_logger.get_config(name='modules'))

 class AbstractModule(ABC):
     """
     Abstract Module class
     """

-    def __init__(self, module_name=None, logger_channel='Script', queue=True):
+    def __init__(self, module_name=None, queue=True):
         """
         Init Module
         module_name: str; set the module name if different from the instance ClassName
         queue_name: str; set the queue name if different from the instance ClassName
-        logger_channel: str; set the logger channel name, 'Script' by default
         """
+        self.logger = logging.getLogger(f'{self.__class__.__name__}')

         # Module name if provided else instance className
         self.module_name = module_name if module_name else self._module_name()
@@ -44,14 +51,12 @@ class AbstractModule(ABC):
         # Init Redis Logger
         self.redis_logger = publisher
         # Port of the redis instance used by pubsublogger
         self.redis_logger.port = 6380
         # Channel name to publish logs
         # # TODO: refactor logging
         # If provided could be a namespaced channel like script:<ModuleName>
-        self.redis_logger.channel = logger_channel
+        self.redis_logger.channel = 'Script'

         # Cache key
         self.r_cache_key = regex_helper.generate_redis_cache_key(self.module_name)
@@ -127,14 +132,9 @@ class AbstractModule(ABC):
                 # LOG ERROR
                 trace = traceback.format_tb(err.__traceback__)
                 trace = ''.join(trace)
-                self.redis_logger.critical(f"Error in module {self.module_name}: {err}")
-                self.redis_logger.critical(f"Module {self.module_name} input message: {message}")
-                self.redis_logger.critical(trace)
-                print()
-                print(f"ERROR: {err}")
-                print(f'MESSAGE: {message}')
-                print('TRACEBACK:')
-                print(trace)
+                self.logger.critical(f"Error in module {self.module_name}: {__name__} : {err}")
+                self.logger.critical(f"Module {self.module_name} input message: {message}")
+                self.logger.critical(trace)

                 if isinstance(err, ModuleQueueError):
                     self.queue.error()
@@ -145,7 +145,7 @@ class AbstractModule(ABC):
             else:
                 self.computeNone()
                 # Wait before next process
-                self.redis_logger.debug(f"{self.module_name}, waiting for new message, Idling {self.pending_seconds}s")
+                self.logger.debug(f"{self.module_name}, waiting for new message, Idling {self.pending_seconds}s")
                 time.sleep(self.pending_seconds)

     def _module_name(self):
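Every module now gets a per-class logger, configured once at import time from ail_logger.get_config(name='modules'). A minimal sketch of the resulting pattern; DummyModule and the inlined config dict are illustrative stand-ins, not part of this commit:

    import logging
    import logging.config

    # Stand-in for ail_logger.get_config(name='modules'); the real helper
    # lives in lib/ail_logger.py and is not shown in this diff.
    config = {
        'version': 1,
        'disable_existing_loggers': False,
        'handlers': {'stdout': {'class': 'logging.StreamHandler',
                                'level': 'INFO',
                                'stream': 'ext://sys.stdout'}},
        'root': {'level': 'DEBUG', 'handlers': ['stdout']},
    }
    logging.config.dictConfig(config)

    class DummyModule:
        def __init__(self):
            # same pattern as AbstractModule.__init__
            self.logger = logging.getLogger(f'{self.__class__.__name__}')
            self.logger.info(f'Module {self.__class__.__name__} initialized')

    DummyModule()  # the stdout handler emits the INFO record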


@@ -4,8 +4,7 @@ import os
 import sys

 import dns.resolver
 import dns.exception
+import logging.config

-from pubsublogger import publisher
 from datetime import timedelta
@@ -13,8 +12,13 @@ sys.path.append(os.environ['AIL_BIN'])
 ##################################
 # Import Project packages
 ##################################
+from lib import ail_logger
 from lib import ConfigLoader

+logging.config.dictConfig(ail_logger.get_config(name='modules'))
+logger = logging.getLogger()

 config_loader = ConfigLoader.ConfigLoader()
 dns_server = config_loader.get_config_str("DomClassifier", "dns")
 config_loader = None
@@ -73,24 +77,24 @@ def checking_MX_record(r_serv, MXdomains, addr_dns):
             pass
         except dns.resolver.NoNameservers:
-            publisher.debug('NoNameserver, No non-broken nameservers are available to answer the query.')
+            logger.debug('NoNameserver, No non-broken nameservers are available to answer the query.')
             print('NoNameserver, No non-broken nameservers are available to answer the query.')
         except dns.resolver.NoAnswer:
-            publisher.debug('NoAnswer, The response did not contain an answer to the question.')
+            logger.debug('NoAnswer, The response did not contain an answer to the question.')
             print('NoAnswer, The response did not contain an answer to the question.')
         except dns.name.EmptyLabel:
-            publisher.debug('SyntaxError: EmptyLabel')
+            logger.debug('SyntaxError: EmptyLabel')
             print('SyntaxError: EmptyLabel')
         except dns.resolver.NXDOMAIN:
             r_serv.setex(MXdomain[1:], 1, timedelta(days=1))
-            publisher.debug('The query name does not exist.')
+            logger.debug('The query name does not exist.')
             print('The query name does not exist.')
         except dns.name.LabelTooLong:
-            publisher.debug('The Label is too long')
+            logger.debug('The Label is too long')
             print('The Label is too long')
         except dns.exception.Timeout:
@@ -100,7 +104,7 @@ def checking_MX_record(r_serv, MXdomains, addr_dns):
         except Exception as e:
             print(e)

-    publisher.debug("emails before: {0} after: {1} (valid)".format(num, score))
+    logger.debug("emails before: {0} after: {1} (valid)".format(num, score))
     #return (num, WalidMX)
     return (num, validMX)
@@ -133,23 +137,23 @@ def checking_A_record(r_serv, domains_set):
             pass
         except dns.resolver.NoNameservers:
-            publisher.debug('NoNameserver, No non-broken nameservers are available to answer the query.')
+            logger.debug('NoNameserver, No non-broken nameservers are available to answer the query.')
         except dns.resolver.NoAnswer:
-            publisher.debug('NoAnswer, The response did not contain an answer to the question.')
+            logger.debug('NoAnswer, The response did not contain an answer to the question.')
         except dns.name.EmptyLabel:
-            publisher.debug('SyntaxError: EmptyLabel')
+            logger.debug('SyntaxError: EmptyLabel')
         except dns.resolver.NXDOMAIN:
             r_serv.setex(Adomain[1:], 1, timedelta(days=1))
-            publisher.debug('The query name does not exist.')
+            logger.debug('The query name does not exist.')
         except dns.name.LabelTooLong:
-            publisher.debug('The Label is too long')
+            logger.debug('The Label is too long')
         except Exception as e:
             print(e)

-    publisher.debug("URLs before: {0} after: {1} (valid)".format(num, score))
+    logger.debug("URLs before: {0} after: {1} (valid)".format(num, score))
     return (num, WalidA)
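The except ladder above maps dnspython resolver errors to debug logs. A self-contained sketch of the same idiom; has_mx is a hypothetical helper using the dnspython 2.x resolve() API, and the module's own resolver setup is outside this hunk:

    import logging
    import dns.resolver
    import dns.exception

    logger = logging.getLogger()

    def has_mx(domain, timeout=2.0):
        resolver = dns.resolver.Resolver()
        resolver.timeout = timeout
        resolver.lifetime = timeout
        try:
            # resolve() raises instead of returning an empty answer set
            return len(list(resolver.resolve(domain, 'MX'))) > 0
        except dns.resolver.NXDOMAIN:
            logger.debug('The query name does not exist.')
        except dns.resolver.NoAnswer:
            logger.debug('NoAnswer, The response did not contain an answer to the question.')
        except dns.resolver.NoNameservers:
            logger.debug('NoNameserver, No non-broken nameservers are available to answer the query.')
        except dns.exception.Timeout:
            logger.debug('Query timed out.')
        return False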


@@ -34,19 +34,19 @@ class Template(AbstractModule):
         self.pending_seconds = 10

         # Send module state to logs
-        self.redis_logger.info(f'Module {self.module_name} initialized')
+        self.logger.info(f'Module {self.module_name} initialized')

     # def computeNone(self):
     #     """
     #     Do something when there is no message in the queue
     #     """
-    #     self.redis_logger.debug("No message in queue")
+    #     self.logger.debug("No message in queue")

     def compute(self, message):
         """
         Compute a message in queue / process the message (item_id, ...)
         """
-        self.redis_logger.debug("Compute message in queue")
+        self.logger.debug("Compute message in queue")

         # # if message is an item_id:
         #     item = Item(message)
         #     content = item.get_content()


@@ -87,7 +87,7 @@ class Retro_Hunt_Module(AbstractModule):
         last_obj_type = None
         for obj in ail_objects.obj_iterator(obj_type, filters):
             self.obj = obj
-            content = obj.get_content(r_str=True)
+            content = obj.get_content(r_type='bytes')

             rule.match(data=content, callback=self.yara_rules_match,
                        which_callbacks=yara.CALLBACK_MATCHES, timeout=timeout)


@@ -63,7 +63,7 @@ class Tracker_Regex(AbstractModule):
         if obj_type not in self.tracked_regexs:
             return None

-        content = obj.get_content(r_str=True)
+        content = obj.get_content()

         for dict_regex in self.tracked_regexs[obj_type]:
             matched = self.regex_findall(dict_regex['regex'], obj_id, content)


@@ -85,7 +85,7 @@ class Tracker_Term(AbstractModule):
         if obj_type not in self.tracked_words and obj_type not in self.tracked_sets:
             return None

-        content = obj.get_content(r_str=True)
+        content = obj.get_content()

         signal.alarm(self.max_execution_time)


@@ -61,7 +61,7 @@ class Tracker_Yara(AbstractModule):
         if obj_type not in self.rules:
             return None

-        content = self.obj.get_content(r_str=True)
+        content = self.obj.get_content(r_type='bytes')

         try:
             yara_match = self.rules[obj_type].match(data=content, callback=self.yara_rules_match,
@@ -109,4 +109,3 @@ class Tracker_Yara(AbstractModule):
 if __name__ == '__main__':
     module = Tracker_Yara()
     module.run()
-    # module.compute('archive/gist.github.com/2023/04/13/chipzoller_d8d6d2d737d02ad4fe9d30a897170761.gz')
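The yara-backed trackers (Tracker_Yara, Retro_Hunt) now fetch content as raw bytes via r_type='bytes', while the regex and term trackers keep the decoded-string default; matching on the original bytes avoids re-encoding decoded text before handing it to yara. A small sketch of the bytes path; the inline rule and sample content are illustrative only:

    import yara

    # Illustrative rule, not one of AIL's tracker rules
    rules = yara.compile(source='rule url_kw { strings: $a = "http" condition: $a }')

    content = b'see http://example.onion'  # as returned by get_content(r_type='bytes')
    for match in rules.match(data=content):
        print(match.rule)  # -> url_kw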

configs/logging.json (new executable file)

@@ -0,0 +1,33 @@
+{
+  "version": 1,
+  "disable_existing_loggers": false,
+  "formatters": {
+    "simple": {
+      "format": "%(asctime)s %(name)s %(levelname)s:%(message)s"
+    }
+  },
+  "handlers": {
+    "stdout": {
+      "class": "logging.StreamHandler",
+      "level": "INFO",
+      "stream": "ext://sys.stdout"
+    },
+    "file": {
+      "class": "logging.handlers.RotatingFileHandler",
+      "level": "WARNING",
+      "formatter": "simple",
+      "filename": "logs/flask_warning.log",
+      "mode": "a",
+      "maxBytes": 1000000,
+      "backupCount": 5
+    }
+  },
+  "root": {
+    "level": "DEBUG",
+    "handlers": [
+      "stdout",
+      "file"
+    ]
+  }
+}
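configs/logging.json is a standard logging.config.dictConfig schema. A plausible sketch of what ail_logger.get_config(name=...) does with it; the helper itself is not in this diff, so the per-component filename substitution is an assumption:

    import os
    import json
    import logging.config

    def get_config(name='ail'):
        # Load the shared dictConfig schema shipped in configs/logging.json
        config_file = os.path.join(os.environ['AIL_HOME'], 'configs', 'logging.json')
        with open(config_file) as f:
            config = json.load(f)
        # Presumably point the rotating file handler at a per-component log,
        # e.g. logs/flask_warning.log, logs/modules_warning.log, ...
        log_dir = os.path.join(os.environ['AIL_HOME'], 'logs')
        config['handlers']['file']['filename'] = os.path.join(log_dir, f'{name}_warning.log')
        return config

    logging.config.dictConfig(get_config(name='flask'))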


@@ -1,33 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-"""
-The Empty queue module
-====================
-
-This simple module can be used to clean all queues.
-
-Requirements:
--------------
-
-"""
-
-import os
-import time
-from pubsublogger import publisher
-from Helper import Process
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-    config_section = ['Global', 'Duplicates', 'Indexer', 'Attributes', 'Lines', 'DomClassifier', 'Tokenize', 'Curve', 'Categ', 'CreditCards', 'Mail', 'Onion', 'DumpValidOnion', 'Web', 'WebStats', 'Release', 'Credential', 'Cve', 'Phone', 'SourceCode', 'Keys']
-    config_section = ['Curve']
-
-    for queue in config_section:
-        print('dropping: ' + queue)
-        p = Process(queue)
-        while True:
-            message = p.get_from_set()
-            if message is None:
-                break


@@ -7,10 +7,9 @@ import ssl
 import json
 import time
 import uuid
 import random
 import logging
-import logging.handlers
+import logging.config

 from flask import Flask, render_template, jsonify, request, Request, Response, session, redirect, url_for
 from flask_login import LoginManager, current_user, login_user, logout_user, login_required
@@ -30,6 +29,7 @@ sys.path.append(os.environ['AIL_BIN'])
 from lib.ConfigLoader import ConfigLoader
 from lib.Users import User
 from lib import Tag
+from lib import ail_logger

 # Import config
 import Flask_config
@@ -50,7 +50,6 @@ from blueprints.objects_cve import objects_cve
 from blueprints.objects_decoded import objects_decoded
 from blueprints.objects_subtypes import objects_subtypes

 Flask_dir = os.environ['AIL_FLASK']

 # CONFIG #
@@ -74,15 +73,7 @@ log_dir = os.path.join(os.environ['AIL_HOME'], 'logs')
 if not os.path.isdir(log_dir):
     os.makedirs(log_dir)

-# log_filename = os.path.join(log_dir, 'flask_server.logs')
-# logger = logging.getLogger()
-# formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
-# handler_log = logging.handlers.TimedRotatingFileHandler(log_filename, when="midnight", interval=1)
-# handler_log.suffix = '%Y-%m-%d.log'
-# handler_log.setFormatter(formatter)
-# handler_log.setLevel(30)
-# logger.addHandler(handler_log)
-# logger.setLevel(30)
+logging.config.dictConfig(ail_logger.get_config(name='flask'))

 # ========= =========#