Mirror of https://github.com/ail-project/ail-framework.git (synced 2024-11-10 08:38:28 +00:00)
chg: [module] refactor Iban module + fix pgp migration
This commit is contained in:
parent aa6ba61050
commit 3b07d88709

6 changed files with 197 additions and 219 deletions
Deleted file (122 lines):
@@ -1,122 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-"""
-The BankAccount Module
-======================
-
-It apply IBAN regexes on item content and warn if above a threshold.
-
-"""
-
-import redis
-import time
-import datetime
-import re
-import string
-from itertools import chain
-
-sys.path.append(os.environ['AIL_BIN'])
-##################################
-# Import Project packages #
-##################################
-from lib import Statistics
-
-
-from packages import Item
-from pubsublogger import publisher
-
-from Helper import Process
-
-import signal
-
-class TimeoutException(Exception):
-    pass
-
-def timeout_handler(signum, frame):
-    raise TimeoutException
-
-signal.signal(signal.SIGALRM, timeout_handler)
-
-_LETTERS_IBAN = chain(enumerate(string.digits + string.ascii_uppercase),
-                      enumerate(string.ascii_lowercase, 10))
-LETTERS_IBAN = {ord(d): str(i) for i, d in _LETTERS_IBAN}
-
-def iban_number(iban):
-    return (iban[4:] + iban[:4]).translate(LETTERS_IBAN)
-
-def is_valid_iban(iban):
-    iban_numb = iban_number(iban)
-    iban_numb_check = iban_number(iban[:2] + '00' + iban[4:])
-    check_digit = '{:0>2}'.format(98 - (int(iban_numb_check) % 97))
-    if check_digit == iban[2:4] and int(iban_numb) % 97 == 1:
-        # valid iban
-        print('valid iban')
-        return True
-    return False
-
-# # TODO: SET
-def check_all_iban(l_iban, obj_id):
-    nb_valid_iban = 0
-    for iban in l_iban:
-        iban = iban[0]+iban[1]+iban[2]
-        iban = ''.join(e for e in iban if e.isalnum())
-        #iban = iban.upper()
-        res = iban_regex_verify.findall(iban)
-        date = datetime.datetime.now().strftime("%Y%m")
-        if res:
-            print('checking '+iban)
-            if is_valid_iban(iban):
-                print('------')
-                nb_valid_iban = nb_valid_iban + 1
-                Statistics.add_iban_country_stats_by_date(date, iban[0:2], 1)
-
-
-    if(nb_valid_iban > 0):
-        to_print = 'Iban;{};{};{};'.format(Item.get_source(obj_id), Item.get_item_date(obj_id), Item.get_basename(obj_id))
-        publisher.warning('{}Checked found {} IBAN;{}'.format(
-                            to_print, nb_valid_iban, obj_id))
-        msg = 'infoleak:automatic-detection="iban";{}'.format(obj_id)
-        p.populate_set_out(msg, 'Tags')
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-
-    config_section = 'BankAccount'
-
-    p = Process(config_section)
-    max_execution_time = p.config.getint("BankAccount", "max_execution_time")
-
-    publisher.info("BankAccount started")
-
-    #iban_regex = re.compile(r'\b[A-Za-z]{2}[0-9]{2}(?:[ ]?[0-9]{4}){4}(?:[ ]?[0-9]{1,2})?\b')
-    iban_regex = re.compile(r'\b([A-Za-z]{2}[ \-]?[0-9]{2})(?=(?:[ \-]?[A-Za-z0-9]){9,30})((?:[ \-]?[A-Za-z0-9]{3,5}){2,6})([ \-]?[A-Za-z0-9]{1,3})\b')
-    iban_regex_verify = re.compile(r'^([A-Z]{2})([0-9]{2})([A-Z0-9]{9,30})$')
-
-
-    while True:
-
-        message = p.get_from_set()
-
-        if message is not None:
-
-            obj_id = Item.get_item_id(message)
-
-            content = Item.get_item_content(obj_id)
-
-            signal.alarm(max_execution_time)
-            try:
-                l_iban = iban_regex.findall(content)
-            except TimeoutException:
-                print ("{0} processing timeout".format(obj_id))
-                continue
-            else:
-                signal.alarm(0)
-
-            if(len(l_iban) > 0):
-                check_all_iban(l_iban, obj_id)
-
-        else:
-            publisher.debug("Script BankAccount is Idling 10s")
-            time.sleep(10)
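For context, the removed iban_number()/is_valid_iban() helpers (kept unchanged in the new Iban module below) implement the standard ISO 13616 mod-97 check. The following is an editor's minimal standalone sketch of that check, slightly simplified to the remainder test only; the sample value is the commonly published example IBAN, not data taken from this commit.

import string
from itertools import chain

# Map letters to their IBAN numeric values (A=10 ... Z=35); digits map to themselves.
_LETTERS_IBAN = chain(enumerate(string.digits + string.ascii_uppercase),
                      enumerate(string.ascii_lowercase, 10))
LETTERS_IBAN = {ord(d): str(i) for i, d in _LETTERS_IBAN}

def iban_number(iban):
    # Move the country code and check digits to the end, then expand letters to digits.
    return (iban[4:] + iban[:4]).translate(LETTERS_IBAN)

def is_valid_iban(iban):
    # An IBAN is valid when the rearranged, digit-expanded number is congruent to 1 mod 97.
    return int(iban_number(iban)) % 97 == 1

print(is_valid_iban('DE89370400440532013000'))  # True
print(is_valid_iban('DE00370400440532013000'))  # False (wrong check digits)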
@@ -307,15 +307,15 @@ def tags_migration():
     for galaxy in get_active_galaxies():
         Tag.enable_galaxy(galaxy)

-    # for tag in get_all_items_tags():
-    # print(tag)
-    # tag_first = get_tag_first_seen(tag)
-    # if tag_first:
-    # for date in Date.get_date_range_today(tag_first):
-    # print(date)
-    # for item_id in get_all_items_tags_by_day(tag, date):
-    # item = Items.Item(item_id)
-    # item.add_tag(tag)
+    for tag in get_all_items_tags():
+        print(tag)
+        tag_first = get_tag_first_seen(tag)
+        if tag_first:
+            for date in Date.get_date_range_today(tag_first):
+                print(date)
+                for item_id in get_all_items_tags_by_day(tag, date):
+                    item = Items.Item(item_id)
+                    item.add_tag(tag)



@@ -485,60 +485,59 @@ def domain_migration():
             if not crawlers.is_valid_onion_domain(dom):
                 print(dom)
                 continue
-            # ports = get_domain_ports(domain_type, dom)
-            # first_seen = get_domain_first_seen(domain_type, dom)
-            # last_check = get_domain_last_check(domain_type, dom)
-            # last_origin = get_domain_last_origin(domain_type, dom)
-            # languages = get_domain_languages(dom)
-            #
-            # domain = Domains.Domain(dom)
-            # # domain.update_daterange(first_seen)
-            # # domain.update_daterange(last_check)
-            # # domain._set_ports(ports)
-            # # if last_origin:
-            # # domain.set_last_origin(last_origin)
-            # for language in languages:
-            # print(language)
-            # # domain.add_language(language)
-            # for tag in get_domain_tags(domain):
-            # domain.add_tag(tag)
-            # #print('------------------')
-            # #print('------------------')
-            # #print('------------------')
-            # #print('------------------')
-            # #print('------------------')
-            # print(dom)
-            # #print(first_seen)
-            # #print(last_check)
-            # #print(ports)
-            #
-            # # # TODO: FIXME filter invalid hostname
-            #
-            # # CREATE DOMAIN HISTORY
-            # for port in ports:
-            # for history in get_domain_history_by_port(domain_type, dom, port):
-            # epoch = history['epoch']
-            # # DOMAIN DOWN
-            # if not history.get('status'): # domain DOWN
-            # # domain.add_history(epoch, port)
-            # print(f'DOWN {epoch}')
-            # # DOMAIN UP
-            # else:
-            # root_id = history.get('root')
-            # if root_id:
-            # # domain.add_history(epoch, port, root_item=root_id)
-            # #print(f'UP {root_id}')
-            # crawled_items = get_crawled_items(dom, root_id)
-            # for item_id in crawled_items:
-            # url = get_item_link(item_id)
-            # item_father = get_item_father(item_id)
-            # if item_father and url:
-            # #print(f'{url} {item_id}')
-            # pass
-            # # domain.add_crawled_item(url, port, item_id, item_father)
-            #
-            #
-            # #print()
+            ports = get_domain_ports(domain_type, dom)
+            first_seen = get_domain_first_seen(domain_type, dom)
+            last_check = get_domain_last_check(domain_type, dom)
+            last_origin = get_domain_last_origin(domain_type, dom)
+            languages = get_domain_languages(dom)

+            domain = Domains.Domain(dom)
+            domain.update_daterange(first_seen)
+            domain.update_daterange(last_check)
+            domain._set_ports(ports)
+            if last_origin:
+                domain.set_last_origin(last_origin)
+            for language in languages:
+                print(language)
+                domain.add_language(language)
+            for tag in get_domain_tags(domain):
+                domain.add_tag(tag)
+            #print('------------------')
+            #print('------------------')
+            #print('------------------')
+            #print('------------------')
+            #print('------------------')
+            print(dom)
+            #print(first_seen)
+            #print(last_check)
+            #print(ports)

+            # # TODO: FIXME filter invalid hostname

+            # CREATE DOMAIN HISTORY
+            for port in ports:
+                for history in get_domain_history_by_port(domain_type, dom, port):
+                    epoch = history['epoch']
+                    # DOMAIN DOWN
+                    if not history.get('status'): # domain DOWN
+                        domain.add_history(epoch, port)
+                        print(f'DOWN {epoch}')
+                    # DOMAIN UP
+                    else:
+                        root_id = history.get('root')
+                        if root_id:
+                            domain.add_history(epoch, port, root_item=root_id)
+                            print(f'UP {root_id}')
+                            crawled_items = get_crawled_items(dom, root_id)
+                            for item_id in crawled_items:
+                                url = get_item_link(item_id)
+                                item_father = get_item_father(item_id)
+                                if item_father and url:
+                                    print(f'{url} {item_id}')
+                                    domain.add_crawled_item(url, port, item_id, item_father)

+            #print()

     for domain_type in ['onion', 'regular']:
         for date in Date.get_date_range_today('20190101'):
@@ -552,11 +551,11 @@ def domain_migration():
             last_origin = get_domain_last_origin(domain_type, dom)

             domain = Domains.Domain(dom)
-            # domain.update_daterange(first_seen)
-            # domain.update_daterange(last_check)
-            # if last_origin:
-            # domain.set_last_origin(last_origin)
-            # domain.add_history(None, None, date=date)
+            domain.update_daterange(first_seen)
+            domain.update_daterange(last_check)
+            if last_origin:
+                domain.set_last_origin(last_origin)
+            domain.add_history(None, None, date=date)


 ###############################
@@ -719,7 +718,10 @@ def get_top_stats_module(module_name, date):
     return r_serv_trend.zrange(f'top_{module_name}_set_{date}', 0, -1, withscores=True)

 def get_module_tld_stats_by_date(module, date):
-    return r_statistics.hgetall(f'{module}_by_tld:{date}')
+    return r_serv_trend.hgetall(f'{module}_by_tld:{date}')
+
+def get_iban_country_stats_by_date(date):
+    return r_serv_trend.hgetall(f'iban_by_country:{date}')

 def statistics_migration():
     # paste_by_modules_timeout
@@ -753,19 +755,24 @@ def statistics_migration():



-        # # MODULE STATS
-        # for module in ['credential', 'mail', 'SQLInjection']:
-        # stats = get_module_tld_stats_by_date(module, date)
-        # for tld in stats:
-        # if tld:
-        # print(module, date, tld, stats[tld])
-        # Statistics.add_module_tld_stats_by_date(module, date, tld, stats[tld])
-        # for module in ['credential']:
-        # # TOP STATS
-        # top_module = get_top_stats_module(module, date)
-        # for keyword, total_sum in top_module:
-        # print(date, module, keyword, total_sum)
-        # #Statistics._add_module_stats(module, total_sum, keyword, date)
+        # MODULE STATS
+        for module in ['credential', 'mail', 'SQLInjection']:
+            stats = get_module_tld_stats_by_date(module, date)
+            for tld in stats:
+                if tld:
+                    print(module, date, tld, stats[tld])
+                    Statistics.add_module_tld_stats_by_date(module, date, tld, stats[tld])
+        stats = get_iban_country_stats_by_date(date)
+        for tld in stats:
+            if tld:
+                print('iban', date, tld, stats[tld])
+                Statistics.add_module_tld_stats_by_date('iban', date, tld, stats[tld])
+        for module in ['credential']:
+            # TOP STATS
+            top_module = get_top_stats_module(module, date)
+            for keyword, total_sum in top_module:
+                print(date, module, keyword, total_sum)
+                Statistics._add_module_stats(module, total_sum, keyword, date)



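The hunk above migrates the per-country IBAN counters out of the old trending store (hash iban_by_country:{date}, read through r_serv_trend) and into the new Statistics store, where they sit beside the other modules' TLD counters (hash iban_by_tld:{date}, written via Statistics.add_module_tld_stats_by_date). An editor's sketch of that key mapping follows; the Redis connection parameters are illustrative placeholders, since AIL resolves the real databases through ConfigLoader.

import redis

# Placeholder connections: the old trending DB and the new statistics DB.
r_serv_trend = redis.Redis(host='localhost', port=6379, db=3, decode_responses=True)
r_statistics = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True)

date = '202211'
old_counts = r_serv_trend.hgetall(f'iban_by_country:{date}')   # e.g. {'DE': '4', 'FR': '2'}
for country, nb in old_counts.items():
    if country:
        # Same write path as Statistics.add_module_tld_stats_by_date('iban', date, country, nb)
        r_statistics.hincrby(f'iban_by_tld:{date}', country, int(nb))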
@@ -781,17 +788,17 @@ def statistics_migration():
 if __name__ == '__main__':

     #core_migration()
-    # user_migration()
-    # tags_migration()
+    #user_migration()
+    #tags_migration()
     #items_migration()
     #crawler_migration()
-    # domain_migration() # TO TEST
+    # domain_migration() # TO TEST ###########################
     #decodeds_migration()
-    # screenshots_migration()
+    #screenshots_migration()
     #subtypes_obj_migration()
-    # ail_2_ail_migration()
-    # trackers_migration()
-    # investigations_migration()
+    ail_2_ail_migration()
+    trackers_migration()
+    investigations_migration()
     statistics_migration()


@@ -131,12 +131,5 @@ def get_module_tld_stats_by_date(module, date):
 def add_module_tld_stats_by_date(module, date, tld, nb):
     r_statistics.hincrby(f'{module}_by_tld:{date}', tld, int(nb))

-def get_iban_country_stats_by_date(date):
-    return r_statistics.hgetall(f'iban_by_country:{date}')
-
-def add_iban_country_stats_by_date(date, tld, nb):
-    r_statistics.hincrby(f'iban_by_country:{date}', tld, int(nb))
-
-
 # r_stats.zincrby('module:Global:incomplete_file', datetime.datetime.now().strftime('%Y%m%d'), 1)
 # r_stats.zincrby('module:Global:invalid_file', datetime.datetime.now().strftime('%Y%m%d'), 1)
@@ -40,6 +40,10 @@ class Pgp(AbstractSubtypeObject):
         # # TODO:
         pass

+    # # TODO:
+    def get_meta(self):
+        return None
+
     def get_link(self, flask_context=False):
         if flask_context:
             url = url_for('correlation.show_correlation', object_type=self.type, type_id=self.subtype, correlation_id=self.id)
bin/modules/Iban.py (new executable file, 96 lines)
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+"""
+The Iban Module
+================================
+
+This module add tags to an item.
+
+"""
+
+##################################
+# Import External packages
+##################################
+import datetime
+import os
+import re
+import string
+import sys
+from itertools import chain
+
+sys.path.append(os.environ['AIL_BIN'])
+##################################
+# Import Project packages
+##################################
+from modules.abstract_module import AbstractModule
+from lib.objects.Items import Item
+from lib.ConfigLoader import ConfigLoader
+from lib import Statistics
+
+class Iban(AbstractModule):
+    """
+    Iban module for AIL framework
+    """
+
+    _LETTERS_IBAN = chain(enumerate(string.digits + string.ascii_uppercase),
+                          enumerate(string.ascii_lowercase, 10))
+    LETTERS_IBAN = {ord(d): str(i) for i, d in _LETTERS_IBAN}
+
+    def __init__(self):
+        super(Iban, self).__init__()
+
+        # Waiting time in secondes between to message proccessed
+        self.pending_seconds = 10
+
+        self.regex_timeout = 30
+        #iban_regex = re.compile(r'\b[A-Za-z]{2}[0-9]{2}(?:[ ]?[0-9]{4}){4}(?:[ ]?[0-9]{1,2})?\b')
+        self.iban_regex = re.compile(r'\b([A-Za-z]{2}[ \-]?[0-9]{2})(?=(?:[ \-]?[A-Za-z0-9]){9,30})((?:[ \-]?[A-Za-z0-9]{3,5}){2,6})([ \-]?[A-Za-z0-9]{1,3})\b')
+        self.iban_regex_verify = re.compile(r'^([A-Z]{2})([0-9]{2})([A-Z0-9]{9,30})$')
+
+        # Send module state to logs
+        self.redis_logger.info(f'Module {self.module_name} initialized')
+
+    def get_iban_number(self, iban):
+        return (iban[4:] + iban[:4]).translate(Iban.LETTERS_IBAN)
+
+    def is_valid_iban(self, iban):
+        iban_numb = self.get_iban_number(iban)
+        iban_numb_check = self.get_iban_number(iban[:2] + '00' + iban[4:])
+        check_digit = '{:0>2}'.format(98 - (int(iban_numb_check) % 97))
+        if check_digit == iban[2:4] and int(iban_numb) % 97 == 1:
+            return True
+        return False
+
+    def compute(self, message):
+        item = Item(message)
+        item_id = item.get_id()
+
+        ibans = self.regex_findall(self.iban_regex, item_id, item.get_content())
+        if ibans:
+            valid_ibans = set()
+            for iban in ibans:
+                iban = iban[1:-1].replace("'", "").split(',')
+                iban = iban[0]+iban[1]+iban[2]
+                iban = ''.join(e for e in iban if e.isalnum())
+                if self.regex_findall(self.iban_regex_verify, item_id, iban):
+                    print(f'checking {iban}')
+                    if self.is_valid_iban(iban):
+                        valid_ibans.add(iban)
+
+            if valid_ibans:
+                print(f'{valid_ibans} ibans {item_id}')
+                date = datetime.datetime.now().strftime("%Y%m")
+                for iban in valid_ibans:
+                    Statistics.add_module_tld_stats_by_date('iban', date, iban[0:2], 1)
+
+                to_print = f'Iban;{item.get_source()};{item.get_date()};{item.get_basename()};'
+                self.redis_logger.warning(f'{to_print}Checked found {len(valid_ibans)} IBAN;{item_id}')
+                # Tags
+                msg = f'infoleak:automatic-detection="iban";{item_id}'
+                self.send_message_to_queue(msg, 'Tags')
+
+if __name__ == '__main__':
+
+    module = Iban()
+    module.run()
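The new compute() keeps the old two-step detection flow: the broad iban_regex pulls candidates out of the item content as three capture groups, the groups are concatenated and reduced to alphanumerics, and only candidates that also match iban_regex_verify reach the mod-97 check. Below is an editor's standalone sketch of that extraction and normalisation using re.findall directly; the module itself goes through AbstractModule.regex_findall (which adds timeout handling and a slightly different result format), and the sample text is purely illustrative.

import re

iban_regex = re.compile(r'\b([A-Za-z]{2}[ \-]?[0-9]{2})(?=(?:[ \-]?[A-Za-z0-9]){9,30})((?:[ \-]?[A-Za-z0-9]{3,5}){2,6})([ \-]?[A-Za-z0-9]{1,3})\b')
iban_regex_verify = re.compile(r'^([A-Z]{2})([0-9]{2})([A-Z0-9]{9,30})$')

content = 'leaked account: DE89 3704 0044 0532 0130 00 please wire funds'
for groups in iban_regex.findall(content):
    candidate = ''.join(groups)                               # 'DE89 3704 0044 0532 0130 00'
    candidate = ''.join(c for c in candidate if c.isalnum())  # 'DE89370400440532013000'
    if iban_regex_verify.findall(candidate):
        print(candidate)  # this value would then be passed to is_valid_iban()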
@@ -68,7 +68,7 @@ publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Urls,Redis_Credential,R
 subscribe = Redis_CreditCards
 publish = Redis_Tags

-[BankAccount]
+[Iban]
 subscribe = Redis_Global
 publish = Redis_Tags
