chg: [merge master]

Author: Terrtia, 2021-02-10 15:50:48 +01:00
Commit: fc7a61f67c
Signature: no known key found for this signature in database (GPG key ID: 1E1B1F50D84613D0)
92 changed files with 3192 additions and 511 deletions

.gitignore (6 lines changed)

@@ -19,6 +19,9 @@ DATA_ARDB
indexdir/
logs/
old/
+pgpdump/
+temp/
DEFAULT_PASSWORD
@@ -44,6 +47,9 @@ configs/update.cfg
update/current_version
files
+# Trackers
+bin/trackers/yara/custom-rules/*
# Helper
bin/helper/gen_cert/rootCA.*
bin/helper/gen_cert/server.*


@@ -12,7 +12,7 @@ env:
AIL_FLASK=$TRAVIS_BUILD_DIR/var/www/ AIL_REDIS=$TRAVIS_BUILD_DIR/redis/src/ \
AIL_LEVELDB=$TRAVIS_BUILD_DIR/redis-leveldb/ PATH=$AIL_HOME:$AIL_REDIS:$AIL_LEVELDB:$PATH
-dist: xenial
+dist: bionic
install:
- ./installing_deps.sh


@@ -16,7 +16,7 @@ Redis and ARDB overview
DB 1 - Curve
DB 2 - TermFreq
-DB 3 - Trending
+DB 3 - Trending/Trackers
DB 4 - Sentiments
DB 5 - TermCred
DB 6 - Tags


@@ -1,50 +0,0 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
"""
The ZMQ_Sub_Attribute Module
============================
This module is saving Attribute of the paste into redis
"""
import time
from packages import Paste
from pubsublogger import publisher
from Helper import Process
if __name__ == "__main__":
publisher.port = 6380
publisher.channel = "Script"
config_section = 'Attributes'
p = Process(config_section)
# FUNCTIONS #
publisher.info("Attribute is Running")
while True:
try:
message = p.get_from_set()
if message is not None:
PST = Paste.Paste(message)
else:
publisher.debug("Script Attribute is idling 1s")
print('sleeping')
time.sleep(1)
continue
# FIXME do it directly in the class
PST.save_attribute_redis("p_encoding", PST._get_p_encoding())
#PST.save_attribute_redis("p_language", PST._get_p_language())
# FIXME why not all saving everything there.
PST.save_all_attributes_redis()
# FIXME Not used.
PST.store.sadd("Pastes_Objects", PST.p_rel_path)
except IOError:
print("CRC Checksum Failed on :", PST.p_rel_path)
publisher.error('Duplicate;{};{};{};CRC Checksum Failed'.format(
PST.p_source, PST.p_date, PST.p_name))


@@ -31,14 +31,10 @@ if __name__ == "__main__":
p = Process(config_section)
# FUNCTIONS #
-publisher.info("Creditcard script subscribed to channel creditcard_categ")
+publisher.info("CreditCards script started")
creditcard_regex = "4[0-9]{12}(?:[0-9]{3})?"
-# FIXME For retro compatibility
-channel = 'creditcard_categ'
# Source: http://www.richardsramblings.com/regex/credit-card-numbers/
cards = [
r'\b4\d{3}(?:[\ \-]?)\d{4}(?:[\ \-]?)\d{4}(?:[\ \-]?)\d{4}\b', # 16-digit VISA, with separators
@@ -69,9 +65,6 @@ if __name__ == "__main__":
print(clean_card, 'is valid')
creditcard_set.add(clean_card)
-paste.__setattr__(channel, creditcard_set)
-paste.save_attribute_redis(channel, creditcard_set)
pprint.pprint(creditcard_set)
to_print = 'CreditCard;{};{};{};'.format(
paste.p_source, paste.p_date, paste.p_name)


@@ -44,8 +44,9 @@ def search_crytocurrency(item_id, item_content):
is_cryptocurrency_found = False
-for crypto_name in cryptocurrency_dict:
-crypto_dict = cryptocurrency_dict[crypto_name]
+for dict_field in cryptocurrency_dict:
+crypto_dict = cryptocurrency_dict[dict_field]
+crypto_name = crypto_dict['name']
signal.alarm(crypto_dict['max_execution_time'])
try:
@@ -62,7 +63,7 @@ def search_crytocurrency(item_id, item_content):
is_valid_crypto_addr = False
# validate cryptocurrency address
for address in crypto_addr:
-if(Cryptocurrency.verify_cryptocurrency_address(crypto_name, address)):
+if(Cryptocurrency.verify_cryptocurrency_address(dict_field, address)):
is_valid_crypto_addr = True
print('{} address found : {}'.format(crypto_name, address))
# build bitcoin correlation
@@ -127,6 +128,17 @@ cryptocurrency_dict = {
'tag': 'infoleak:automatic-detection="bitcoin-private-key"',
},
},
'bitcoin-bech32': {
'name': 'bitcoin', # e.g. bc1qar0srrr7xfkvy5l643lydnw9re59gtzzwf5mdq
'regex': r'\bbc(?:0(?:[ac-hj-np-z02-9]{39}|[ac-hj-np-z02-9]{59})|1[ac-hj-np-z02-9]{8,87})\b',
'max_execution_time': default_max_execution_time,
'tag': 'infoleak:automatic-detection="bitcoin-address"',
'private_key': {
'regex': r'\b(?<![+/=])[5KL][1-9A-HJ-NP-Za-km-z]{50,51}(?![+/=])\b',
'max_execution_time': default_max_execution_time,
'tag': 'infoleak:automatic-detection="bitcoin-private-key"',
},
},
'ethereum': {
'name': 'ethereum', # e.g. 0x8466b50B53c521d0B4B163d186596F94fB8466f1
'regex': r'\b(?<![+/=])0x[A-Za-z0-9]{40}(?![+/=])\b',


@@ -18,22 +18,20 @@ cd ${AIL_HOME}
if [ -e "${DIR}/AILENV/bin/python" ]; then
ENV_PY="${DIR}/AILENV/bin/python"
export AIL_VENV=${AIL_HOME}/AILENV/
. ./AILENV/bin/activate
elif [ ! -z "$TRAVIS" ]; then
echo "Travis detected"
ENV_PY="~/virtualenv/python3.6/bin/python"
export AIL_VENV="~/virtualenv/python3.6/"
else
echo "Please make sure you have a AIL-framework environment, au revoir"
exit 1
fi
# redis-server is bundled during install
## [ ! -f "`which redis-server`" ] && echo "'redis-server' is not installed/not on PATH. Please fix and run again." && exit 1
export AIL_BIN=${AIL_HOME}/bin/
export AIL_FLASK=${AIL_HOME}/var/www/
export AIL_REDIS=${AIL_HOME}/redis/src/
export AIL_ARDB=${AIL_HOME}/ardb/src/
else
echo "Please make sure you have a AIL-framework environment, au revoir"
exit 1
fi
export PATH=$AIL_VENV/bin:$PATH
export PATH=$AIL_HOME:$PATH
@@ -218,6 +216,8 @@ function launching_scripts {
sleep 0.1
screen -S "Script_AIL" -X screen -t "Tags" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Tags.py; read x"
sleep 0.1
+screen -S "Script_AIL" -X screen -t "Languages" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Languages.py; read x"
+sleep 0.1
screen -S "Script_AIL" -X screen -t "SentimentAnalysis" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./SentimentAnalysis.py; read x"
sleep 0.1
screen -S "Script_AIL" -X screen -t "DbCleaner" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./DbCleaner.py; read x"

bin/Languages.py (new executable file, 33 lines)

@@ -0,0 +1,33 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import sys
import cld3
import time
from packages import Item
from lib import Domain
from pubsublogger import publisher
from Helper import Process
if __name__ == '__main__':
publisher.port = 6380
publisher.channel = 'Script'
# Section name in bin/packages/modules.cfg
config_section = 'Languages'
# Setup the I/O queues
p = Process(config_section)
while True:
message = p.get_from_set()
if message is None:
publisher.debug("{} queue is empty, waiting".format(config_section))
time.sleep(1)
continue
item_id = Item.get_item_id(message)
if Item.is_crawled(item_id):
domain = Item.get_item_domain(item_id)
Domain.add_domain_languages_by_item_id(domain, item_id)
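
The new Languages module consumes item ids from its input queue and, for crawled items, records the detected languages on the item's domain. Like the other modules it needs a matching section in bin/packages/modules.cfg; that file's change is not shown in this excerpt, and the following entry is given only as an assumption of what it would look like:

    [Languages]
    subscribe = Redis_Global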


@@ -57,7 +57,7 @@ def compute_most_posted(server, message):
print(redis_progression_name_set)
-def compute_provider_info(server_trend, server_pasteName, path):
+def compute_provider_info(server_trend, path):
redis_all_provider = 'all_provider_set'
paste = Paste.Paste(path)
@@ -71,7+71,6 @@ def compute_provider_info(server_trend, server_pasteName, path):
redis_providers_name_set = 'providers_set_' + paste_date
# Add/Update in Redis
-server_pasteName.sadd(paste_baseName, path)
server_trend.sadd(redis_all_provider, paste_provider)
num_paste = int(server_trend.hincrby(paste_provider+'_num', paste_date, 1))
@@ -137,12 +136,6 @@ if __name__ == '__main__':
db=p.config.get("ARDB_Trending", "db"),
decode_responses=True)
-r_serv_pasteName = redis.StrictRedis(
-host=p.config.get("Redis_Paste_Name", "host"),
-port=p.config.get("Redis_Paste_Name", "port"),
-db=p.config.get("Redis_Paste_Name", "db"),
-decode_responses=True)
# Endless loop getting messages from the input queue
while True:
# Get one message from the input queue
@@ -159,4 +152,4 @@ if __name__ == '__main__':
if len(message.split(';')) > 1:
compute_most_posted(r_serv_trend, message)
else:
-compute_provider_info(r_serv_trend, r_serv_pasteName, message)
+compute_provider_info(r_serv_trend, message)


@@ -193,9 +193,6 @@ if __name__ == "__main__":
r_onion.sadd('i2p_crawler_queue', msg)
'''
-# Saving the list of extracted onion domains.
-PST.__setattr__(channel, domains_list)
-PST.save_attribute_redis(channel, domains_list)
to_print = 'Onion;{};{};{};'.format(PST.p_source, PST.p_date,
PST.p_name)


@@ -23,7 +23,7 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import Tracker
import regex_helper
-full_item_url = "/showsavedpaste/?paste="
+full_item_url = "/object/item?id="
mail_body_template = "AIL Framework,\nNew occurrence for term tracked regex: {}\nitem id: {}\nurl: {}{}"
dict_regex_tracked = Term.get_regex_tracked_words_dict()
@@ -58,7 +58,7 @@ if __name__ == "__main__":
p = Process(config_section)
max_execution_time = p.config.getint(config_section, "max_execution_time")
-ull_item_url = p.config.get("Notifications", "ail_domain") + full_item_url
+full_item_url = p.config.get("Notifications", "ail_domain") + full_item_url
redis_cache_key = regex_helper.generate_redis_cache_key(module_name)


@@ -20,7 +20,7 @@ from packages import Term
from lib import Tracker
-full_item_url = "/showsavedpaste/?paste="
+full_item_url = "/object/item?id="
mail_body_template = "AIL Framework,\nNew occurrence for term tracked term: {}\nitem id: {}\nurl: {}{}"


@@ -58,10 +58,7 @@ if __name__ == "__main__":
cc_critical = p.config.get("Url", "cc_critical")
# FUNCTIONS #
-publisher.info("Script URL subscribed to channel web_categ")
+publisher.info("Script URL Started")
-# FIXME For retro compatibility
-channel = 'web_categ'
message = p.get_from_set()
prec_filename = None
@@ -150,11 +147,6 @@ if __name__ == "__main__":
domains_list)
if A_values[0] >= 1:
-PST.__setattr__(channel, A_values)
-PST.save_attribute_redis(channel, (A_values[0],
-list(A_values[1])))
pprint.pprint(A_values)
publisher.info('Url;{};{};{};Checked {} URL;{}'.format(
PST.p_source, PST.p_date, PST.p_name, A_values[0], PST.p_rel_path))


@@ -9,6 +9,24 @@ from uuid import uuid4
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import ConfigLoader
sys.path.append('../../configs/keys')
try:
from thehive4py.api import TheHiveApi
import thehive4py.exceptions
from theHiveKEYS import the_hive_url, the_hive_key, the_hive_verifycert
if the_hive_url == '':
is_hive_connected = False
else:
is_hive_connected = TheHiveApi(the_hive_url, the_hive_key, cert=the_hive_verifycert)
except:
is_hive_connected = False
if is_hive_connected != False:
try:
is_hive_connected.get_alert(0)
is_hive_connected = True
except thehive4py.exceptions.AlertException:
is_hive_connected = False
## LOAD CONFIG ##
config_loader = ConfigLoader.ConfigLoader()
r_serv_cache = config_loader.get_redis_conn("Redis_Cache")
@@ -37,6 +55,16 @@ def load_tags_to_export_in_cache():
# save combinaison of tags in cache
pass
def is_hive_connected(): # # TODO: REFRACTOR, put in cache (with retry)
return is_hive_connected
def get_item_hive_cases(item_id):
hive_case = r_serv_metadata.get('hive_cases:{}'.format(item_id))
if hive_case:
hive_case = the_hive_url + '/index.html#/case/{}/details'.format(hive_case)
return hive_case
###########################################################
# # set default
# if r_serv_db.get('hive:auto-alerts') is None:


@@ -41,6 +41,15 @@ class ConfigLoader(object):
db=self.cfg.getint(redis_name, "db"),
decode_responses=decode_responses )
def get_files_directory(self, key_name):
directory_path = self.cfg.get('Directories', key_name)
# full path
if directory_path[0] == '/':
return directory_path
else:
directory_path = os.path.join(os.environ['AIL_HOME'], directory_path)
return directory_path
def get_config_str(self, section, key_name):
return self.cfg.get(section, key_name)


@@ -223,11 +223,11 @@ def get_item_url(correlation_name, value, correlation_type=None):
endpoint = 'crawler_splash.showDomain'
url = url_for(endpoint, domain=value)
elif correlation_name == 'item':
-endpoint = 'showsavedpastes.showsavedpaste'
-url = url_for(endpoint, paste=value)
+endpoint = 'objects_item.showItem'
+url = url_for(endpoint, id=value)
elif correlation_name == 'paste': ### # TODO: remove me
-endpoint = 'showsavedpastes.showsavedpaste'
-url = url_for(endpoint, paste=value)
+endpoint = 'objects_item.showItem'
+url = url_for(endpoint, id=value)
return url
def get_obj_tag_table_keys(object_type):


@@ -9,9 +9,11 @@ The ``Domain``
import os
import sys
-import time
+import itertools
+import re
import redis
import random
+import time
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages/'))
import Cryptocurrency
@@ -24,6 +26,7 @@ import Tag
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
import Correlate_object
+import Language
import Screenshot
import Username
@@ -66,6 +69,15 @@ def sanitize_domain_type(domain_type):
else:
return 'regular'
def sanitize_domain_types(l_domain_type):
all_domain_types = get_all_domains_type()
if not l_domain_type:
return all_domain_types
for domain_type in l_domain_type:
if domain_type not in all_domain_types:
return all_domain_types
return l_domain_type
######## DOMAINS ########
def get_all_domains_type():
return ['onion', 'regular']
@@ -210,6 +222,15 @@ def get_domains_up_by_filers(domain_type, date_from=None, date_to=None, tags=[],
else:
return None
## TODO: filters:
# - tags
# - languages
# - daterange UP
def get_domains_by_filters():
pass
def create_domains_metadata_list(list_domains, domain_type):
l_domains = []
for domain in list_domains:
@@ -218,9 +239,144 @@ def create_domains_metadata_list(list_domains, domain_type):
else:
dom_type = domain_type
l_domains.append(get_domain_metadata(domain, dom_type, first_seen=True, last_ckeck=True, status=True,
-ports=True, tags=True, screenshot=True, tags_safe=True))
+ports=True, tags=True, languages=True, screenshot=True, tags_safe=True))
return l_domains
def sanithyse_domain_name_to_search(name_to_search, domain_type):
if domain_type == 'onion':
r_name = r'[a-z0-9\.]+'
else:
r_name = r'[a-zA-Z0-9\.-_]+'
# invalid domain name
if not re.fullmatch(r_name, name_to_search):
return None
return name_to_search.replace('.', '\.')
def search_domains_by_name(name_to_search, domain_types, r_pos=False):
domains_dict = {}
for domain_type in domain_types:
r_name = sanithyse_domain_name_to_search(name_to_search, domain_type)
if not name_to_search:
break
r_name = re.compile(r_name)
for domain in get_all_domains_up(domain_type):
res = re.search(r_name, domain)
if res:
domains_dict[domain] = {}
if r_pos:
domains_dict[domain]['hl-start'] = res.start()
domains_dict[domain]['hl-end'] = res.end()
return domains_dict
def api_search_domains_by_name(name_to_search, domains_types, domains_metadata=False, page=1):
domains_types = sanitize_domain_types(domains_types)
domains_dict = search_domains_by_name(name_to_search, domains_types, r_pos=True)
l_domains = sorted(domains_dict.keys())
l_domains = paginate_iterator(l_domains, nb_obj=28, page=page)
if not domains_metadata:
return l_domains
else:
l_dict_domains = []
for domain in l_domains['list_elem']:
dict_domain = get_domain_metadata(domain, get_domain_type(domain), first_seen=True, last_ckeck=True,
status=True, ports=True, tags=True, tags_safe=True,
languages=True, screenshot=True)
dict_domain = {**domains_dict[domain], **dict_domain}
l_dict_domains.append(dict_domain)
l_domains['list_elem'] = l_dict_domains
l_domains['search'] = name_to_search
return l_domains
######## LANGUAGES ########
def get_all_domains_languages():
return r_serv_onion.smembers('all_domains_languages')
def get_domains_by_languages(languages, l_domain_type=[]):
l_domain_type = sanitize_domain_types(l_domain_type)
if not languages:
return []
elif len(languages) == 1:
return get_all_domains_by_language(languages[0], l_domain_type=l_domain_type)
else:
all_domains_t = []
for domain_type in l_domain_type:
l_keys_name = []
for language in languages:
l_keys_name.append('language:domains:{}:{}'.format(domain_type, language))
res = r_serv_onion.sinter(l_keys_name[0], *l_keys_name[1:])
if res:
all_domains_t.append(res)
return list(itertools.chain.from_iterable(all_domains_t))
def get_all_domains_by_language(language, l_domain_type=[]):
l_domain_type = sanitize_domain_types(l_domain_type)
if len(l_domain_type) == 1:
return r_serv_onion.smembers('language:domains:{}:{}'.format(l_domain_type[0], language))
else:
l_keys_name = []
for domain_type in l_domain_type:
l_keys_name.append('language:domains:{}:{}'.format(domain_type, language))
return r_serv_onion.sunion(l_keys_name[0], *l_keys_name[1:])
def get_domain_languages(domain, r_list=False):
res = r_serv_onion.smembers('domain:language:{}'.format(domain))
if r_list:
return list(res)
else:
return res
def add_domain_language(domain, language):
language = language.split('-')[0]
domain_type = get_domain_type(domain)
r_serv_onion.sadd('all_domains_languages', language)
r_serv_onion.sadd('all_domains_languages:{}'.format(domain_type), language)
r_serv_onion.sadd('language:domains:{}:{}'.format(domain_type, language), domain)
r_serv_onion.sadd('domain:language:{}'.format(domain), language)
def add_domain_languages_by_item_id(domain, item_id):
for lang in Item.get_item_languages(item_id, min_proportion=0.2, min_probability=0.8):
add_domain_language(domain, lang.language)
def delete_domain_languages(domain):
domain_type = get_domain_type(domain)
for language in get_domain_languages(domain):
r_serv_onion.srem('language:domains:{}:{}'.format(domain_type, language), domain)
if not r_serv_onion.exists('language:domains:{}:{}'.format(domain_type, language)):
r_serv_onion.srem('all_domains_languages:{}'.format(domain_type), language)
exist_domain_type_lang = False
for domain_type in get_all_domains_type():
if r_serv_onion.sismembers('all_domains_languages:{}'.format(domain_type), language):
exist_domain_type_lang = True
continue
if not exist_domain_type_lang:
r_serv_onion.srem('all_domains_languages', language)
r_serv_onion.delete('domain:language:{}'.format(domain))
def _delete_all_domains_languages():
for language in get_all_domains_languages():
for domain in get_all_domains_by_language(language):
delete_domain_languages(domain)
## API ##
## TODO: verify domains type + languages list
## TODO: add pagination
def api_get_domains_by_languages(domains_types, languages, domains_metadata=False, page=1):
l_domains = sorted(get_domains_by_languages(languages, l_domain_type=domains_types))
l_domains = paginate_iterator(l_domains, nb_obj=28, page=page)
if not domains_metadata:
return l_domains
else:
l_dict_domains = []
for domain in l_domains['list_elem']:
l_dict_domains.append(get_domain_metadata(domain, get_domain_type(domain), first_seen=True, last_ckeck=True,
status=True, ports=True, tags=True, tags_safe=True,
languages=True, screenshot=True))
l_domains['list_elem'] = l_dict_domains
return l_domains
####---- ----####
######## DOMAIN ########
def get_domain_type(domain):
@@ -474,6 +630,14 @@ def get_domain_last_origin(domain, domain_type):
origin_item = r_serv_onion.hget('{}_metadata:{}'.format(domain_type, domain), 'paste_parent')
return origin_item
def get_domain_father(domain, domain_type):
dict_father = {}
dict_father['item_father'] = r_serv_onion.hget('{}_metadata:{}'.format(domain_type, domain), 'paste_parent')
if dict_father['item_father'] != 'auto' and dict_father['item_father'] != 'manual':
if Item.is_crawled(dict_father['item_father']):
dict_father['domain_father'] = Item.get_domain(dict_father['item_father'])
return dict_father
def get_domain_tags(domain):
'''
Retun all tags of a given domain.
@@ -490,7 +654,7 @@ def get_domain_random_screenshot(domain):
'''
return Screenshot.get_randon_domain_screenshot(domain)
-def get_domain_metadata(domain, domain_type, first_seen=True, last_ckeck=True, status=True, ports=True, tags=False, tags_safe=False, screenshot=False):
+def get_domain_metadata(domain, domain_type, first_seen=True, last_ckeck=True, status=True, ports=True, tags=False, tags_safe=False, languages=False, screenshot=False):
'''
Get Domain basic metadata
@@ -508,6 +672,7 @@ def get_domain_metadata(domain, domain_type, first_seen=True, last_ckeck=True, s
'''
dict_metadata = {}
dict_metadata['id'] = domain
+dict_metadata['type'] = domain_type
if first_seen:
res = get_domain_first_seen(domain, domain_type=domain_type)
if res is not None:
@@ -527,6 +692,8 @@ def get_domain_metadata(domain, domain_type, first_seen=True, last_ckeck=True, s
dict_metadata['is_tags_safe'] = Tag.is_tags_safe(dict_metadata['tags'])
else:
dict_metadata['is_tags_safe'] = Tag.is_tags_safe(get_domain_tags(domain))
+if languages:
+dict_metadata['languages'] = Language.get_languages_from_iso(get_domain_languages(domain, r_list=True), sort=True)
if screenshot:
dict_metadata['screenshot'] = get_domain_random_screenshot(domain)
return dict_metadata
@@ -744,6 +911,9 @@ class Domain(object):
'''
return get_domain_last_origin(self.domain, self.type)
+def get_domain_father(self):
+return get_domain_father(self.domain, self.type)
def domain_was_up(self):
'''
Return True if this domain was UP at least one time
@@ -785,6 +955,14 @@ class Domain(object):
'''
return get_domain_tags(self.domain)
def get_domain_languages(self):
'''
Retun all languages of a given domain.
:param domain: domain name
'''
return get_domain_languages(self.domain)
def get_domain_correlation(self):
'''
Retun all correlation of a given domain.
@@ -809,3 +987,6 @@ class Domain(object):
'''
port = sanathyse_port(port, self.domain, self.type, strict=True, current_port=self.current_port)
return get_domain_items_crawled(self.domain, self.type, port, epoch=epoch, items_link=items_link, item_screenshot=item_screenshot, item_tag=item_tag)
if __name__ == '__main__':
search_domains_by_name('c', 'onion')

bin/lib/Language.py (new executable file, 240 lines)

@@ -0,0 +1,240 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import sys
import redis
dict_iso_languages = {
'af': 'Afrikaans',
'am': 'Amharic',
'ar': 'Arabic',
'bg': 'Bulgarian',
'bn': 'Bangla',
'bs': 'Bosnian',
'ca': 'Catalan',
'ceb': 'Cebuano',
'co': 'Corsican',
'cs': 'Czech',
'cy': 'Welsh',
'da': 'Danish',
'de': 'German',
'el': 'Greek',
'en': 'English',
'eo': 'Esperanto',
'es': 'Spanish',
'et': 'Estonian',
'eu': 'Basque',
'fa': 'Persian',
'fi': 'Finnish',
'fil': 'Filipino',
'fr': 'French',
'fy': 'Western Frisian',
'ga': 'Irish',
'gd': 'Scottish Gaelic',
'gl': 'Galician',
'gu': 'Gujarati',
'ha': 'Hausa',
'haw': 'Hawaiian',
'hi': 'Hindi',
'hmn': 'Hmong',
'hr': 'Croatian',
'ht': 'Haitian Creole',
'hu': 'Hungarian',
'hy': 'Armenian',
'id': 'Indonesian',
'ig': 'Igbo',
'is': 'Icelandic',
'it': 'Italian',
'iw': 'Hebrew',
'ja': 'Japanese',
'jv': 'Javanese',
'ka': 'Georgian',
'kk': 'Kazakh',
'km': 'Khmer',
'kn': 'Kannada',
'ko': 'Korean',
'ku': 'Kurdish',
'ky': 'Kyrgyz',
'la': 'Latin',
'lb': 'Luxembourgish',
'lo': 'Lao',
'lt': 'Lithuanian',
'lv': 'Latvian',
'mg': 'Malagasy',
'mi': 'Maori',
'mk': 'Macedonian',
'ml': 'Malayalam',
'mn': 'Mongolian',
'mr': 'Marathi',
'ms': 'Malay',
'mt': 'Maltese',
'my': 'Burmese',
'ne': 'Nepali',
'nl': 'Dutch',
'no': 'Norwegian',
'ny': 'Nyanja',
'pa': 'Punjabi',
'pl': 'Polish',
'ps': 'Pashto',
'pt': 'Portuguese',
'ro': 'Romanian',
'ru': 'Russian',
'sd': 'Sindhi',
'si': 'Sinhala',
'sk': 'Slovak',
'sl': 'Slovenian',
'sm': 'Samoan',
'sn': 'Shona',
'so': 'Somali',
'sq': 'Albanian',
'sr': 'Serbian',
'st': 'Southern Sotho',
'su': 'Sundanese',
'sv': 'Swedish',
'sw': 'Swahili',
'ta': 'Tamil',
'te': 'Telugu',
'tg': 'Tajik',
'th': 'Thai',
'tr': 'Turkish',
'uk': 'Ukrainian',
'ur': 'Urdu',
'uz': 'Uzbek',
'vi': 'Vietnamese',
'xh': 'Xhosa',
'yi': 'Yiddish',
'yo': 'Yoruba',
'zh': 'Chinese',
'zu': 'Zulu'
}
dict_languages_iso = {
'Afrikaans': 'af',
'Amharic': 'am',
'Arabic': 'ar',
'Bulgarian': 'bg',
'Bangla': 'bn',
'Bosnian': 'bs',
'Catalan': 'ca',
'Cebuano': 'ceb',
'Corsican': 'co',
'Czech': 'cs',
'Welsh': 'cy',
'Danish': 'da',
'German': 'de',
'Greek': 'el',
'English': 'en',
'Esperanto': 'eo',
'Spanish': 'es',
'Estonian': 'et',
'Basque': 'eu',
'Persian': 'fa',
'Finnish': 'fi',
'Filipino': 'fil',
'French': 'fr',
'Western Frisian': 'fy',
'Irish': 'ga',
'Scottish Gaelic': 'gd',
'Galician': 'gl',
'Gujarati': 'gu',
'Hausa': 'ha',
'Hawaiian': 'haw',
'Hindi': 'hi',
'Hmong': 'hmn',
'Croatian': 'hr',
'Haitian Creole': 'ht',
'Hungarian': 'hu',
'Armenian': 'hy',
'Indonesian': 'id',
'Igbo': 'ig',
'Icelandic': 'is',
'Italian': 'it',
'Hebrew': 'iw',
'Japanese': 'ja',
'Javanese': 'jv',
'Georgian': 'ka',
'Kazakh': 'kk',
'Khmer': 'km',
'Kannada': 'kn',
'Korean': 'ko',
'Kurdish': 'ku',
'Kyrgyz': 'ky',
'Latin': 'la',
'Luxembourgish': 'lb',
'Lao': 'lo',
'Lithuanian': 'lt',
'Latvian': 'lv',
'Malagasy': 'mg',
'Maori': 'mi',
'Macedonian': 'mk',
'Malayalam': 'ml',
'Mongolian': 'mn',
'Marathi': 'mr',
'Malay': 'ms',
'Maltese': 'mt',
'Burmese': 'my',
'Nepali': 'ne',
'Dutch': 'nl',
'Norwegian': 'no',
'Nyanja': 'ny',
'Punjabi': 'pa',
'Polish': 'pl',
'Pashto': 'ps',
'Portuguese': 'pt',
'Romanian': 'ro',
'Russian': 'ru',
'Sindhi': 'sd',
'Sinhala': 'si',
'Slovak': 'sk',
'Slovenian': 'sl',
'Samoan': 'sm',
'Shona': 'sn',
'Somali': 'so',
'Albanian': 'sq',
'Serbian': 'sr',
'Southern Sotho': 'st',
'Sundanese': 'su',
'Swedish': 'sv',
'Swahili': 'sw',
'Tamil': 'ta',
'Telugu': 'te',
'Tajik': 'tg',
'Thai': 'th',
'Turkish': 'tr',
'Ukrainian': 'uk',
'Urdu': 'ur',
'Uzbek': 'uz',
'Vietnamese': 'vi',
'Xhosa': 'xh',
'Yiddish': 'yi',
'Yoruba': 'yo',
'Chinese': 'zh',
'Zulu': 'zu'
}
def get_language_from_iso(iso_language):
return dict_iso_languages.get(iso_language, None)
def get_languages_from_iso(l_iso_languages, sort=False):
l_languages = []
for iso_language in l_iso_languages:
language = get_language_from_iso(iso_language)
if language:
l_languages.append(language)
if sort:
l_languages = sorted(l_languages)
return l_languages
def get_iso_from_language(language):
return dict_languages_iso.get(language, None)
def get_iso_from_languages(l_languages, sort=False):
l_iso = []
for language in l_languages:
iso_lang = get_iso_from_language(language)
if iso_lang:
l_iso.append(iso_lang)
if sort:
l_iso = sorted(l_iso)
return l_iso
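
A minimal usage sketch for the ISO helpers above (illustrative, not part of the committed file; assumes bin/lib is on sys.path, as the callers arrange):

    import Language
    # ISO 639-1 codes to display names; unknown codes are silently skipped
    Language.get_languages_from_iso(['fr', 'en', 'xx'], sort=True)     # ['English', 'French']
    # display names back to ISO codes
    Language.get_iso_from_languages(['French', 'English'], sort=True)  # ['en', 'fr']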


@@ -20,7 +20,7 @@ import ConfigLoader
config_loader = ConfigLoader.ConfigLoader()
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
-SCREENSHOT_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"), 'screenshot')
+SCREENSHOT_FOLDER = config_loader.get_files_directory('screenshot')
config_loader = None
# get screenshot relative path


@@ -2,19 +2,78 @@
# -*-coding:UTF-8 -*
import os
+import re
import sys
import time
import redis
+import uuid
import yara
+import datetime
+from flask import escape
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
#import item_basic
config_loader = ConfigLoader.ConfigLoader()
+r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_tracker = config_loader.get_redis_conn("ARDB_Tracker")
config_loader = None
+email_regex = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}'
+email_regex = re.compile(email_regex)
+special_characters = set('[<>~!?@#$%^&*|()_-+={}":;,.\'\n\r\t]/\\')
+special_characters.add('\\s')
###############
#### UTILS ####
def is_valid_uuid_v4(UUID):
if not UUID:
return False
UUID = UUID.replace('-', '')
try:
uuid_test = uuid.UUID(hex=UUID, version=4)
return uuid_test.hex == UUID
except:
return False
def is_valid_regex(tracker_regex):
try:
re.compile(tracker_regex)
return True
except:
return False
def is_valid_mail(email):
result = email_regex.match(email)
if result:
return True
else:
return False
def verify_mail_list(mail_list):
for mail in mail_list:
if not is_valid_mail(mail):
return ({'status': 'error', 'reason': 'Invalid email', 'value': mail}, 400)
return None
##-- UTILS --##
###############
def get_tracker_by_uuid(tracker_uuid):
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'tracked')
def get_tracker_type(tracker_uuid):
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'type')
def get_tracker_level(tracker_uuid):
return int(r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'level'))
def get_tracker_user_id(tracker_uuid):
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'user_id')
def get_tracker_uuid_list(tracker, tracker_type):
return list(r_serv_tracker.smembers('all:tracker_uuid:{}:{}'.format(tracker_type, tracker)))
@@ -27,6 +86,51 @@ def get_tracker_mails(tracker_uuid):
def get_tracker_description(tracker_uuid):
return r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'description')
def get_tracker_first_seen(tracker_uuid):
res = r_serv_tracker.zrange('tracker:stat:{}'.format(tracker_uuid), 0, 0)
if res:
return res[0]
else:
return None
def get_tracker_last_seen(tracker_uuid):
res = r_serv_tracker.zrevrange('tracker:stat:{}'.format(tracker_uuid), 0, 0)
if res:
return res[0]
else:
return None
def get_tracker_metedata(tracker_uuid, user_id=False, description=False, level=False, tags=False, mails=False, sparkline=False):
dict_uuid = {}
dict_uuid['tracker'] = get_tracker_by_uuid(tracker_uuid)
dict_uuid['type'] = get_tracker_type(tracker_uuid)
dict_uuid['date'] = r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'date')
dict_uuid['description'] = get_tracker_description(tracker_uuid)
dict_uuid['first_seen'] = get_tracker_first_seen(tracker_uuid)
dict_uuid['last_seen'] = get_tracker_last_seen(tracker_uuid)
if user_id:
dict_uuid['user_id'] = get_tracker_user_id(tracker_uuid)
if level:
dict_uuid['level'] = get_tracker_level(tracker_uuid)
if mails:
dict_uuid['mails'] = get_tracker_mails(tracker_uuid)
if tags:
dict_uuid['tags'] = get_tracker_tags(tracker_uuid)
if sparkline:
dict_uuid['sparkline'] = get_tracker_sparkline(tracker_uuid)
dict_uuid['uuid'] = tracker_uuid
return dict_uuid
def get_tracker_sparkline(tracker_uuid, num_day=6):
date_range_sparkline = Date.get_date_range(num_day)
sparklines_value = []
for date_day in date_range_sparkline:
nb_seen_this_day = r_serv_tracker.scard('tracker:item:{}:{}'.format(tracker_uuid, date_day))
if nb_seen_this_day is None:
nb_seen_this_day = 0
sparklines_value.append(int(nb_seen_this_day))
return sparklines_value
def add_tracked_item(tracker_uuid, item_id, item_date):
# track item
r_serv_tracker.sadd('tracker:item:{}:{}'.format(tracker_uuid, item_date), item_id)
@@ -46,6 +150,234 @@ def get_tracker_last_updated_by_type(tracker_type):
epoch_update = 0
return float(epoch_update)
######################
#### TRACKERS ACL ####
# # TODO: use new package => duplicate fct
def is_in_role(user_id, role):
if r_serv_db.sismember('user_role:{}'.format(role), user_id):
return True
else:
return False
def is_tracker_in_global_level(tracker, tracker_type):
res = r_serv_tracker.smembers('all:tracker_uuid:{}:{}'.format(tracker_type, tracker))
if res:
for elem_uuid in res:
if r_serv_tracker.hget('tracker:{}'.format(elem_uuid), 'level')=='1':
return True
return False
def is_tracker_in_user_level(tracker, tracker_type, user_id):
res = r_serv_tracker.smembers('user:tracker:{}'.format(user_id))
if res:
for elem_uuid in res:
if r_serv_tracker.hget('tracker:{}'.format(elem_uuid), 'tracked')== tracker:
if r_serv_tracker.hget('tracker:{}'.format(elem_uuid), 'type')== tracker_type:
return True
return False
def api_is_allowed_to_edit_tracker(tracker_uuid, user_id):
if not is_valid_uuid_v4(tracker_uuid):
return ({"status": "error", "reason": "Invalid uuid"}, 400)
tracker_creator = r_serv_tracker.hget('tracker:{}'.format(tracker_uuid), 'user_id')
if not tracker_creator:
return ({"status": "error", "reason": "Unknown uuid"}, 404)
if not is_in_role(user_id, 'admin') and user_id != tracker_creator:
return ({"status": "error", "reason": "Access Denied"}, 403)
return ({"uuid": tracker_uuid}, 200)
##-- ACL --##
#### CREATE TRACKER ####
def api_validate_tracker_to_add(tracker , tracker_type, nb_words=1):
if tracker_type=='regex':
if not is_valid_regex(tracker):
return ({"status": "error", "reason": "Invalid regex"}, 400)
elif tracker_type=='word' or tracker_type=='set':
# force lowercase
tracker = tracker.lower()
word_set = set(tracker)
set_inter = word_set.intersection(special_characters)
if set_inter:
return ({"status": "error", "reason": f'special character(s) not allowed: {set_inter}', "message": "Please use a python regex or remove all special characters"}, 400)
words = tracker.split()
# not a word
if tracker_type=='word' and len(words)>1:
tracker_type = 'set'
# ouput format: tracker1,tracker2,tracker3;2
if tracker_type=='set':
try:
nb_words = int(nb_words)
except:
nb_words = 1
if nb_words==0:
nb_words = 1
words_set = set(words)
words_set = sorted(words_set)
if nb_words > len(words_set):
nb_words = len(words_set)
tracker = ",".join(words_set)
tracker = "{};{}".format(tracker, nb_words)
elif tracker_type=='yara_custom':
if not is_valid_yara_rule(tracker):
return ({"status": "error", "reason": "Invalid custom Yara Rule"}, 400)
elif tracker_type=='yara_default':
if not is_valid_default_yara_rule(tracker):
return ({"status": "error", "reason": "The Yara Rule doesn't exist"}, 400)
else:
return ({"status": "error", "reason": "Incorrect type"}, 400)
return ({"status": "success", "tracker": tracker, "type": tracker_type}, 200)
def create_tracker(tracker, tracker_type, user_id, level, tags, mails, description, dashboard=0, tracker_uuid=None):
# edit tracker
if tracker_uuid:
edit_tracker = True
# check if type changed
old_type = get_tracker_type(tracker_uuid)
old_tracker = get_tracker_by_uuid(tracker_uuid)
old_level = get_tracker_level(tracker_uuid)
tracker_user_id = get_tracker_user_id(tracker_uuid)
# Create new tracker
else:
edit_tracker = False
# generate tracker uuid
tracker_uuid = str(uuid.uuid4())
old_type = None
old_tracker = None
# YARA
if tracker_type == 'yara_custom' or tracker_type == 'yara_default':
# delete yara rule
if tracker_type == 'yara_default' and old_type == 'yara':
if not is_default_yara_rule(old_tracker):
filepath = get_yara_rule_file_by_tracker_name(old_tracker)
if filepath:
os.remove(filepath)
tracker = save_yara_rule(tracker_type, tracker, tracker_uuid=tracker_uuid)
tracker_type = 'yara'
# create metadata
r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'tracked', tracker)
r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'type', tracker_type)
r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'date', datetime.date.today().strftime("%Y%m%d"))
r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'level', level)
r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'dashboard', dashboard)
if not edit_tracker:
r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'user_id', user_id)
if description:
r_serv_tracker.hset('tracker:{}'.format(tracker_uuid), 'description', description)
# type change
if edit_tracker:
r_serv_tracker.srem('all:tracker:{}'.format(old_type), old_tracker)
r_serv_tracker.srem('all:tracker_uuid:{}:{}'.format(old_type, old_tracker), tracker_uuid)
if level != old_level:
if level == 0:
r_serv_tracker.srem('global:tracker', tracker_uuid)
elif level == 1:
r_serv_tracker.srem('user:tracker:{}'.format(tracker_user_id), tracker_uuid)
if tracker_type != old_type:
if old_level == 0:
r_serv_tracker.srem('user:tracker:{}:{}'.format(tracker_user_id, old_type), tracker_uuid)
elif old_level == 1:
r_serv_tracker.srem('global:tracker:{}'.format(old_type), tracker_uuid)
if old_type=='yara':
if not is_default_yara_rule(old_tracker):
filepath = get_yara_rule_file_by_tracker_name(old_tracker)
if filepath:
os.remove(filepath)
# create all tracker set
r_serv_tracker.sadd('all:tracker:{}'.format(tracker_type), tracker)
# create tracker - uuid map
r_serv_tracker.sadd('all:tracker_uuid:{}:{}'.format(tracker_type, tracker), tracker_uuid)
# add display level set
if level == 0: # user only
r_serv_tracker.sadd('user:tracker:{}'.format(user_id), tracker_uuid)
r_serv_tracker.sadd('user:tracker:{}:{}'.format(user_id, tracker_type), tracker_uuid)
elif level == 1: # global
r_serv_tracker.sadd('global:tracker', tracker_uuid)
r_serv_tracker.sadd('global:tracker:{}'.format(tracker_type), tracker_uuid)
# create tracker tags list
for tag in tags:
r_serv_tracker.sadd('tracker:tags:{}'.format(tracker_uuid), escape(tag) )
# create tracker tags mail notification list
for mail in mails:
r_serv_tracker.sadd('tracker:mail:{}'.format(tracker_uuid), escape(mail) )
# toggle refresh module tracker list/set
r_serv_tracker.set('tracker:refresh:{}'.format(tracker_type), time.time())
if tracker_type != old_type: # toggle old type refresh
r_serv_tracker.set('tracker:refresh:{}'.format(old_type), time.time())
return tracker_uuid
def api_add_tracker(dict_input, user_id):
tracker = dict_input.get('tracker', None)
if not tracker:
return ({"status": "error", "reason": "Tracker not provided"}, 400)
tracker_type = dict_input.get('type', None)
if not tracker_type:
return ({"status": "error", "reason": "Tracker type not provided"}, 400)
nb_words = dict_input.get('nb_words', 1)
description = dict_input.get('description', '')
description = escape(description)
res = api_validate_tracker_to_add(tracker , tracker_type, nb_words=nb_words)
if res[1]!=200:
return res
tracker = res[0]['tracker']
tracker_type = res[0]['type']
tags = dict_input.get('tags', [])
mails = dict_input.get('mails', [])
res = verify_mail_list(mails)
if res:
return res
## TODO: add dashboard key
level = dict_input.get('level', 1)
try:
level = int(level)
if level not in range(0, 1):
level = 1
except:
level = 1
tracker_uuid = dict_input.get('uuid', None)
# check edit ACL
if tracker_uuid:
res = api_is_allowed_to_edit_tracker(tracker_uuid, user_id)
if res[1] != 200:
return res
else:
# check if tracker already tracked in global
if level==1:
if is_tracker_in_global_level(tracker, tracker_type) and not tracker_uuid:
return ({"status": "error", "reason": "Tracker already exist"}, 409)
else:
if is_tracker_in_user_level(tracker, tracker_type, user_id) and not tracker_uuid:
return ({"status": "error", "reason": "Tracker already exist"}, 409)
tracker_uuid = create_tracker(tracker , tracker_type, user_id, level, tags, mails, description, tracker_uuid=tracker_uuid)
return ({'tracker': tracker, 'type': tracker_type, 'uuid': tracker_uuid}, 200)
##-- CREATE TRACKER --##
##############
#### YARA ####
def get_yara_rules_dir():
return os.path.join(os.environ['AIL_BIN'], 'trackers', 'yara')
@@ -99,13 +431,32 @@ def is_valid_yara_rule(yara_rule):
except:
return False
-def is_valid_default_yara_rule(yara_rule):
+def is_default_yara_rule(tracked_yara_name):
yara_dir = get_yara_rules_dir()
filename = os.path.join(yara_dir, tracked_yara_name)
filename = os.path.realpath(filename)
try:
if tracked_yara_name.split('/')[0] == 'custom-rules':
return False
except:
return False
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
return False
else:
if os.path.isfile(filename):
return True
return False
def is_valid_default_yara_rule(yara_rule, verbose=True):
yara_dir = get_yara_rules_default_dir()
filename = os.path.join(yara_dir, yara_rule)
filename = os.path.realpath(filename)
# incorrect filename
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
+if verbose:
+print('error: file transversal')
+print(yara_dir)
+print(filename)
return False
else:
if os.path.isfile(filename):
@@ -124,6 +475,17 @@ def save_yara_rule(yara_rule_type, yara_rule, tracker_uuid=None):
filename = os.path.join('ail-yara-rules', 'rules', yara_rule)
return filename
def get_yara_rule_file_by_tracker_name(tracked_yara_name):
yara_dir = get_yara_rules_dir()
filename = os.path.join(yara_dir, tracked_yara_name)
filename = os.path.realpath(filename)
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
print('error: file transversal')
print(yara_dir)
print(filename)
return None
return filename
def get_yara_rule_content(yara_rule):
yara_dir = get_yara_rules_dir()
filename = os.path.join(yara_dir, yara_rule)
@@ -137,8 +499,23 @@ def get_yara_rule_content(yara_rule):
rule_content = f.read()
return rule_content
-##-- YARA --##
+def api_get_default_rule_content(default_yara_rule):
yara_dir = get_yara_rules_default_dir()
filename = os.path.join(yara_dir, default_yara_rule)
filename = os.path.realpath(filename)
# incorrect filename
if not os.path.commonprefix([filename, yara_dir]) == yara_dir:
return ({'status': 'error', 'reason': 'file transversal detected'}, 400)
if not os.path.isfile(filename):
return ({'status': 'error', 'reason': 'yara rule not found'}, 400)
with open(filename, 'r') as f:
rule_content = f.read()
return ({'rule_name': default_yara_rule, 'content': rule_content}, 200)
##-- YARA --##
if __name__ == '__main__':
res = is_valid_yara_rule('rule dummy { }')
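
For reference, a sketch of the payload the new api_add_tracker() helper above expects; the field names come from the code, the values are purely illustrative:

    dict_input = {
        'tracker': 'ail leak',                   # word/set/regex/yara rule to track
        'type': 'set',                           # 'word', 'set', 'regex', 'yara_custom' or 'yara_default'
        'nb_words': 2,                           # only meaningful for 'set' trackers
        'description': 'example tracker',
        'tags': ['infoleak:example="tracker"'],
        'mails': ['analyst@example.org'],
        'level': 1,                              # 0 = visible to its creator only, 1 = global
    }
    res, status_code = api_add_tracker(dict_input, user_id='user@example.org')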


@@ -49,10 +49,23 @@ faup = Faup()
def generate_uuid():
return str(uuid.uuid4()).replace('-', '')
+# # TODO: remove me ?
def get_current_date():
return datetime.now().strftime("%Y%m%d")
-##-- COMMON --#
+def is_valid_onion_domain(domain):
if not domain.endswith('.onion'):
return False
domain = domain.replace('.onion', '', 1)
if len(domain) == 16: # v2 address
r_onion = r'[a-z0-9]{16}'
if re.match(r_onion, domain):
return True
elif len(domain) == 56: # v3 address
r_onion = r'[a-z0-9]{56}'
if re.fullmatch(r_onion, domain):
return True
return False
################################################################################ ################################################################################
@@ -616,6 +629,19 @@ def api_create_crawler_task(user_id, url, screenshot=True, har=True, depth_limit
crawler_type=crawler_type,
auto_crawler=auto_crawler, crawler_delta=crawler_delta, cookiejar_uuid=cookiejar_uuid, user_agent=user_agent)
return None
#### ####
#### SPLASH API ####
def is_splash_reachable(splash_url, timeout=1.0):
try:
r = requests.get(splash_url , timeout=timeout)
except Exception:
return False
if r.status_code == 200:
return True
else:
return False
#### ####
def is_redirection(domain, last_url):
@@ -683,6 +709,15 @@ def save_har(har_dir, item_id, har_content):
with open(filename, 'w') as f:
f.write(json.dumps(har_content))
# # TODO: FIXME
def api_add_crawled_item(dict_crawled):
domain = None
# create item_id item_id =
save_crawled_item(item_id, response.data['html'])
create_item_metadata(item_id, domain, 'last_url', port, 'father')
#### CRAWLER QUEUES ####
def get_all_crawlers_queues_types():
all_queues_types = set()
@@ -757,7 +792,6 @@ def get_nb_elem_to_crawl_by_type(queue_type):
# SPLASH MANAGER #
# #
# # # # # # # # # # # # # # # # # # # # # # # #
def get_splash_manager_url(reload=False): # TODO: add in db config
return r_serv_onion.get('crawler:splash:manager:url')


@@ -113,9 +113,6 @@ def get_item_parent(item_id):
def get_item_children(item_id):
return list(r_serv_metadata.smembers('paste_children:{}'.format(item_id)))
-def add_item_parent(item_parent, item_id):
-return item_basic.add_item_parent(item_parent, item_id)
# # TODO: handle domain last origin in domain lib
def _delete_node(item_id):
# only if item isn't deleted
@@ -169,3 +166,38 @@ def add_map_obj_id_item_id(obj_id, item_id, obj_type):
# delete twitter id
##-- --##
## COMMON ##
def _get_dir_source_name(directory, source_name=None, l_sources_name=set()):
if source_name:
l_dir = os.listdir(os.path.join(directory, source_name))
else:
l_dir = os.listdir(directory)
# empty directory
if not l_dir:
return l_sources_name.add(source_name)
return l_sources_name
else:
for src_name in l_dir:
if len(src_name) == 4:
try:
int(src_name)
l_sources_name.add(os.path.join(source_name))
return l_sources_name
except:
pass
if source_name:
src_name = os.path.join(source_name, src_name)
l_sources_name = _get_dir_source_name(directory, source_name=src_name, l_sources_name=l_sources_name)
return l_sources_name
def get_all_items_sources():
res = _get_dir_source_name(PASTES_FOLDER)
print(res)
##-- --##
if __name__ == '__main__':
get_all_items_sources()


@@ -60,11 +60,10 @@ class HiddenServices(object):
self.paste_directory = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes"))
self.paste_crawled_directory = os.path.join(self.paste_directory, config_loader.get_config_str("Directories", "crawled"))
self.paste_crawled_directory_name = config_loader.get_config_str("Directories", "crawled")
-self.screenshot_directory = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"))
-self.screenshot_directory_screenshot = os.path.join(self.screenshot_directory, 'screenshot')
+self.screenshot_directory = config_loader.get_files_directory('screenshot')
elif type == 'i2p':
self.paste_directory = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"))
-self.screenshot_directory = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"))
+self.screenshot_directory = config_loader.get_files_directory('screenshot')
else:
## TODO: # FIXME: add error
pass


@@ -2,8 +2,11 @@
# -*-coding:UTF-8 -*
import os
+import re
import sys
import redis
+import cld3
+import html2text
from io import BytesIO
@@ -31,7 +34,8 @@ PASTES_FOLDER = os.path.join(os.path.realpath(PASTES_FOLDER), '')
r_cache = config_loader.get_redis_conn("Redis_Cache")
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
-screenshot_directory = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"))
+screenshot_directory = config_loader.get_files_directory('screenshot')
+har_directory = config_loader.get_files_directory('har')
config_loader = None
@@ -59,6 +63,9 @@ def get_item_basename(item_id):
def get_item_size(item_id):
return round(os.path.getsize(os.path.join(PASTES_FOLDER, item_id))/1024.0, 2)
def get_item_encoding(item_id):
return None
def get_lines_info(item_id, item_content=None):
if not item_content:
item_content = get_item_content(item_id)
@@ -73,9 +80,86 @@
return {'nb': nb_line, 'max_length': max_length}
def get_item_metadata(item_id, item_content=None):
## TODO: FIXME ##performance
# encoding
# language
# lines info
item_metadata = {}
item_metadata['date'] = get_item_date(item_id, add_separator=True)
item_metadata['source'] = get_source(item_id)
item_metadata['size'] = get_item_size(item_id)
item_metadata['encoding'] = get_item_encoding(item_id)
item_metadata['lines'] = get_lines_info(item_id, item_content=item_content)
return item_metadata
def get_item_parent(item_id):
return item_basic.get_item_parent(item_id)
def add_item_parent(item_parent, item_id):
return item_basic.add_item_parent(item_parent, item_id)
def get_item_content(item_id):
return item_basic.get_item_content(item_id)
def get_item_content_html2text(item_id, item_content=None, ignore_links=False):
if not item_content:
item_content = get_item_content(item_id)
h = html2text.HTML2Text()
h.ignore_links = ignore_links
h.ignore_images = ignore_links
return h.handle(item_content)
def remove_all_urls_from_content(item_id, item_content=None):
if not item_content:
item_content = get_item_content(item_id)
regex = r'\b(?:http://|https://)?(?:[a-zA-Z\d-]{,63}(?:\.[a-zA-Z\d-]{,63})+)(?:\:[0-9]+)*(?:/(?:$|[a-zA-Z0-9\.\,\?\'\\\+&%\$#\=~_\-]+))*\b'
url_regex = re.compile(regex)
urls = url_regex.findall(item_content)
urls = sorted(urls, key=len, reverse=True)
for url in urls:
item_content = item_content.replace(url, '')
regex_pgp_public_blocs = r'-----BEGIN PGP PUBLIC KEY BLOCK-----[\s\S]+?-----END PGP PUBLIC KEY BLOCK-----'
regex_pgp_signature = r'-----BEGIN PGP SIGNATURE-----[\s\S]+?-----END PGP SIGNATURE-----'
regex_pgp_message = r'-----BEGIN PGP MESSAGE-----[\s\S]+?-----END PGP MESSAGE-----'
re.compile(regex_pgp_public_blocs)
re.compile(regex_pgp_signature)
re.compile(regex_pgp_message)
res = re.findall(regex_pgp_public_blocs, item_content)
for it in res:
item_content = item_content.replace(it, '')
res = re.findall(regex_pgp_signature, item_content)
for it in res:
item_content = item_content.replace(it, '')
res = re.findall(regex_pgp_message, item_content)
for it in res:
item_content = item_content.replace(it, '')
return item_content
def get_item_languages(item_id, min_len=600, num_langs=3, min_proportion=0.2, min_probability=0.7):
all_languages = []
## CLEAN CONTENT ##
content = get_item_content_html2text(item_id, ignore_links=True)
content = remove_all_urls_from_content(item_id, item_content=content)
# REMOVE USELESS SPACE
content = ' '.join(content.split())
#- CLEAN CONTENT -#
#print(content)
#print(len(content))
if len(content) >= min_len:
for lang in cld3.get_frequent_languages(content, num_langs=num_langs):
if lang.proportion >= min_proportion and lang.probability >= min_probability and lang.is_reliable:
all_languages.append(lang)
return all_languages
# API # API
def get_item(request_dict): def get_item(request_dict):
if not request_dict: if not request_dict:
@ -257,6 +341,18 @@ def get_item_list_desc(list_item_id):
def is_crawled(item_id): def is_crawled(item_id):
return item_basic.is_crawled(item_id) return item_basic.is_crawled(item_id)
def get_crawler_matadata(item_id, ltags=None):
dict_crawler = {}
if is_crawled(item_id):
dict_crawler['domain'] = get_item_domain(item_id)
if not ltags:
ltags = Tag.get_obj_tag(item_id)
dict_crawler['is_tags_safe'] = Tag.is_tags_safe(ltags)
dict_crawler['url'] = get_item_link(item_id)
dict_crawler['screenshot'] = get_item_screenshot(item_id)
dict_crawler['har'] = get_item_har_name(item_id)
return dict_crawler
def is_onion(item_id): def is_onion(item_id):
is_onion = False is_onion = False
if len(is_onion) > 62: if len(is_onion) > 62:
@ -293,7 +389,7 @@ def get_item_screenshot(item_id):
return '' return ''
def get_item_har_name(item_id): def get_item_har_name(item_id):
os.path.join(screenshot_directory, item_id) + '.json' har_path = os.path.join(har_directory, item_id) + '.json'
if os.path.isfile(har_path): if os.path.isfile(har_path):
return har_path return har_path
else: else:
@ -322,6 +418,24 @@ def get_item_duplicate(item_id, r_list=True):
return [] return []
return res return res
def get_item_nb_duplicates(item_id):
return r_serv_metadata.scard('dup:{}'.format(item_id))
def get_item_duplicates_dict(item_id):
dict_duplicates = {}
for duplicate in get_item_duplicate(item_id):
duplicate = duplicate[1:-1].replace('\'', '').replace(' ', '').split(',')
duplicate_id = duplicate[1]
if not duplicate_id in dict_duplicates:
dict_duplicates[duplicate_id] = {'date': get_item_date(duplicate_id, add_separator=True), 'algo': {}}
algo = duplicate[0]
if algo == 'tlsh':
similarity = 100 - int(duplicate[2])
else:
similarity = int(duplicate[2])
dict_duplicates[duplicate_id]['algo'][algo] = similarity
return dict_duplicates
def add_item_duplicate(item_id, l_dup): def add_item_duplicate(item_id, l_dup):
for item_dup in l_dup: for item_dup in l_dup:
r_serv_metadata.sadd('dup:{}'.format(item_dup), item_id) r_serv_metadata.sadd('dup:{}'.format(item_dup), item_id)
@ -434,3 +548,17 @@ def delete_domain_node(item_id):
domain_basic.delete_domain_item_core(item_id, domain, port) domain_basic.delete_domain_item_core(item_id, domain, port)
for child_id in get_all_domain_node_by_item_id(item_id): for child_id in get_all_domain_node_by_item_id(item_id):
delete_item(child_id) delete_item(child_id)
# if __name__ == '__main__':
# import Domain
# domain = Domain.Domain('domain.onion')
# for domain_history in domain.get_domain_history():
# domain_item = domain.get_domain_items_crawled(epoch=domain_history[1]) # item_tag
# if "items" in domain_item:
# for item_dict in domain_item['items']:
# item_id = item_dict['id']
# print(item_id)
# for lang in get_item_languages(item_id, min_proportion=0.2, min_probability=0.8):
# print(lang)
# print()
# print(get_item_languages(item_id, min_proportion=0.2, min_probability=0.6)) # 0.7 ?
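Note (not part of the diff): a minimal, self-contained sketch of the pycld3 filtering that get_item_languages() above relies on; the sample text is made up and the thresholds simply mirror the function's defaults (min_len=600, proportion 0.2, probability 0.7).
import cld3
text = "Bonjour tout le monde, ceci est un exemple de texte assez long en francais. " * 10
if len(text) >= 600:  # min_len guard, as in get_item_languages()
    for lang in cld3.get_frequent_languages(text, num_langs=3):
        if lang.proportion >= 0.2 and lang.probability >= 0.7 and lang.is_reliable:
            print(lang.language, round(lang.probability, 2), round(lang.proportion, 2))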

View file

@ -62,7 +62,6 @@ class Paste(object):
config_loader = ConfigLoader.ConfigLoader() config_loader = ConfigLoader.ConfigLoader()
self.cache = config_loader.get_redis_conn("Redis_Queues") self.cache = config_loader.get_redis_conn("Redis_Queues")
self.store = config_loader.get_redis_conn("Redis_Data_Merging")
self.store_metadata = config_loader.get_redis_conn("ARDB_Metadata") self.store_metadata = config_loader.get_redis_conn("ARDB_Metadata")
self.PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) self.PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes"))
@ -327,50 +326,27 @@ class Paste(object):
def get_p_date_path(self): def get_p_date_path(self):
return self.p_date_path return self.p_date_path
def save_all_attributes_redis(self, key=None): # def save_all_attributes_redis(self, key=None):
""" # """
Saving all the attributes in a "Redis-like" Database (Redis, LevelDB) # Saving all the attributes in a "Redis-like" Database (Redis, LevelDB)
#
:param r_serv: -- Connexion to the Database. # :param r_serv: -- Connexion to the Database.
:param key: -- Key of an additionnal set. # :param key: -- Key of an additionnal set.
#
Example: # Example:
import redis # import redis
#
r_serv = redis.StrictRedis(host = 127.0.0.1, port = 6739, db = 0) # r_serv = redis.StrictRedis(host = 127.0.0.1, port = 6739, db = 0)
#
PST = Paste("/home/Zkopkmlk.gz") # PST = Paste("/home/Zkopkmlk.gz")
PST.save_all_attributes_redis(r_serv) # PST.save_all_attributes_redis(r_serv)
#
""" # """
# LevelDB Compatibility #
p = self.store.pipeline(False) # def save_attribute_redis(self, attr_name, value):
p.hset(self.p_path, "p_name", self.p_name) # """
p.hset(self.p_path, "p_size", self.p_size) # Save an attribute as a field
p.hset(self.p_path, "p_mime", self.p_mime) # """
# p.hset(self.p_path, "p_encoding", self.p_encoding)
p.hset(self.p_path, "p_date", self._get_p_date())
p.hset(self.p_path, "p_hash_kind", self._get_p_hash_kind())
p.hset(self.p_path, "p_hash", self.p_hash)
# p.hset(self.p_path, "p_langage", self.p_langage)
# p.hset(self.p_path, "p_nb_lines", self.p_nb_lines)
# p.hset(self.p_path, "p_max_length_line", self.p_max_length_line)
# p.hset(self.p_path, "p_categories", self.p_categories)
p.hset(self.p_path, "p_source", self.p_source)
if key is not None:
p.sadd(key, self.p_path)
else:
pass
p.execute()
def save_attribute_redis(self, attr_name, value):
"""
Save an attribute as a field
"""
if type(value) == set:
self.store.hset(self.p_path, attr_name, json.dumps(list(value)))
else:
self.store.hset(self.p_path, attr_name, json.dumps(value))
def save_attribute_duplicate(self, value): def save_attribute_duplicate(self, value):
""" """

View file

@ -38,6 +38,8 @@ tokenizer = RegexpTokenizer('[\&\~\:\;\,\.\(\)\{\}\|\[\]\\\\/\-/\=\'\"\%\$\?\@\+
gaps=True, discard_empty=True) gaps=True, discard_empty=True)
def is_valid_uuid_v4(UUID): def is_valid_uuid_v4(UUID):
if not UUID:
return False
UUID = UUID.replace('-', '') UUID = UUID.replace('-', '')
try: try:
uuid_test = uuid.UUID(hex=UUID, version=4) uuid_test = uuid.UUID(hex=UUID, version=4)
@ -215,11 +217,12 @@ def parse_tracked_term_to_add(term , term_type, nb_words=1):
words_set = set(words) words_set = set(words)
words_set = sorted(words_set) words_set = sorted(words_set)
if nb_words > len(words_set):
nb_words = len(words_set)
term = ",".join(words_set) term = ",".join(words_set)
term = "{};{}".format(term, nb_words) term = "{};{}".format(term, nb_words)
if nb_words > len(words_set):
nb_words = len(words_set)
elif term_type=='yara_custom': elif term_type=='yara_custom':
if not Tracker.is_valid_yara_rule(term): if not Tracker.is_valid_yara_rule(term):
return ({"status": "error", "reason": "Invalid custom Yara Rule"}, 400) return ({"status": "error", "reason": "Invalid custom Yara Rule"}, 400)
@ -322,8 +325,11 @@ def delete_term(term_uuid):
r_serv_term.delete('tracker:stat:{}'.format(term_uuid)) r_serv_term.delete('tracker:stat:{}'.format(term_uuid))
if term_type == 'yara': if term_type == 'yara':
# # TODO: # delete custom rule
pass if not Tracker.is_default_yara_rule(term):
filepath = Tracker.get_yara_rule_file_by_tracker_name(term)
if filepath:
os.remove(filepath)
def replace_tracker_description(term_uuid, description): def replace_tracker_description(term_uuid, description):
description = escape(description) description = escape(description)

View file

@ -20,9 +20,6 @@ subscribe = Redis_Duplicate
[Indexer] [Indexer]
subscribe = Redis_Global subscribe = Redis_Global
[Attributes]
subscribe = Redis_Global
[DomClassifier] [DomClassifier]
subscribe = Redis_Global subscribe = Redis_Global
@ -46,6 +43,9 @@ publish = Redis_Tags
subscribe = Redis_Global subscribe = Redis_Global
publish = Redis_Tags publish = Redis_Tags
[Languages]
subscribe = Redis_Global
[Categ] [Categ]
subscribe = Redis_Global subscribe = Redis_Global
publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Redis_SourceCode,Redis_Cve,Redis_ApiKey publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Redis_SourceCode,Redis_Cve,Redis_ApiKey
@ -64,15 +64,17 @@ publish = Redis_Duplicate,Redis_ModuleStats,Redis_Tags
[Onion] [Onion]
subscribe = Redis_Onion subscribe = Redis_Onion
publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_Tags,Redis_Crawler publish = Redis_ValidOnion,Redis_Tags,Redis_Crawler
#publish = Redis_Global,Redis_ValidOnion,ZMQ_FetchedOnion #publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_Tags,Redis_Crawler
# TODO remove me
[DumpValidOnion] [DumpValidOnion]
subscribe = Redis_ValidOnion subscribe = Redis_ValidOnion
[Web] [Web]
subscribe = Redis_Web subscribe = Redis_Web
publish = Redis_Url,ZMQ_Url publish = Redis_Url
#publish = Redis_Url,ZMQ_Url
[WebStats] [WebStats]
subscribe = Redis_Url subscribe = Redis_Url

View file

@ -26,7 +26,7 @@ sys.path.append(os.environ['AIL_BIN'])
from Helper import Process from Helper import Process
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib')) sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
#import ConfigLoader import ConfigLoader
import Screenshot import Screenshot
import crawlers import crawlers
@ -105,14 +105,15 @@ class TorSplashCrawler():
'SPLASH_COOKIES_DEBUG': False 'SPLASH_COOKIES_DEBUG': False
}) })
def crawl(self, type, crawler_options, date, requested_mode, url, domain, port, cookies, original_item): def crawl(self, splash_url, type, crawler_options, date, requested_mode, url, domain, port, cookies, original_item):
self.process.crawl(self.crawler, type=type, crawler_options=crawler_options, date=date, requested_mode=requested_mode, url=url, domain=domain, port=port, cookies=cookies, original_item=original_item) self.process.crawl(self.crawler, splash_url=splash_url, type=type, crawler_options=crawler_options, date=date, requested_mode=requested_mode, url=url, domain=domain, port=port, cookies=cookies, original_item=original_item)
self.process.start() self.process.start()
class TorSplashSpider(Spider): class TorSplashSpider(Spider):
name = 'TorSplashSpider' name = 'TorSplashSpider'
def __init__(self, type, crawler_options, date, requested_mode, url, domain, port, cookies, original_item, *args, **kwargs): def __init__(self, splash_url, type, crawler_options, date, requested_mode, url, domain, port, cookies, original_item, *args, **kwargs):
self.splash_url = splash_url
self.domain_type = type self.domain_type = type
self.requested_mode = requested_mode self.requested_mode = requested_mode
self.original_item = original_item self.original_item = original_item
@ -132,7 +133,11 @@ class TorSplashCrawler():
config_section = 'Crawler' config_section = 'Crawler'
self.p = Process(config_section) self.p = Process(config_section)
self.item_dir = os.path.join(self.p.config.get("Directories", "crawled"), date_str ) self.item_dir = os.path.join(self.p.config.get("Directories", "crawled"), date_str )
self.har_dir = os.path.join(os.environ['AIL_HOME'], self.p.config.get("Directories", "crawled_screenshot"), date_str )
config_loader = ConfigLoader.ConfigLoader()
self.har_dir = os.path.join(config_loader.get_files_directory('har') , date_str )
config_loader = None
self.r_serv_log_submit = redis.StrictRedis( self.r_serv_log_submit = redis.StrictRedis(
host=self.p.config.get("Redis_Log_submit", "host"), host=self.p.config.get("Redis_Log_submit", "host"),
port=self.p.config.getint("Redis_Log_submit", "port"), port=self.p.config.getint("Redis_Log_submit", "port"),
@ -245,30 +250,34 @@ class TorSplashCrawler():
self.logger.error(repr(failure)) self.logger.error(repr(failure))
if failure.check(ResponseNeverReceived): if failure.check(ResponseNeverReceived):
request = failure.request ## DEBUG ##
url= request.meta['current_url'] self.logger.error(failure.request)
father = request.meta['father'] if failure.value.response:
self.logger.error(failure.value.response)
## ----- ##
# Extract request metadata
url = failure.request.meta['current_url']
father = failure.request.meta['father']
l_cookies = self.build_request_arg(failure.request.meta['splash']['args']['cookies'])
# Check if Splash restarted
if not crawlers.is_splash_reachable(self.splash_url):
self.logger.error('Splash, ResponseNeverReceived for %s, retry in 30s ...', url)
time.sleep(30)
self.logger.error('Splash, ResponseNeverReceived for %s, retry in 10s ...', url)
time.sleep(10)
if response:
response_root_key = response.meta['root_key']
else:
response_root_key = None
yield SplashRequest( yield SplashRequest(
url, url,
self.parse, self.parse,
errback=self.errback_catcher, errback=self.errback_catcher,
endpoint='execute', endpoint='execute',
cache_args=['lua_source'],
meta={'father': father, 'current_url': url}, meta={'father': father, 'current_url': url},
args=self.build_request_arg(response.cookiejar) args=l_cookies
) )
else: else:
print('failure') self.logger.error(failure.type)
#print(failure) self.logger.error(failure.getErrorMessage())
print(failure.type)
def save_crawled_item(self, item_id, item_content): def save_crawled_item(self, item_id, item_content):
gzip64encoded = crawlers.save_crawled_item(item_id, item_content) gzip64encoded = crawlers.save_crawled_item(item_id, item_content)

View file

@ -46,4 +46,4 @@ if __name__ == '__main__':
redis_cache.delete('crawler_request:{}'.format(uuid)) redis_cache.delete('crawler_request:{}'.format(uuid))
crawler = TorSplashCrawler(splash_url, crawler_options) crawler = TorSplashCrawler(splash_url, crawler_options)
crawler.crawl(service_type, crawler_options, date, requested_mode, url, domain, port, cookies, original_item) crawler.crawl(splash_url, service_type, crawler_options, date, requested_mode, url, domain, port, cookies, original_item)

View file

@ -11,12 +11,10 @@ import time
import yara import yara
from pubsublogger import publisher from pubsublogger import publisher
#
# import NotificationHelper
#
sys.path.append(os.environ['AIL_BIN']) sys.path.append(os.environ['AIL_BIN'])
from Helper import Process from Helper import Process
import NotificationHelper # # TODO: refactor
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages')) sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
import Term import Term
@ -26,7 +24,7 @@ import Tracker
import item_basic import item_basic
full_item_url = "/showsavedpaste/?paste=" full_item_url = "/object/item?id="
mail_body_template = "AIL Framework,\nNew YARA match: {}\nitem id: {}\nurl: {}{}" mail_body_template = "AIL Framework,\nNew YARA match: {}\nitem id: {}\nurl: {}{}"
last_refresh = time.time() last_refresh = time.time()
@ -48,7 +46,7 @@ def yara_rules_match(data):
mail_to_notify = Tracker.get_tracker_mails(tracker_uuid) mail_to_notify = Tracker.get_tracker_mails(tracker_uuid)
if mail_to_notify: if mail_to_notify:
mail_subject = Tracker.get_email_subject(tracker_uuid) mail_subject = Tracker.get_email_subject(tracker_uuid)
mail_body = mail_body_template.format(term, item_id, full_item_url, item_id) mail_body = mail_body_template.format(data['rule'], item_id, full_item_url, item_id)
for mail in mail_to_notify: for mail in mail_to_notify:
NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body) NotificationHelper.sendEmailNotification(mail, mail_subject, mail_body)
@ -73,10 +71,12 @@ if __name__ == "__main__":
item_id = p.get_from_set() item_id = p.get_from_set()
if item_id is not None: if item_id is not None:
item_content = item_basic.get_item_content(item_id) item_content = item_basic.get_item_content(item_id)
try:
yara_match = rules.match(data=item_content, callback=yara_rules_match, which_callbacks=yara.CALLBACK_MATCHES, timeout=60) yara_match = rules.match(data=item_content, callback=yara_rules_match, which_callbacks=yara.CALLBACK_MATCHES, timeout=60)
if yara_match: if yara_match:
print(f'{item_id}: {yara_match}') print(f'{item_id}: {yara_match}')
except yara.TimeoutError as e:
print(f'{item_id}: yara scanning timed out')
else: else:
time.sleep(5) time.sleep(5)
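Note (not part of the diff): a minimal sketch of the yara-python pattern used above, a CALLBACK_MATCHES-only callback plus the newly added TimeoutError handling; the rule source and the scanned data are made up.
import yara
rules = yara.compile(source='rule demo { strings: $a = "password" condition: $a }')
def on_match(data):
    # data['rule'] is what the module uses to build the alert mail body
    print('match:', data['rule'])
    return yara.CALLBACK_CONTINUE
try:
    rules.match(data='leaked password dump', callback=on_match,
                which_callbacks=yara.CALLBACK_MATCHES, timeout=60)
except yara.TimeoutError:
    print('yara scanning timed out')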

View file

@ -17,6 +17,37 @@ import subprocess
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/')) sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader import ConfigLoader
def launch_background_upgrade(version, l_script_name):
if r_serv.sismember('ail:to_update', version):
r_serv.delete('ail:update_error')
r_serv.set('ail:update_in_progress', version)
r_serv.set('ail:current_background_update', version)
for script_name in l_script_name:
r_serv.set('ail:current_background_script', script_name)
update_file = os.path.join(os.environ['AIL_HOME'], 'update', version, script_name)
process = subprocess.run(['python' ,update_file])
update_progress = r_serv.get('ail:current_background_script_stat')
#if update_progress:
# if int(update_progress) != 100:
# r_serv.set('ail:update_error', 'Update {} Failed'.format(version))
update_progress = r_serv.get('ail:current_background_script_stat')
if update_progress:
if int(update_progress) == 100:
r_serv.delete('ail:update_in_progress')
r_serv.delete('ail:current_background_script')
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
r_serv.srem('ail:to_update', version)
def clean_update_db():
r_serv.delete('ail:update_error')
r_serv.delete('ail:update_in_progress')
r_serv.delete('ail:current_background_script')
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
if __name__ == "__main__": if __name__ == "__main__":
config_loader = ConfigLoader.ConfigLoader() config_loader = ConfigLoader.ConfigLoader()
@ -25,92 +56,12 @@ if __name__ == "__main__":
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion") r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None config_loader = None
if r_serv.scard('ail:update_v1.5') != 5: if r_serv.scard('ail:to_update') == 0:
r_serv.delete('ail:update_error') clean_update_db()
r_serv.set('ail:update_in_progress', 'v1.5')
r_serv.set('ail:current_background_update', 'v1.5')
if not r_serv.sismember('ail:update_v1.5', 'onions'):
update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v1.5', 'Update-ARDB_Onions.py')
process = subprocess.run(['python' ,update_file])
if not r_serv.sismember('ail:update_v1.5', 'metadata'):
update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v1.5', 'Update-ARDB_Metadata.py')
process = subprocess.run(['python' ,update_file])
if not r_serv.sismember('ail:update_v1.5', 'tags'):
update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v1.5', 'Update-ARDB_Tags.py')
process = subprocess.run(['python' ,update_file])
if not r_serv.sismember('ail:update_v1.5', 'tags_background'):
update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v1.5', 'Update-ARDB_Tags_background.py')
process = subprocess.run(['python' ,update_file])
if not r_serv.sismember('ail:update_v1.5', 'crawled_screenshot'):
update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v1.5', 'Update-ARDB_Onions_screenshots.py')
process = subprocess.run(['python' ,update_file])
if r_serv.scard('ail:update_v1.5') != 5:
r_serv.set('ail:update_error', 'Update v1.5 Failed, please relaunch the bin/update-background.py script')
else:
r_serv.delete('ail:update_in_progress')
r_serv.delete('ail:current_background_script')
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
if r_serv.get('ail:current_background_update') == 'v2.4':
r_serv.delete('ail:update_error')
r_serv.set('ail:update_in_progress', 'v2.4')
r_serv.set('ail:current_background_update', 'v2.4')
r_serv.set('ail:current_background_script', 'domain update')
update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v2.4', 'Update_domain.py')
process = subprocess.run(['python' ,update_file])
if int(r_serv_onion.scard('domain_update_v2.4')) != 0: launch_background_upgrade('v1.5', ['Update-ARDB_Onions.py', 'Update-ARDB_Metadata.py', 'Update-ARDB_Tags.py', 'Update-ARDB_Tags_background.py', 'Update-ARDB_Onions_screenshots.py'])
r_serv.set('ail:update_error', 'Update v2.4 Failed, please relaunch the bin/update-background.py script') launch_background_upgrade('v2.4', ['Update_domain.py'])
else: launch_background_upgrade('v2.6', ['Update_screenshots.py'])
r_serv.delete('ail:update_in_progress') launch_background_upgrade('v2.7', ['Update_domain_tags.py'])
r_serv.delete('ail:current_background_script') launch_background_upgrade('v3.4', ['Update_domain.py'])
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
r_serv.delete('update:nb_elem_to_convert')
r_serv.delete('update:nb_elem_converted')
if r_serv.sismember('ail:to_update', 'v2.6'):
new_version = 'v2.6'
r_serv.delete('ail:update_error')
r_serv.delete('ail:current_background_script_stat')
r_serv.set('ail:update_in_progress', new_version)
r_serv.set('ail:current_background_update', new_version)
r_serv.set('ail:current_background_script', 'screenshot update')
update_file = os.path.join(os.environ['AIL_HOME'], 'update', new_version, 'Update_screenshots.py')
process = subprocess.run(['python' ,update_file])
update_progress = r_serv.get('ail:current_background_script_stat')
if update_progress:
if int(update_progress) == 100:
r_serv.delete('ail:update_in_progress')
r_serv.delete('ail:current_background_script')
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
r_serv.srem('ail:to_update', new_version)
elif r_serv.sismember('ail:to_update', 'v2.7'):
new_version = 'v2.7'
r_serv.delete('ail:update_error')
r_serv.delete('ail:current_background_script_stat')
r_serv.set('ail:update_in_progress', new_version)
r_serv.set('ail:current_background_update', new_version)
r_serv.set('ail:current_background_script', 'domain tags update')
update_file = os.path.join(os.environ['AIL_HOME'], 'update', new_version, 'Update_domain_tags.py')
process = subprocess.run(['python' ,update_file])
update_progress = r_serv.get('ail:current_background_script_stat')
if update_progress:
if int(update_progress) == 100:
r_serv.delete('ail:update_in_progress')
r_serv.delete('ail:current_background_script')
r_serv.delete('ail:current_background_script_stat')
r_serv.delete('ail:current_background_update')
r_serv.srem('ail:to_update', new_version)
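Note (not part of the diff): a hedged sketch of how a background upgrade is queued and how its progress can be polled with the keys used by launch_background_upgrade() above. The ARDB_DB host/port/db below are assumptions; AIL code obtains this connection through ConfigLoader.get_redis_conn("ARDB_DB").
import redis
# Assumed ARDB_DB connection parameters, for illustration only
r_serv = redis.StrictRedis(host='localhost', port=6382, db=0, decode_responses=True)
r_serv.sadd('ail:to_update', 'v3.4')  # mark the version as pending (normally done by update/vX.Y/Update.py)
# bin/update-background.py then runs the listed scripts via launch_background_upgrade()
print(r_serv.get('ail:current_background_update'))       # version currently processed
print(r_serv.get('ail:current_background_script'))       # script label, e.g. 'domain languages update'
print(r_serv.get('ail:current_background_script_stat'))  # progress, 0-100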

View file

@ -4,7 +4,8 @@ dicofilters = Dicos
pastes = PASTES pastes = PASTES
hash = HASHS hash = HASHS
crawled = crawled crawled = crawled
crawled_screenshot = CRAWLED_SCREENSHOT har = CRAWLED_SCREENSHOT
screenshot = CRAWLED_SCREENSHOT/screenshot
wordtrending_csv = var/www/static/csv/wordstrendingdata wordtrending_csv = var/www/static/csv/wordstrendingdata
wordsfile = files/wordfile wordsfile = files/wordfile
@ -40,7 +41,7 @@ sender_user =
#Proxying requests to the app #Proxying requests to the app
baseUrl = / baseUrl = /
#Host to bind to #Host to bind to
host = 0.0.0.0 host = 127.0.0.1
#Flask server port #Flask server port
port = 7000 port = 7000
#Number of logs to display in the dashboard #Number of logs to display in the dashboard
@ -141,16 +142,6 @@ host = localhost
port = 6381 port = 6381
db = 0 db = 0
[Redis_Data_Merging]
host = localhost
port = 6379
db = 1
[Redis_Paste_Name]
host = localhost
port = 6379
db = 2
[Redis_Mixer_Cache] [Redis_Mixer_Cache]
host = localhost host = localhost
port = 6381 port = 6381
@ -221,6 +212,11 @@ host = localhost
port = 6382 port = 6382
db = 10 db = 10
[Kvrocks_Meta]
host = localhost
port = 6383
db = 0
[Url] [Url]
cc_critical = DE cc_critical = DE
@ -278,6 +274,7 @@ default_crawler_closespider_pagecount = 50
default_crawler_user_agent = Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0 default_crawler_user_agent = Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0
splash_url = http://127.0.0.1 splash_url = http://127.0.0.1
splash_port = 8050-8052 splash_port = 8050-8052
domain_proxy = onion.foundation
[IP] [IP]
# list of comma-separated CIDR that you wish to be alerted for. e.g: # list of comma-separated CIDR that you wish to be alerted for. e.g:
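Note (not part of the diff): a minimal sketch of reading the new [Directories] har/screenshot entries from core.cfg; it assumes ConfigLoader.get_files_directory() simply joins AIL_HOME with the configured relative path, which is only inferred from the calling code above.
import os
import configparser
config = configparser.ConfigParser()
config.read(os.path.join(os.environ['AIL_HOME'], 'configs', 'core.cfg'))
# CRAWLED_SCREENSHOT and CRAWLED_SCREENSHOT/screenshot, resolved under AIL_HOME (assumed behaviour)
har_dir = os.path.join(os.environ['AIL_HOME'], config.get('Directories', 'har'))
screenshot_dir = os.path.join(os.environ['AIL_HOME'], config.get('Directories', 'screenshot'))
print(har_dir)
print(screenshot_dir)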

View file

@ -11,6 +11,9 @@ sudo apt-get update
sudo apt-get install python3-pip virtualenv python3-dev python3-tk libfreetype6-dev \ sudo apt-get install python3-pip virtualenv python3-dev python3-tk libfreetype6-dev \
screen g++ python-tk unzip libsnappy-dev cmake -qq screen g++ python-tk unzip libsnappy-dev cmake -qq
#Needed for downloading jemalloc
sudo apt-get install wget -qq
#optional tor install #optional tor install
sudo apt-get install tor -qq sudo apt-get install tor -qq
@ -80,11 +83,18 @@ sudo make install
popd popd
# ARDB # # ARDB #
test ! -d ardb/ && git clone https://github.com/yinqiwen/ardb.git test ! -d ardb/ && git clone https://github.com/ail-project/ardb.git
pushd ardb/ pushd ardb/
make make
popd popd
# KVROCKS #
# test ! -d kvrocks/ && git clone https://github.com/bitleak/kvrocks.git
# pushd kvrocks/
# make -j4
# popd
# Config File
if [ ! -f configs/core.cfg ]; then if [ ! -f configs/core.cfg ]; then
cp configs/core.cfg.sample configs/core.cfg cp configs/core.cfg.sample configs/core.cfg
fi fi

View file

@ -22,11 +22,13 @@ textblob
#Tokeniser #Tokeniser
nltk nltk
html2text
yara-python yara-python
#Crawler #Crawler
scrapy scrapy
scrapy-splash scrapy-splash
pycld3
#Graph #Graph
numpy numpy

View file

@ -33,7 +33,7 @@ if __name__ == '__main__':
config_loader = ConfigLoader.ConfigLoader() config_loader = ConfigLoader.ConfigLoader()
SCREENSHOT_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot")) SCREENSHOT_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"))
NEW_SCREENSHOT_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"), 'screenshot') NEW_SCREENSHOT_FOLDER = config_loader.get_files_directory('screenshot')
PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/' PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/'
@ -111,4 +111,7 @@ if __name__ == '__main__':
print() print()
print('Done in {} s'.format(end - start_deb)) print('Done in {} s'.format(end - start_deb))
r_serv.set('ail:current_background_script_stat', 100)
r_serv.sadd('ail:update_v1.5', 'crawled_screenshot') r_serv.sadd('ail:update_v1.5', 'crawled_screenshot')
if r_serv.scard('ail:update_v1.5') != 5:
r_serv.set('ail:update_error', 'Update v1.5 Failed, please relaunch the bin/update-background.py script')

View file

@ -43,13 +43,12 @@ if __name__ == '__main__':
print('Updating ARDB_Onion Done => {} paths: {} s'.format(index, end - start)) print('Updating ARDB_Onion Done => {} paths: {} s'.format(index, end - start))
print() print()
# Add background update
r_serv.sadd('ail:to_update', 'v1.5')
#Set current ail version #Set current ail version
r_serv.set('ail:version', 'v1.5') r_serv.set('ail:version', 'v1.5')
#Set current update_in_progress
r_serv.set('ail:update_in_progress', 'v1.5')
r_serv.set('ail:current_background_update', 'v1.5')
#Set current ail version #Set current ail version
r_serv.set('ail:update_date_v1.5', datetime.datetime.now().strftime("%Y%m%d")) r_serv.set('ail:update_date_v1.5', datetime.datetime.now().strftime("%Y%m%d"))

View file

@ -22,14 +22,13 @@ if __name__ == '__main__':
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion") r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None config_loader = None
#Set current update_in_progress
r_serv.set('ail:update_in_progress', new_version)
r_serv.set('ail:current_background_update', new_version)
r_serv_onion.sunionstore('domain_update_v2.4', 'full_onion_up', 'full_regular_up') r_serv_onion.sunionstore('domain_update_v2.4', 'full_onion_up', 'full_regular_up')
r_serv.set('update:nb_elem_to_convert', r_serv_onion.scard('domain_update_v2.4')) r_serv.set('update:nb_elem_to_convert', r_serv_onion.scard('domain_update_v2.4'))
r_serv.set('update:nb_elem_converted',0) r_serv.set('update:nb_elem_converted',0)
# Add background update
r_serv.sadd('ail:to_update', new_version)
#Set current ail version #Set current ail version
r_serv.set('ail:version', new_version) r_serv.set('ail:version', new_version)

View file

@ -56,6 +56,8 @@ if __name__ == '__main__':
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion") r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None config_loader = None
r_serv.set('ail:current_background_script', 'domain update')
nb_elem_to_update = r_serv_db.get('update:nb_elem_to_convert') nb_elem_to_update = r_serv_db.get('update:nb_elem_to_convert')
if not nb_elem_to_update: if not nb_elem_to_update:
nb_elem_to_update = 0 nb_elem_to_update = 0
@ -78,4 +80,7 @@ if __name__ == '__main__':
update_update_stats() update_update_stats()
else: else:
r_serv.delete('update:nb_elem_to_convert')
r_serv.delete('update:nb_elem_converted')
r_serv_db.set('ail:current_background_script_stat', 100)
sys.exit(0) sys.exit(0)

View file

@ -21,10 +21,6 @@ if __name__ == '__main__':
r_serv = config_loader.get_redis_conn("ARDB_DB") r_serv = config_loader.get_redis_conn("ARDB_DB")
config_loader = None config_loader = None
#Set current update_in_progress
r_serv.set('ail:update_in_progress', new_version)
r_serv.set('ail:current_background_update', new_version)
r_serv.sadd('ail:to_update', new_version) r_serv.sadd('ail:to_update', new_version)
#Set current ail version #Set current ail version

View file

@ -93,9 +93,3 @@ if __name__ == '__main__':
end = time.time() end = time.time()
print('ALL screenshot updated: {} in {} s'.format(nb, end - start_deb)) print('ALL screenshot updated: {} in {} s'.format(nb, end - start_deb))
r_serv_db.delete('ail:update_in_progress')
r_serv_db.delete('ail:current_background_script')
r_serv_db.delete('ail:current_background_script_stat')
r_serv_db.delete('ail:current_background_update')
r_serv_db.srem('ail:to_update', 'v2.6')

View file

@ -23,10 +23,6 @@ if __name__ == '__main__':
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion") r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None config_loader = None
#Set current update_in_progress
r_serv.set('ail:update_in_progress', new_version)
r_serv.set('ail:current_background_update', new_version)
r_serv.sadd('ail:to_update', new_version) r_serv.sadd('ail:to_update', new_version)
#### Update tags #### #### Update tags ####

View file

@ -116,9 +116,3 @@ if __name__ == '__main__':
end = time.time() end = time.time()
print('ALL domains tags updated in {} s'.format(end - start_deb)) print('ALL domains tags updated in {} s'.format(end - start_deb))
r_serv_db.delete('ail:update_in_progress')
r_serv_db.delete('ail:current_background_script')
r_serv_db.delete('ail:current_background_script_stat')
r_serv_db.delete('ail:current_background_update')
r_serv_db.srem('ail:to_update', update_version)

39
update/v3.2/Update.py Executable file
View file

@ -0,0 +1,39 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import sys
import time
import redis
import argparse
import datetime
import configparser
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
new_version = 'v3.2'
if __name__ == '__main__':
start_deb = time.time()
config_loader = ConfigLoader.ConfigLoader()
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
config_loader = None
#### NEW EXPORTER
# remove old tags errors
#r_serv_db.delete('mess_not_saved_export')
# move solo tags to export in tags_db
#all_misp_tags = r_serv_db.smembers('whitelist_misp')
#all_hive_tags = r_serv_db.smembers('whitelist_hive')
# # TODO: save them in tags db
#### NEW EXPORTER
#Set current ail version
r_serv_db.set('ail:version', new_version)
#Set current ail version
r_serv_db.hset('ail:update_date', new_version, datetime.datetime.now().strftime("%Y%m%d"))

52
update/v3.2/Update.sh Executable file
View file

@ -0,0 +1,52 @@
#!/bin/bash
[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;
export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH
GREEN="\\033[1;32m"
DEFAULT="\\033[0;39m"
echo -e $GREEN"Shutting down AIL ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait
bash ${AIL_BIN}/LAUNCH.sh -ldbv &
wait
echo ""
# SUBMODULES #
git submodule init
git submodule update
echo -e $GREEN"Installing YARA ..."$DEFAULT
pip3 install yara-python
bash ${AIL_BIN}/LAUNCH.sh -t
# SUBMODULES #
git submodule init
git submodule update
echo ""
echo -e $GREEN"Updating AIL VERSION ..."$DEFAULT
echo ""
python ${AIL_HOME}/update/v3.2/Update.py
wait
echo ""
echo ""
echo ""
echo -e $GREEN"Shutting down ARDB ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait
exit 0

29
update/v3.3/Update.py Executable file
View file

@ -0,0 +1,29 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import sys
import time
import redis
import argparse
import datetime
import configparser
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
new_version = 'v3.3'
if __name__ == '__main__':
start_deb = time.time()
config_loader = ConfigLoader.ConfigLoader()
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
config_loader = None
#Set current ail version
r_serv_db.set('ail:version', new_version)
#Set current ail version
r_serv_db.hset('ail:update_date', new_version, datetime.datetime.now().strftime("%Y%m%d"))

54
update/v3.3/Update.sh Executable file
View file

@ -0,0 +1,54 @@
#!/bin/bash
[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;
export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH
GREEN="\\033[1;32m"
DEFAULT="\\033[0;39m"
echo -e $GREEN"Shutting down AIL ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait
bash ${AIL_BIN}/LAUNCH.sh -ldbv &
wait
echo ""
# SUBMODULES #
git submodule update
# echo ""
# echo -e $GREEN"installing KVORCKS ..."$DEFAULT
# cd ${AIL_HOME}
# test ! -d kvrocks/ && git clone https://github.com/bitleak/kvrocks.git
# pushd kvrocks/
# make -j4
# popd
echo -e $GREEN"Installing html2text ..."$DEFAULT
pip3 install html2text
echo ""
echo -e $GREEN"Updating AIL VERSION ..."$DEFAULT
echo ""
python ${AIL_HOME}/update/v3.3/Update.py
wait
echo ""
echo ""
echo ""
echo -e $GREEN"Shutting down ARDB ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait
exit 0

36
update/v3.4/Update.py Executable file
View file

@ -0,0 +1,36 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import re
import sys
import time
import redis
import datetime
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
new_version = 'v3.4'
if __name__ == '__main__':
start_deb = time.time()
config_loader = ConfigLoader.ConfigLoader()
r_serv = config_loader.get_redis_conn("ARDB_DB")
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None
r_serv_onion.sunionstore('domain_update_v3.4', 'full_onion_up', 'full_regular_up')
r_serv.set('update:nb_elem_to_convert', r_serv_onion.scard('domain_update_v3.4'))
r_serv.set('update:nb_elem_converted',0)
# Add background update
r_serv.sadd('ail:to_update', new_version)
#Set current ail version
r_serv.set('ail:version', new_version)
#Set current ail version
r_serv.hset('ail:update_date', new_version, datetime.datetime.now().strftime("%Y%m%d"))

54
update/v3.4/Update.sh Executable file
View file

@ -0,0 +1,54 @@
#!/bin/bash
[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;
export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH
GREEN="\\033[1;32m"
DEFAULT="\\033[0;39m"
echo -e $GREEN"Shutting down AIL ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait
# bash ${AIL_BIN}/LAUNCH.sh -ldbv &
# wait
# echo ""
# SUBMODULES #
git submodule update
# echo ""
# echo -e $GREEN"installing KVORCKS ..."$DEFAULT
# cd ${AIL_HOME}
# test ! -d kvrocks/ && git clone https://github.com/bitleak/kvrocks.git
# pushd kvrocks/
# make -j4
# popd
echo -e $GREEN"Installing html2text ..."$DEFAULT
pip3 install pycld3
echo ""
echo -e $GREEN"Updating AIL VERSION ..."$DEFAULT
echo ""
python ${AIL_HOME}/update/v3.4/Update.py
wait
echo ""
echo ""
echo ""
echo -e $GREEN"Shutting down ARDB ..."$DEFAULT
bash ${AIL_BIN}/LAUNCH.sh -ks
wait
exit 0

60
update/v3.4/Update_domain.py Executable file
View file

@ -0,0 +1,60 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
import os
import re
import sys
import time
import redis
import datetime
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
import Domain
def update_update_stats():
nb_updated = int(r_serv_db.get('update:nb_elem_converted'))
progress = int((nb_updated * 100) / nb_elem_to_update)
print('{}/{} updated {}%'.format(nb_updated, nb_elem_to_update, progress))
r_serv_db.set('ail:current_background_script_stat', progress)
def update_domain_language(domain_obj, item_id):
domain_name = domain_obj.get_domain_name()
Domain.add_domain_languages_by_item_id(domain_name, item_id)
if __name__ == '__main__':
start_deb = time.time()
config_loader = ConfigLoader.ConfigLoader()
r_serv_db = config_loader.get_redis_conn("ARDB_DB")
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
config_loader = None
r_serv_db.set('ail:current_background_script', 'domain languages update')
nb_elem_to_update = r_serv_db.get('update:nb_elem_to_convert')
if not nb_elem_to_update:
nb_elem_to_update = 1
else:
nb_elem_to_update = int(nb_elem_to_update)
#Domain._delete_all_domains_languages()
while True:
domain = r_serv_onion.spop('domain_update_v3.4')
if domain is not None:
print(domain)
domain = Domain.Domain(domain)
for domain_history in domain.get_domain_history():
domain_item = domain.get_domain_items_crawled(epoch=domain_history[1]) # item_tag
if "items" in domain_item:
for item_dict in domain_item['items']:
update_domain_language(domain, item_dict['id'])
r_serv_db.incr('update:nb_elem_converted')
update_update_stats()
else:
r_serv_db.set('ail:current_background_script_stat', 100)
sys.exit(0)

View file

@ -42,6 +42,8 @@ from blueprints.crawler_splash import crawler_splash
from blueprints.correlation import correlation from blueprints.correlation import correlation
from blueprints.tags_ui import tags_ui from blueprints.tags_ui import tags_ui
from blueprints.import_export import import_export from blueprints.import_export import import_export
from blueprints.objects_item import objects_item
from blueprints.old_endpoints import old_endpoints
Flask_dir = os.environ['AIL_FLASK'] Flask_dir = os.environ['AIL_FLASK']
@ -97,6 +99,8 @@ app.register_blueprint(crawler_splash, url_prefix=baseUrl)
app.register_blueprint(correlation, url_prefix=baseUrl) app.register_blueprint(correlation, url_prefix=baseUrl)
app.register_blueprint(tags_ui, url_prefix=baseUrl) app.register_blueprint(tags_ui, url_prefix=baseUrl)
app.register_blueprint(import_export, url_prefix=baseUrl) app.register_blueprint(import_export, url_prefix=baseUrl)
app.register_blueprint(objects_item, url_prefix=baseUrl)
app.register_blueprint(old_endpoints, url_prefix=baseUrl)
# ========= =========# # ========= =========#
# ========= Cookie name ======== # ========= Cookie name ========

View file

@ -24,8 +24,10 @@ sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
import Tag import Tag
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib')) sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib'))
import Domain
import crawlers import crawlers
import Domain
import Language
import Config_DB import Config_DB
r_cache = Flask_config.r_cache r_cache = Flask_config.r_cache
@ -123,6 +125,9 @@ def send_to_spider():
return create_json_response(res[0], res[1]) return create_json_response(res[0], res[1])
return redirect(url_for('crawler_splash.manual')) return redirect(url_for('crawler_splash.manual'))
#### Domains ####
# add route : /crawlers/show_domain # add route : /crawlers/show_domain
@crawler_splash.route('/crawlers/showDomain', methods=['GET', 'POST']) @crawler_splash.route('/crawlers/showDomain', methods=['GET', 'POST'])
@login_required @login_required
@ -148,7 +153,8 @@ def showDomain():
if domain.domain_was_up(): if domain.domain_was_up():
dict_domain = {**dict_domain, **domain.get_domain_correlation()} dict_domain = {**dict_domain, **domain.get_domain_correlation()}
dict_domain['correlation_nb'] = Domain.get_domain_total_nb_correlation(dict_domain) dict_domain['correlation_nb'] = Domain.get_domain_total_nb_correlation(dict_domain)
dict_domain['origin_item'] = domain.get_domain_last_origin() dict_domain['father'] = domain.get_domain_father()
dict_domain['languages'] = Language.get_languages_from_iso(domain.get_domain_languages(), sort=True)
dict_domain['tags'] = domain.get_domain_tags() dict_domain['tags'] = domain.get_domain_tags()
dict_domain['tags_safe'] = Tag.is_tags_safe(dict_domain['tags']) dict_domain['tags_safe'] = Tag.is_tags_safe(dict_domain['tags'])
dict_domain['history'] = domain.get_domain_history_with_status() dict_domain['history'] = domain.get_domain_history_with_status()
@ -236,6 +242,57 @@ def domains_explorer_web():
dict_data = Domain.get_domains_up_by_filers('regular', page=page, date_from=date_from, date_to=date_to) dict_data = Domain.get_domains_up_by_filers('regular', page=page, date_from=date_from, date_to=date_to)
return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label, domain_type='regular') return render_template("domain_explorer.html", dict_data=dict_data, bootstrap_label=bootstrap_label, domain_type='regular')
@crawler_splash.route('/domains/languages/all/json', methods=['GET'])
@login_required
@login_read_only
def domains_all_languages_json():
# # TODO: get domain type
iso = request.args.get('iso')
domain_types = request.args.getlist('domain_types')
return jsonify(Language.get_languages_from_iso(Domain.get_all_domains_languages(), sort=True))
@crawler_splash.route('/domains/languages/search_get', methods=['GET'])
@login_required
@login_read_only
def domains_search_languages_get():
page = request.args.get('page')
try:
page = int(page)
except:
page = 1
domains_types = request.args.getlist('domain_types')
if domains_types:
domains_types = domains_types[0].split(',')
languages = request.args.getlist('languages')
if languages:
languages = languages[0].split(',')
l_dict_domains = Domain.api_get_domains_by_languages(domains_types, Language.get_iso_from_languages(languages), domains_metadata=True, page=page)
return render_template("domains/domains_filter_languages.html", template_folder='../../',
l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
current_languages=languages, domains_types=domains_types)
@crawler_splash.route('/domains/name/search', methods=['GET'])
@login_required
@login_analyst
def domains_search_name():
name = request.args.get('name')
page = request.args.get('page')
try:
page = int(page)
except:
page = 1
domains_types = request.args.getlist('domain_types')
if domains_types:
domains_types = domains_types[0].split(',')
l_dict_domains = Domain.api_search_domains_by_name(name, domains_types, domains_metadata=True, page=page)
return render_template("domains/domains_result_list.html", template_folder='../../',
l_dict_domains=l_dict_domains, bootstrap_label=bootstrap_label,
domains_types=domains_types)
##-- --##
## Cookiejar ## ## Cookiejar ##
@crawler_splash.route('/crawler/cookiejar/add', methods=['GET']) @crawler_splash.route('/crawler/cookiejar/add', methods=['GET'])
@login_required @login_required

View file

@ -0,0 +1,97 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
'''
Blueprint Flask: item objects endpoints: show item, html2text, raw content, download ...
'''
import os
import sys
import json
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response, abort, send_file
from flask_login import login_required, current_user
# Import Role_Manager
from Role_Manager import login_admin, login_analyst, login_read_only
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
import Item
import Tag
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'export'))
import Export
# ============ BLUEPRINT ============
objects_item = Blueprint('objects_item', __name__, template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates/objects/item'))
# ============ VARIABLES ============
bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info']
# ============ FUNCTIONS ============
# ============= ROUTES ==============
@objects_item.route("/object/item") #completely shows the paste in a new tab
@login_required
@login_read_only
def showItem(): # # TODO: support post
item_id = request.args.get('id')
if not item_id or not Item.exist_item(item_id):
abort(404)
dict_item = {}
dict_item['id'] = item_id
dict_item['name'] = dict_item['id'].replace('/', ' / ')
dict_item['father'] = Item.get_item_parent(item_id)
dict_item['content'] = Item.get_item_content(item_id)
dict_item['metadata'] = Item.get_item_metadata(item_id, item_content=dict_item['content'])
dict_item['tags'] = Tag.get_obj_tag(item_id)
#dict_item['duplicates'] = Item.get_item_nb_duplicates(item_id)
dict_item['duplicates'] = Item.get_item_duplicates_dict(item_id)
dict_item['crawler'] = Item.get_crawler_matadata(item_id, ltags=dict_item['tags'])
## EXPORT SECTION
# # TODO: ADD in Export SECTION
dict_item['hive_case'] = Export.get_item_hive_cases(item_id)
return render_template("show_item.html", bootstrap_label=bootstrap_label,
modal_add_tags=Tag.get_modal_add_tags(dict_item['id'], object_type='item'),
is_hive_connected=Export.get_item_hive_cases(item_id),
dict_item=dict_item)
# kvrocks data
# # TODO: dynamic load:
## duplicates
## correlations
## Dynamic Path FIX
@objects_item.route("/object/item/html2text")
@login_required
@login_read_only
def html2text(): # # TODO: support post
item_id = request.args.get('id')
if not item_id or not Item.exist_item(item_id):
abort(404)
return Item.get_item_content_html2text(item_id)
@objects_item.route("/object/item/raw_content")
@login_required
@login_read_only
def item_raw_content(): # # TODO: support post
item_id = request.args.get('id')
if not item_id or not Item.exist_item(item_id):
abort(404)
return Response(Item.get_item_content(item_id), mimetype='text/plain')
@objects_item.route("/object/item/download")
@login_required
@login_read_only
def item_download(): # # TODO: support post
item_id = request.args.get('id')
if not item_id or not Item.exist_item(item_id):
abort(404)
return send_file(Item.get_raw_content(item_id), attachment_filename=item_id, as_attachment=True)
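Note (not part of the diff): a hedged sketch of calling the new item routes from outside the UI; authentication is skipped and the base URL and item id are assumptions (host/port taken from the sample config, self-signed certificate ignored).
import requests
base_url = 'https://127.0.0.1:7000'          # Flask host/port from core.cfg.sample (assumed)
item_id = 'submitted/2021/02/10/example.gz'  # made-up item id
session = requests.Session()
# ... authenticate first: the blueprint requires a logged-in session ...
r = session.get(f'{base_url}/object/item', params={'id': item_id}, verify=False)
print(r.status_code)
r = session.get(f'{base_url}/object/item/raw_content', params={'id': item_id}, verify=False)
print(r.text[:200])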

View file

@ -0,0 +1,34 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*
'''
Blueprint Flask: old endpoints, kept as redirects to the new item routes (e.g. /showsavedpaste/ -> /object/item)
'''
import os
import sys
import json
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response
from flask_login import login_required, current_user
# Import Role_Manager
from Role_Manager import login_admin, login_analyst, login_read_only
# ============ BLUEPRINT ============
old_endpoints = Blueprint('old_endpoints', __name__, template_folder=os.path.join(os.environ['AIL_FLASK'], 'templates'))
# ============ VARIABLES ============
# ============ FUNCTIONS ============
# ============= ROUTES ==============
@old_endpoints.route("/showsavedpaste/")
@login_required
@login_read_only
def showsavedpaste():
item_id = request.args.get('paste', '')
return redirect(url_for('objects_item.showItem', id=item_id))

View file

@ -26,7 +26,6 @@ r_serv_charts = config_loader.get_redis_conn("ARDB_Trending")
r_serv_sentiment = config_loader.get_redis_conn("ARDB_Sentiment") r_serv_sentiment = config_loader.get_redis_conn("ARDB_Sentiment")
r_serv_term = config_loader.get_redis_conn("ARDB_Tracker") r_serv_term = config_loader.get_redis_conn("ARDB_Tracker")
r_serv_cred = config_loader.get_redis_conn("ARDB_TermCred") r_serv_cred = config_loader.get_redis_conn("ARDB_TermCred")
r_serv_pasteName = config_loader.get_redis_conn("Redis_Paste_Name")
r_serv_tags = config_loader.get_redis_conn("ARDB_Tags") r_serv_tags = config_loader.get_redis_conn("ARDB_Tags")
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata") r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
r_serv_db = config_loader.get_redis_conn("ARDB_DB") r_serv_db = config_loader.get_redis_conn("ARDB_DB")
@ -92,13 +91,15 @@ dict_update_description = {'v1.5':{'nb_background_update': 5, 'update_warning_me
'v2.6':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags/Correlation can be', 'v2.6':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags/Correlation can be',
'update_warning_message_notice_me': 'missing from the UI.'}, 'update_warning_message_notice_me': 'missing from the UI.'},
'v2.7':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags can be', 'v2.7':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Tags can be',
'update_warning_message_notice_me': 'missing from the UI.'},
'v3.4':{'nb_background_update': 1, 'update_warning_message': 'An Update is running on the background. Some informations like Domain Languages can be',
'update_warning_message_notice_me': 'missing from the UI.'} 'update_warning_message_notice_me': 'missing from the UI.'}
} }
UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted') UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted')
PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/' PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "pastes")) + '/'
SCREENSHOT_FOLDER = os.path.join(os.environ['AIL_HOME'], config_loader.get_config_str("Directories", "crawled_screenshot"), 'screenshot') SCREENSHOT_FOLDER = config_loader.get_files_directory('screenshot')
REPO_ORIGIN = 'https://github.com/ail-project/ail-framework.git' REPO_ORIGIN = 'https://github.com/ail-project/ail-framework.git'

View file

@ -358,7 +358,7 @@ def submit_status():
link = '' link = ''
if paste_submit_link: if paste_submit_link:
for paste in paste_submit_link: for paste in paste_submit_link:
url = url_for('showsavedpastes.showsavedpaste') + '?paste=' + paste url = url_for('objects_item.showItem') + '?id=' + paste
link += '<a target="_blank" href="' + url + '" class="list-group-item">' + paste +'</a>' link += '<a target="_blank" href="' + url + '" class="list-group-item">' + paste +'</a>'
if nb_total == '-1': if nb_total == '-1':

View file

@ -112,7 +112,7 @@
<div class="form-group"> <div class="form-group">
<label for="paste_name">Archive Password</label> <label for="paste_name">Archive Password</label>
<input type="password" class="form-control" id="password" name="password" placeholder="Optionnal"> <input type="password" class="form-control" id="password" name="password" placeholder="Optional">
</div> </div>
</div> </div>
</div> </div>

View file

@ -185,6 +185,8 @@ def delete_user_db(user_id):
r_serv_db.hdel('user:tokens', user_token) r_serv_db.hdel('user:tokens', user_token)
r_serv_db.delete('user_metadata:{}'.format(user_id)) r_serv_db.delete('user_metadata:{}'.format(user_id))
r_serv_db.hdel('user:all', user_id) r_serv_db.hdel('user:all', user_id)
else:
print('Error: user {} does not exist'.format(user_id))
def hashing_password(bytes_password): def hashing_password(bytes_password):
hashed = bcrypt.hashpw(bytes_password, bcrypt.gensalt()) hashed = bcrypt.hashpw(bytes_password, bcrypt.gensalt())

View file

@ -377,7 +377,7 @@ def remove_tag(): #TODO remove me , used by showpaste
res = Tag.api_delete_obj_tags(tags=[tag], object_id=path, object_type="item") res = Tag.api_delete_obj_tags(tags=[tag], object_id=path, object_type="item")
if res[1] != 200: if res[1] != 200:
return str(res[0]) return str(res[0])
return redirect(url_for('showsavedpastes.showsavedpaste', paste=path)) return redirect(url_for('objects_item.showItem', id=path))
@Tags.route("/Tags/confirm_tag") @Tags.route("/Tags/confirm_tag")
@login_required @login_required
@ -395,7 +395,7 @@ def confirm_tag():
#add analyst tag #add analyst tag
Tag.add_tag('item', tag, path) Tag.add_tag('item', tag, path)
return redirect(url_for('showsavedpastes.showsavedpaste', paste=path)) return redirect(url_for('objects_item.showItem', id=path))
return 'incompatible tag' return 'incompatible tag'
@ -417,7 +417,7 @@ def tag_validation():
r_serv_statistics.sadd('fp:'+tag, path) r_serv_statistics.sadd('fp:'+tag, path)
r_serv_statistics.srem('tp:'+tag, path) r_serv_statistics.srem('tp:'+tag, path)
return redirect(url_for('showsavedpastes.showsavedpaste', paste=path)) return redirect(url_for('objects_item.showItem', id=path))
else: else:
return 'input error' return 'input error'

View file

@ -119,7 +119,7 @@
{% for path in all_path %} {% for path in all_path %}
<tr> <tr>
<td class="pb-0">{{ paste_date[loop.index0] }}</td> <td class="pb-0">{{ paste_date[loop.index0] }}</td>
<td class="pb-0"><a target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{path}}" class="text-secondary"> <td class="pb-0"><a target="_blank" href="{{ url_for('objects_item.showItem') }}?id={{path}}" class="text-secondary">
<div style="line-height:0.9;">{{ path }}</div> <div style="line-height:0.9;">{{ path }}</div>
</a> </a>
<div class="mb-2"> <div class="mb-2">
@ -375,7 +375,7 @@ function toggle_sidebar(){
button.tooltip(button); button.tooltip(button);
$("#container-show-more").append(button); $("#container-show-more").append(button);
$("#button_show_path").attr('href', '{{ url_for('showsavedpastes.showsavedpaste') }}?paste=' + $(modal).attr('data-path')); $("#button_show_path").attr('href', '{{ url_for('objects_item.showItem') }}?id=' + $(modal).attr('data-path'));
$("#button_show_path").show('fast'); $("#button_show_path").show('fast');
$("#loading-gif-modal").css("visibility", "hidden"); // Hide the loading GIF $("#loading-gif-modal").css("visibility", "hidden"); // Hide the loading GIF
if ($("[data-initsize]").attr('data-initsize') < char_to_display) { // All the content is displayed if ($("[data-initsize]").attr('data-initsize') < char_to_display) { // All the content is displayed

View file

@ -103,7 +103,7 @@
{% for path in all_path %} {% for path in all_path %}
<tr> <tr>
<td> {{ loop.index0 }}</td> <td> {{ loop.index0 }}</td>
<td><a target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{path}}">{{ path }}</a> <td><a target="_blank" href="{{ url_for('objects_item.showItem') }}?id={{path}}">{{ path }}</a>
<div> <div>
{% for tag in paste_tags[loop.index0] %} {% for tag in paste_tags[loop.index0] %}
<a href="{{ url_for('Tags.get_tagged_paste') }}?ltags={{ tag[1] }}"> <a href="{{ url_for('Tags.get_tagged_paste') }}?ltags={{ tag[1] }}">
@ -114,7 +114,7 @@
</td> </td>
<td>{{ paste_date[loop.index0] }}</td> <td>{{ paste_date[loop.index0] }}</td>
<td>{{ paste_linenum[loop.index0] }}</td> <td>{{ paste_linenum[loop.index0] }}</td>
<td><p><span class="glyphicon glyphicon-info-sign" data-toggle="tooltip" data-placement="left" title="{{ content[loop.index0] }} "></span> <button type="button" class="btn-link" data-num="{{ loop.index0 + 1 }}" data-toggle="modal" data-target="#mymodal" data-url="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{ path }}&num={{ loop.index0+1 }}" data-path="{{ path }}"><span class="fa fa-search-plus"></span></button></p></td> <td><p><span class="glyphicon glyphicon-info-sign" data-toggle="tooltip" data-placement="left" title="{{ content[loop.index0] }} "></span> <button type="button" class="btn-link" data-num="{{ loop.index0 + 1 }}" data-toggle="modal" data-target="#mymodal" data-url="{{ url_for('objects_item.showItem') }}?id={{ path }}&num={{ loop.index0+1 }}" data-path="{{ path }}"><span class="fa fa-search-plus"></span></button></p></td>
</tr> </tr>
{% endfor %} {% endfor %}

View file

@ -76,7 +76,7 @@ def dashboard_alert(log):
log = log[46:].split(';') log = log[46:].split(';')
if len(log) == 6: if len(log) == 6:
time = datetime_from_utc_to_local(utc_str) time = datetime_from_utc_to_local(utc_str)
path = url_for('showsavedpastes.showsavedpaste',paste=log[5]) path = url_for('objects_item.showItem',id=log[5])
res = {'date': date, 'time': time, 'script': log[0], 'domain': log[1], 'date_paste': log[2], res = {'date': date, 'time': time, 'script': log[0], 'domain': log[1], 'date_paste': log[2],
'paste': log[3], 'message': log[4], 'path': path} 'paste': log[3], 'message': log[4], 'path': path}

View file

@ -87,7 +87,7 @@
<i class="far fa-chart-bar"></i> Feeder(s) Monitor: <i class="far fa-chart-bar"></i> Feeder(s) Monitor:
</div> </div>
<div class="card-body" id="panelbody" style="height:420px;"> <div class="card-body" id="panelbody" style="height:420px;">
<strong>Processed pastes</strong> <strong>Processed items</strong>
<div id="Proc_feeder" style="height: 230px; padding: 0px; position: relative;"></div> <div id="Proc_feeder" style="height: 230px; padding: 0px; position: relative;"></div>
<hr style="border-top: 2px solid #eee; margin-top: 7px; margin-bottom: 7px;"> <hr style="border-top: 2px solid #eee; margin-top: 7px; margin-bottom: 7px;">
<strong>Filtered duplicates</strong> <strong>Filtered duplicates</strong>
@ -165,7 +165,7 @@
</body> </body>
<script> var url_showSavedPath = "{{ url_for('showsavedpastes.showsavedpaste') }}"; </script> <script> var url_showSavedPath = "{{ url_for('objects_item.showItem') }}"; </script>
<script> <script>
$("#page-Dashboard").addClass("active"); $("#page-Dashboard").addClass("active");

View file

@ -489,7 +489,7 @@ def correlation_graph_node_json(correlation_type, type_id, key_id):
for node in nodes_set_dump: for node in nodes_set_dump:
nodes.append({"id": node[0], "group": node[1], "first_seen": node[3], "last_seen": node[4], "nb_seen_in_paste": node[5], 'icon': get_icon_text(correlation_type, node[2]),"url": url_for(get_show_key_id_endpoint(correlation_type), type_id=node[2], key_id=node[0]), 'hash': True}) nodes.append({"id": node[0], "group": node[1], "first_seen": node[3], "last_seen": node[4], "nb_seen_in_paste": node[5], 'icon': get_icon_text(correlation_type, node[2]),"url": url_for(get_show_key_id_endpoint(correlation_type), type_id=node[2], key_id=node[0]), 'hash': True})
for node in nodes_set_paste: for node in nodes_set_paste:
nodes.append({"id": node[0], "group": node[1],"url": url_for('showsavedpastes.showsavedpaste', paste=node[0]), 'hash': False}) nodes.append({"id": node[0], "group": node[1],"url": url_for('objects_item.showItem', id=node[0]), 'hash': False})
links = [] links = []
for link in links_set: for link in links_set:
links.append({"source": link[0], "target": link[1]}) links.append({"source": link[0], "target": link[1]})
@ -1060,7 +1060,7 @@ def hash_graph_node_json():
for node in nodes_set_hash: for node in nodes_set_hash:
nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('hashDecoded.showHash', hash=node[7]), 'hash': True}) nodes.append({"id": node[0], "group": node[1], "first_seen": node[2], "last_seen": node[3], 'estimated_type': node[4], "nb_seen_in_paste": node[5], "size": node[6], 'icon': get_file_icon_text(node[4]),"url": url_for('hashDecoded.showHash', hash=node[7]), 'hash': True})
for node in nodes_set_paste: for node in nodes_set_paste:
nodes.append({"id": node[0], "group": node[1], "nb_seen_in_paste": node[2],"url": url_for('showsavedpastes.showsavedpaste', paste=node[3]), 'hash': False}) nodes.append({"id": node[0], "group": node[1], "nb_seen_in_paste": node[2],"url": url_for('objects_item.showItem', id=node[3]), 'hash': False})
links = [] links = []
for link in links_set: for link in links_set:
links.append({"source": link[0], "target": link[1]}) links.append({"source": link[0], "target": link[1]})

View file

@ -93,8 +93,9 @@ def tracked_menu_yara():
@login_analyst @login_analyst
def add_tracked_menu(): def add_tracked_menu():
if request.method == 'POST': if request.method == 'POST':
term = request.form.get("term") tracker = request.form.get("tracker")
term_type = request.form.get("tracker_type") tracker_uuid = request.form.get("tracker_uuid")
tracker_type = request.form.get("tracker_type")
nb_words = request.form.get("nb_word", 1) nb_words = request.form.get("nb_word", 1)
description = request.form.get("description", '') description = request.form.get("description", '')
level = request.form.get("level", 0) level = request.form.get("level", 0)
@ -102,15 +103,15 @@ def add_tracked_menu():
mails = request.form.get("mails", []) mails = request.form.get("mails", [])
# YARA # # YARA #
if term_type == 'yara': if tracker_type == 'yara':
yara_default_rule = request.form.get("yara_default_rule") yara_default_rule = request.form.get("yara_default_rule")
yara_custom_rule = request.form.get("yara_custom_rule") yara_custom_rule = request.form.get("yara_custom_rule")
if yara_custom_rule: if yara_custom_rule:
term = yara_custom_rule tracker = yara_custom_rule
term_type='yara_custom' tracker_type='yara_custom'
else: else:
term = yara_default_rule tracker = yara_default_rule
term_type='yara_default' tracker_type='yara_default'
# # # #
if level == 'on': if level == 'on':
@ -121,17 +122,58 @@ def add_tracked_menu():
if tags: if tags:
tags = tags.split() tags = tags.split()
input_dict = {"term": term, "type": term_type, "nb_words": nb_words, "tags": tags, "mails": mails, "level": level, "description": description} input_dict = {"tracker": tracker, "type": tracker_type, "nb_words": nb_words, "tags": tags, "mails": mails, "level": level, "description": description}
user_id = current_user.get_id() user_id = current_user.get_id()
res = Term.parse_json_term_to_add(input_dict, user_id) # edit tracker
if tracker_uuid:
input_dict['uuid'] = tracker_uuid
res = Tracker.api_add_tracker(input_dict, user_id)
if res[1] == 200: if res[1] == 200:
return redirect(url_for('hunter.tracked_menu')) return redirect(url_for('hunter.tracked_menu'))
else: else:
## TODO: use modal ## TODO: use modal
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1] return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
else: else:
all_yara_files = Tracker.get_all_default_yara_files() return render_template("edit_tracker.html", all_yara_files=Tracker.get_all_default_yara_files())
return render_template("Add_tracker.html", all_yara_files=all_yara_files)
@hunter.route("/tracker/edit", methods=['GET', 'POST'])
@login_required
@login_analyst
def edit_tracked_menu():
user_id = current_user.get_id()
tracker_uuid = request.args.get('uuid', None)
res = Term.check_term_uuid_valid_access(tracker_uuid, user_id) # check if is author or admin
if res: # invalid access
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
dict_tracker = Tracker.get_tracker_metedata(tracker_uuid, user_id=True, level=True, description=True, tags=True, mails=True)
dict_tracker['tags'] = ' '.join(dict_tracker['tags'])
dict_tracker['mails'] = ' '.join(dict_tracker['mails'])
if dict_tracker['type'] == 'set':
dict_tracker['tracker'], dict_tracker['nb_words'] = dict_tracker['tracker'].split(';')
dict_tracker['tracker'] = dict_tracker['tracker'].replace(',', ' ')
elif dict_tracker['type'] == 'yara': #is_valid_default_yara_rule
if Tracker.is_default_yara_rule(dict_tracker['tracker']):
dict_tracker['yara_file'] = dict_tracker['tracker'].split('/')
dict_tracker['yara_file'] = dict_tracker['yara_file'][-2] + '/' + dict_tracker['yara_file'][-1]
dict_tracker['content'] = None
else:
dict_tracker['yara_file'] = None
dict_tracker['content'] = Tracker.get_yara_rule_content(dict_tracker['tracker'])
return render_template("edit_tracker.html", dict_tracker=dict_tracker,
all_yara_files=Tracker.get_all_default_yara_files())
## TO EDIT
# word
# set of word + nb words
# regex
# yara custom
# yara default ???? => allow edit ?
#### EDIT Show Trackers ?
@hunter.route("/tracker/show_tracker") @hunter.route("/tracker/show_tracker")
@login_required @login_required
@ -254,12 +296,13 @@ def get_json_tracker_stats():
res = Term.get_list_tracked_term_stats_by_day([tracker_uuid]) res = Term.get_list_tracked_term_stats_by_day([tracker_uuid])
return jsonify(res) return jsonify(res)
# @hunter.route("/tracker/get_all_default_yara_rules_by_type", methods=['GET']) @hunter.route("/tracker/yara/default_rule/content", methods=['GET'])
# @login_required @login_required
# @login_read_only @login_read_only
# def get_all_default_yara_rules_by_type(): def get_default_yara_rule_content():
# yara_types = request.args.get('yara_types') default_yara_rule = request.args.get('rule_name')
# get_all_default_yara_rules_by_types(yara_types) res = Tracker.api_get_default_rule_content(default_yara_rule)
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
# ========= REGISTRATION ========= # ========= REGISTRATION =========
app.register_blueprint(hunter, url_prefix=baseUrl) app.register_blueprint(hunter, url_prefix=baseUrl)
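
Note: the dict handed to Tracker.api_add_tracker() above doubles as the add and edit payload; a non-empty 'uuid' switches the call to edit mode. A minimal sketch of that structure, with illustrative values only (the keys come from the view code above, the values do not):

    # Sketch only: mirrors the input_dict built by add_tracked_menu() above.
    def build_tracker_dict(tracker, tracker_type, nb_words=1, tags=None,
                           mails=None, level=1, description='', tracker_uuid=None):
        input_dict = {"tracker": tracker, "type": tracker_type, "nb_words": nb_words,
                      "tags": tags or [], "mails": mails or [], "level": level,
                      "description": description}
        if tracker_uuid:  # presence of a uuid turns the add into an edit
            input_dict['uuid'] = tracker_uuid
        return input_dict

    # Editing an existing word tracker (uuid and tag values are made up)
    print(build_tracker_dict('ransom', 'word', tags=['my-tag'],
                             tracker_uuid='00000000-0000-0000-0000-000000000000'))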

View file

@ -27,39 +27,41 @@
<div class="col-12 col-lg-10" id="core_content"> <div class="col-12 col-lg-10" id="core_content">
<div class="card mb-3 mt-1"> <div class="card my-3">
<div class="card-header"> <div class="card-header bg-dark text-white">
<h5 class="card-title">Create a new tracker</h5> <h5 class="card-title">Edit a Tracker</h5>
</div> </div>
<div class="card-body"> <div class="card-body">
<p class="card-text">Select a tracker type.</p>
<form action="{{ url_for('hunter.add_tracked_menu') }}" method='post'> <form action="{{ url_for('hunter.add_tracked_menu') }}" method='post'>
{%if dict_tracker%}
<input id="tracker_uuid" name="tracker_uuid" class="form-control" type="text" value="{{dict_tracker['uuid']}}" hidden>
{%endif%}
<div class="row"> <div class="row">
<div class="col-12 col-xl-9"> <div class="col-12 col-xl-9">
<div class="input-group mb-2 mr-sm-2"> <div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend"> <div class="input-group-prepend">
<div class="input-group-text"><i class="fas fa-tag"></i></div> <div class="input-group-text bg-danger text-white"><i class="fas fa-tag"></i></div>
</div> </div>
<input id="tags" name="tags" class="form-control" placeholder="Tags (optional, space separated)" type="text"> <input id="tags" name="tags" class="form-control" placeholder="Tags (optional, space separated)" type="text" {%if dict_tracker%}{%if dict_tracker['tags']%}value="{{dict_tracker['tags']}}"{%endif%}{%endif%}>
</div> </div>
<div class="input-group mb-2 mr-sm-2"> <div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend"> <div class="input-group-prepend">
<div class="input-group-text"><i class="fas fa-at"></i></div> <div class="input-group-text bg-secondary text-white"><i class="fas fa-at"></i></div>
</div> </div>
<input id="mails" name="mails" class="form-control" placeholder="E-Mails Notification (optional, space separated)" type="text"> <input id="mails" name="mails" class="form-control" placeholder="E-Mails Notification (optional, space separated)" type="text" {%if dict_tracker%}{%if dict_tracker['mails']%}value="{{dict_tracker['mails']}}"{%endif%}{%endif%}>
</div> </div>
<div class="input-group mb-2 mr-sm-2"> <div class="input-group mb-2 mr-sm-2">
<div class="input-group-prepend"> <div class="input-group-prepend">
<div class="input-group-text"><i class="fas fa-pencil-alt"></i></div> <div class="input-group-text bg-info text-white"><i class="fas fa-pencil-alt"></i></div>
</div> </div>
<input id="description" name="description" class="form-control" placeholder="Tracker Description (optional)" type="text"> <input id="description" name="description" class="form-control" placeholder="Tracker Description (optional)" type="text" {%if dict_tracker%}{%if dict_tracker['description']%}value="{{dict_tracker['description']}}"{%endif%}{%endif%}>
</div> </div>
</div> </div>
<div class="col-12 col-xl-3"> <div class="col-12 col-xl-3">
<div class="custom-control custom-switch mt-1"> <div class="custom-control custom-switch mt-1">
<input class="custom-control-input" type="checkbox" name="level" id="id_level" checked> <input class="custom-control-input" type="checkbox" name="level" id="id_level" {%if dict_tracker%}{%if dict_tracker['level']==1%}checked{%endif%}{%else%}checked{%endif%}>
<label class="custom-control-label" for="id_level"> <label class="custom-control-label" for="id_level">
<i class="fas fa-users"></i>&nbsp;Show tracker to all Users <i class="fas fa-users"></i>&nbsp;Show tracker to all Users
</label> </label>
@ -68,6 +70,7 @@
</div> </div>
<hr> <hr>
<h4>Tracker Type:</h4>
<select id="tracker_type" name="tracker_type" class="custom-select w-25 mb-3"> <select id="tracker_type" name="tracker_type" class="custom-select w-25 mb-3">
<option disabled selected value> -- Select a tracker type -- </option> <option disabled selected value> -- Select a tracker type -- </option>
@ -81,35 +84,42 @@
<div class="row" id="simple_input"> <div class="row" id="simple_input">
<div class="col-12 col-lg-10"> <div class="col-12 col-lg-10">
<input id="term" name="term" class="form-control" placeholder="Terms to track (space separated)" type="text"> <input id="tracker" name="tracker" class="form-control" placeholder="Terms to track (space separated)" type="text" {%if dict_tracker%}{%if dict_tracker['tracker']!='yara'%}value="{{dict_tracker['tracker']}}"{%endif%}{%endif%}>
</div> </div>
<div class="col-12 col-lg-2"> <div class="col-12 col-lg-2">
<input type="number" id="nb_word" name="nb_word" name="quantity" min="1" placeholder="Nb of keywords"> <input type="number" id="nb_word" name="nb_word" name="quantity" min="1" placeholder="Nb of keywords" {%if dict_tracker%}{%if dict_tracker['nb_words']%}value="{{dict_tracker['nb_words']}}"{%endif%}{%endif%}>
</div> </div>
</div> </div>
<div class="" id="yara_rule"> <div class="mb-2" id="yara_rule">
<div class="" id="yara_default_rules"> <div class="" id="yara_default_rules">
<select class="custom-select w-100 mb-3" name="yara_default_rule"> <h6>Default YARA rules:</h6>
<select class="custom-select w-100 mb-3" id="yara_default_rule" name="yara_default_rule" onchange="get_default_rule_content(this);">
<option selected>Select a default rule</option> <option selected>Select a default rule</option>
{% for yara_types in all_yara_files %} {% for yara_types in all_yara_files %}
{% for yara_file in all_yara_files[yara_types] %} {% for yara_file_name in all_yara_files[yara_types] %}
<option value="{{yara_types}}/{{yara_file}}">{{yara_types}} - {{yara_file}}</option> <option value="{{yara_types}}/{{yara_file_name}}">{{yara_types}} - {{yara_file_name}}</option>
{% endfor %} {% endfor %}
{% endfor %} {% endfor %}
</select> </select>
<pre class="border bg-light" id="default_yara_rule_content"></pre>
</div> </div>
<hr>
<h6>Custom YARA rules:</h6>
<div class="row" id="textarea"> <div class="row" id="textarea">
<textarea class="form-control mx-3" id="text_input" name="yara_custom_rule" placeholder="Enter your own YARA rule" rows="5"></textarea> <textarea class="form-control mx-3" id="text_input" name="yara_custom_rule" placeholder="Enter your own YARA rule" rows="5">{%if dict_tracker%}{%if dict_tracker['type']=='yara' and dict_tracker['content']%}{{dict_tracker['content']}}{%endif%}{%endif%}</textarea>
</div> </div>
</div> </div>
<br> <br>
<button class="btn btn-success mt-2"> <button class="btn btn-success mt-2">
<i class="fas fa-plus"></i> Add Tracker <i class="fas fa-plus"></i> {%if dict_tracker%}Edit{%else%}Create{%endif%} Tracker
</button> </button>
</form> </form>
@ -134,7 +144,7 @@ $(document).ready(function(){
$("#page-Tracker").addClass("active"); $("#page-Tracker").addClass("active");
$("#nav_manual_crawler").addClass("active"); $("#nav_manual_crawler").addClass("active");
$("#tracker_desc").hide(); $("#tracker_desc").hide();
$("#term").hide(); $("#tracker").hide();
$("#nb_word").hide(); $("#nb_word").hide();
$("#yara_rule").hide(); $("#yara_rule").hide();
@ -143,30 +153,38 @@ $(document).ready(function(){
if (tracker_type=="word") { if (tracker_type=="word") {
$("#tracker_desc").text("Token to track. You need to use a regex if you want to use one of the following special characters [<>~!?@#$%^&*|()_-+={}\":;,.\'\n\r\t]/\\ "); $("#tracker_desc").text("Token to track. You need to use a regex if you want to use one of the following special characters [<>~!?@#$%^&*|()_-+={}\":;,.\'\n\r\t]/\\ ");
$("#tracker_desc").show(); $("#tracker_desc").show();
$("#term").show(); $("#tracker").show();
$("#nb_word").hide(); $("#nb_word").hide();
$("#yara_rule").hide(); $("#yara_rule").hide();
} else if (tracker_type=="set") { } else if (tracker_type=="set") {
$("#tracker_desc").text("Set of Terms to track (space separated). This tracker is used to check if an item contain one or more terms specified in a set. If an item contain NB unique terms (by default NB of unique keywords = 1), this tracker is triggered. You need to use a regex if you want to use one of the following special characters [<>~!?@#$%^&*|()_-+={}\":;,.\'\n\r\t]/\\ "); $("#tracker_desc").text("Set of Terms to track (space separated). This tracker is used to check if an item contain one or more terms specified in a set. If an item contain NB unique terms (by default NB of unique keywords = 1), this tracker is triggered. You need to use a regex if you want to use one of the following special characters [<>~!?@#$%^&*|()_-+={}\":;,.\'\n\r\t]/\\ ");
$("#tracker_desc").show(); $("#tracker_desc").show();
$("#term").show(); $("#tracker").show();
$("#nb_word").show(); $("#nb_word").show();
$("#yara_rule").hide(); $("#yara_rule").hide();
} else if (tracker_type=="regex") { } else if (tracker_type=="regex") {
$("#tracker_desc").text("Enter a valid Python regex"); $("#tracker_desc").text("Enter a valid Python regex");
$("#tracker_desc").show(); $("#tracker_desc").show();
$("#term").show(); $("#tracker").show();
$("#nb_word").hide(); $("#nb_word").hide();
$("#yara_rule").hide(); $("#yara_rule").hide();
} else if (tracker_type=="yara") { } else if (tracker_type=="yara") {
$("#tracker_desc").text("Select a default yara rule or create your own rule:"); $("#tracker_desc").text("Select a default yara rule or create your own rule:");
$("#tracker_desc").show(); $("#tracker_desc").show();
$("#term").hide(); $("#tracker").hide();
$("#nb_word").hide(); $("#nb_word").hide();
$("#yara_rule").show(); $("#yara_rule").show();
} }
}); });
{%if dict_tracker%}
$('#tracker_type').val('{{dict_tracker['type']}}').change();
{%if dict_tracker['type']=='yara' and dict_tracker['yara_file']%}
$('#yara_default_rule').val('{{dict_tracker['yara_file']}}').change();
{%endif%}
{%endif%}
}); });
function toggle_sidebar(){ function toggle_sidebar(){
@ -183,4 +201,18 @@ function toggle_sidebar(){
} }
} }
function get_default_rule_content(selector){
var yara_name = selector.value
if (yara_name === "Select a default rule") {
jQuery("#default_yara_rule_content").text("")
} else {
$.getJSON("{{ url_for('hunter.get_default_yara_rule_content') }}?rule_name=" + yara_name,
function(data) {
jQuery("#default_yara_rule_content").text(data['content'])
});
}
}
</script> </script>
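
For reference, the new /tracker/yara/default_rule/content endpoint defined above returns JSON with a 'content' key, which the get_default_rule_content() helper reads. A hedged sketch of the same call from Python; the base URL, the authenticated session and the rule name are assumptions:

    # Sketch: same request the JS helper above performs via $.getJSON().
    import requests

    BASE_URL = 'https://127.0.0.1:7000'   # assumed default AIL Flask URL
    session = requests.Session()          # assumed to carry a valid login cookie

    r = session.get(BASE_URL + '/tracker/yara/default_rule/content',
                    params={'rule_name': 'crypto/certificate.yar'},  # illustrative name
                    verify=False)
    print(r.json().get('content'))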

View file

@ -171,10 +171,14 @@
</div> </div>
<a href="{{ url_for('hunter.delete_tracker') }}?uuid={{tracker_metadata['uuid']}}" class="float-right" style="font-size: 15px"> <div class="d-flex flex-row-reverse">
<button class='btn btn-danger'><i class="fas fa-trash-alt"></i> <a href="{{ url_for('hunter.delete_tracker') }}?uuid={{tracker_metadata['uuid']}}" style="font-size: 15px">
</button> <button class='btn btn-danger'><i class="fas fa-trash-alt"></i></button>
</a> </a>
<a href="{{ url_for('hunter.edit_tracked_menu') }}?uuid={{tracker_metadata['uuid']}}" class="mx-2" style="font-size: 15px">
<button class='btn btn-info'>Edit Tracker <i class="fas fa-pencil-alt"></i></button>
</a>
</div>
{%if yara_rule_content%} {%if yara_rule_content%}
<p class="my-0"></br></br><pre class="border bg-light">{{ yara_rule_content }}</pre></p> <p class="my-0"></br></br><pre class="border bg-light">{{ yara_rule_content }}</pre></p>
@ -227,7 +231,7 @@
{{item['date'][0:4]}}/{{item['date'][4:6]}}/{{item['date'][6:8]}} {{item['date'][0:4]}}/{{item['date'][4:6]}}/{{item['date'][6:8]}}
</td> </td>
<td> <td>
<a class="text-secondary" target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{item['id']}}"> <a class="text-secondary" target="_blank" href="{{ url_for('objects_item.showItem') }}?id={{item['id']}}">
<div style="line-height:0.9;">{{ item['id'] }}</div> <div style="line-height:0.9;">{{ item['id'] }}</div>
</a> </a>
<div class="mb-2"> <div class="mb-2">

View file

@ -67,11 +67,13 @@
<td> <td>
<span> <span>
<a target="_blank" href="{{ url_for('hunter.show_tracker') }}?uuid={{ dict_uuid['uuid'] }}"> <a target="_blank" href="{{ url_for('hunter.show_tracker') }}?uuid={{ dict_uuid['uuid'] }}">
{% if dict_uuid['term']%}
{% if dict_uuid['term']|length > 256 %} {% if dict_uuid['term']|length > 256 %}
{{ dict_uuid['term'][0:256]}}... {{ dict_uuid['term'][0:256]}}...
{% else %} {% else %}
{{ dict_uuid['term']}} {{ dict_uuid['term']}}
{% endif %} {% endif %}
{% endif %}
</a> </a>
</span> </span>
<div> <div>
@ -128,11 +130,13 @@
<td> <td>
<span> <span>
<a target="_blank" href="{{ url_for('hunter.show_tracker') }}?uuid={{ dict_uuid['uuid'] }}"> <a target="_blank" href="{{ url_for('hunter.show_tracker') }}?uuid={{ dict_uuid['uuid'] }}">
{% if dict_uuid['term']%}
{% if dict_uuid['term']|length > 256 %} {% if dict_uuid['term']|length > 256 %}
{{ dict_uuid['term'][0:256]}}... {{ dict_uuid['term'][0:256]}}...
{% else %} {% else %}
{{ dict_uuid['term']}} {{ dict_uuid['term']}}
{% endif %} {% endif %}
{% endif %}
</a> </a>
</span> </span>
<div> <div>

View file

@ -27,7 +27,6 @@ import Flask_config
app = Flask_config.app app = Flask_config.app
config_loader = Flask_config.config_loader config_loader = Flask_config.config_loader
baseUrl = Flask_config.baseUrl baseUrl = Flask_config.baseUrl
r_serv_pasteName = Flask_config.r_serv_pasteName
r_serv_metadata = Flask_config.r_serv_metadata r_serv_metadata = Flask_config.r_serv_metadata
max_preview_char = Flask_config.max_preview_char max_preview_char = Flask_config.max_preview_char
max_preview_modal = Flask_config.max_preview_modal max_preview_modal = Flask_config.max_preview_modal
@ -116,17 +115,7 @@ def search():
selected_index = os.path.join(baseindexpath, index_name) selected_index = os.path.join(baseindexpath, index_name)
''' temporary disabled ''' temporary disabled
# Search filename # # TODO: search by filename/item id
for path in r_serv_pasteName.smembers(q[0]):
r.append(path)
paste = Paste.Paste(path)
content = paste.get_p_content()
content_range = max_preview_char if len(content)>max_preview_char else len(content)-1
c.append(content[0:content_range])
curr_date = str(paste._get_p_date())
curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:]
paste_date.append(curr_date)
paste_size.append(paste._get_p_size())
''' '''
# Search full line # Search full line

View file

@ -98,7 +98,7 @@
{% for path in r %} {% for path in r %}
<tr> <tr>
<td> {{ loop.index0 }}</td> <td> {{ loop.index0 }}</td>
<td><a target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{path}}">{{ path }}</a> <td><a target="_blank" href="{{ url_for('objects_item.showItem') }}?id={{path}}">{{ path }}</a>
<div> <div>
{% for tag in paste_tags[loop.index0] %} {% for tag in paste_tags[loop.index0] %}
<a href="{{ url_for('tags_ui.get_obj_by_tags') }}?object_type=item&ltags={{ tag[1] }}"> <a href="{{ url_for('tags_ui.get_obj_by_tags') }}?object_type=item&ltags={{ tag[1] }}">
@ -109,7 +109,7 @@
</td> </td>
<td>{{ paste_date[loop.index0] }}</td> <td>{{ paste_date[loop.index0] }}</td>
<td>{{ paste_size[loop.index0] }}</td> <td>{{ paste_size[loop.index0] }}</td>
<td><p><span class="glyphicon glyphicon-info-sign" data-toggle="tooltip" data-placement="left" title="{{ c[loop.index0] }} "></span> <button type="button" class="btn-link" data-num="{{ loop.index0 + 1 }}" data-toggle="modal" data-target="#mymodal" data-url="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{ path }}" data-path="{{ path }}"><span class="fa fa-search-plus"></span></button></p></td> <td><p><span class="glyphicon glyphicon-info-sign" data-toggle="tooltip" data-placement="left" title="{{ c[loop.index0] }} "></span> <button type="button" class="btn-link" data-num="{{ loop.index0 + 1 }}" data-toggle="modal" data-target="#mymodal" data-url="{{ url_for('objects_item.showItem') }}?id={{ path }}" data-path="{{ path }}"><span class="fa fa-search-plus"></span></button></p></td>
</tr> </tr>
{% endfor %} {% endfor %}
</tbody> </tbody>
@ -207,11 +207,11 @@
} }
search_table.row.add( [ search_table.row.add( [
init_num_of_elements_in_table+((offset))+i+1, init_num_of_elements_in_table+((offset))+i+1,
"<a target=\"_blank\" href=\"{{ url_for('showsavedpastes.showsavedpaste') }}?paste="+data.path_array[i]+"&num="+i+"\"> "+ data.path_array[i] +"</a>" "<a target=\"_blank\" href=\"{{ url_for('objects_item.showItem') }}?id="+data.path_array[i]+"&num="+i+"\"> "+ data.path_array[i] +"</a>"
+ "<div>" + tag + "</div>", + "<div>" + tag + "</div>",
data.date_array[i], data.date_array[i],
data.size_array[i], data.size_array[i],
"<p><span class=\"glyphicon glyphicon-info-sign\" data-toggle=\"tooltip\" data-placement=\"left\" title=\""+curr_preview+"\"></span> <button type=\"button\" class=\"btn-link\" data-num=\""+i+"\" data-toggle=\"modal\" data-target=\"#mymodal\" data-url=\"{{ url_for('showsavedpastes.showsavedpaste') }}?paste="+data.path_array[i]+"&num="+i+"\" data-path=\""+data.path_array[i]+"\"><span class=\"fa fa-search-plus\"></span></button></p>" "<p><span class=\"glyphicon glyphicon-info-sign\" data-toggle=\"tooltip\" data-placement=\"left\" title=\""+curr_preview+"\"></span> <button type=\"button\" class=\"btn-link\" data-num=\""+i+"\" data-toggle=\"modal\" data-target=\"#mymodal\" data-url=\"{{ url_for('objects_item.showItem') }}?id="+data.path_array[i]+"&num="+i+"\" data-path=\""+data.path_array[i]+"\"><span class=\"fa fa-search-plus\"></span></button></p>"
] ).draw( false ); ] ).draw( false );
} }
offset = offset + data.path_array.length; offset = offset + data.path_array.length;

View file

@ -131,11 +131,11 @@ def new_token():
generate_new_token(current_user.get_id()) generate_new_token(current_user.get_id())
return redirect(url_for('settings.edit_profile')) return redirect(url_for('settings.edit_profile'))
@settings.route("/settings/new_token_user", methods=['GET']) @settings.route("/settings/new_token_user", methods=['POST'])
@login_required @login_required
@login_admin @login_admin
def new_token_user(): def new_token_user():
user_id = request.args.get('user_id') user_id = request.form.get('user_id')
if r_serv_db.exists('user_metadata:{}'.format(user_id)): if r_serv_db.exists('user_metadata:{}'.format(user_id)):
generate_new_token(user_id) generate_new_token(user_id)
return redirect(url_for('settings.users_list')) return redirect(url_for('settings.users_list'))
@ -215,18 +215,18 @@ def users_list():
new_user_dict['password'] = request.args.get('new_user_password') new_user_dict['password'] = request.args.get('new_user_password')
return render_template("users_list.html", all_users=all_users, new_user=new_user_dict, admin_level=True) return render_template("users_list.html", all_users=all_users, new_user=new_user_dict, admin_level=True)
@settings.route("/settings/edit_user", methods=['GET']) @settings.route("/settings/edit_user", methods=['POST'])
@login_required @login_required
@login_admin @login_admin
def edit_user(): def edit_user():
user_id = request.args.get('user_id') user_id = request.form.get('user_id')
return redirect(url_for('settings.create_user', user_id=user_id)) return redirect(url_for('settings.create_user', user_id=user_id))
@settings.route("/settings/delete_user", methods=['GET']) @settings.route("/settings/delete_user", methods=['POST'])
@login_required @login_required
@login_admin @login_admin
def delete_user(): def delete_user():
user_id = request.args.get('user_id') user_id = request.form.get('user_id')
delete_user_db(user_id) delete_user_db(user_id)
return redirect(url_for('settings.users_list')) return redirect(url_for('settings.users_list'))
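
These three settings routes now only accept POST, so the user id travels in the form body instead of the query string. A hedged sketch of scripted use with requests; the base URL, the admin session and the e-mail address are assumptions:

    # Sketch: the GET -> POST change for new_token_user / edit_user / delete_user.
    import requests

    BASE_URL = 'https://127.0.0.1:7000'   # assumed default AIL Flask URL
    session = requests.Session()          # assumed to hold an admin login cookie

    # user_id is now read from request.form; a query string would be ignored
    session.post(BASE_URL + '/settings/new_token_user',
                 data={'user_id': 'user@example.com'}, verify=False)
    session.post(BASE_URL + '/settings/delete_user',
                 data={'user_id': 'user@example.com'}, verify=False)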

View file

@ -52,8 +52,16 @@
<tr> <tr>
<td>API Key</td> <td>API Key</td>
<td> <td>
<span id="censored_key">
{{user_metadata['api_key'][:4]}}*********************************{{user_metadata['api_key'][-4:]}}
</span>
<span id="uncensored_key" style="display: none;">
{{user_metadata['api_key']}} {{user_metadata['api_key']}}
</span>
<a class="ml-3" href="{{url_for('settings.new_token')}}"><i class="fa fa-random"></i></a> <a class="ml-3" href="{{url_for('settings.new_token')}}"><i class="fa fa-random"></i></a>
<span class="btn btn-outline-secondary ml-1 px-1 py-0" id="btn_key" onclick="show_api_key();">
<i class="fas fa-eye"></i>
</span>
</td> </td>
</tr> </tr>
</tbody> </tbody>
@ -91,6 +99,13 @@ function toggle_sidebar(){
$('#core_content').addClass('col-lg-10') $('#core_content').addClass('col-lg-10')
} }
} }
function show_api_key() {
$('#censored_key').hide();
$('#btn_key').hide();
$('#uncensored_key').show();
}
</script> </script>
</html> </html>
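
The masking above keeps only the first and last four characters of the API key. The same rule written out as a small Python helper, purely to make the slicing explicit (the key value is made up):

    # Sketch: equivalent of the Jinja2 expression used in the template above.
    def censor_api_key(api_key):
        return api_key[:4] + '*' * 33 + api_key[-4:]

    print(censor_api_key('iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj'))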

View file

@ -17,17 +17,6 @@
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script> <script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script> <script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<style>
.edit_icon:hover{
cursor: pointer;
color: #17a2b8;
}
.trash_icon:hover{
cursor: pointer;
color: #c82333;
}
</style>
</head> </head>
<body> <body>
@ -75,16 +64,37 @@
<td>{{user['email']}}</td> <td>{{user['email']}}</td>
<td>{{user['role']}}</td> <td>{{user['role']}}</td>
<td> <td>
<form action="{{ url_for('settings.new_token_user') }}" id="post_new_token" method=POST>
<span id="censored_key_{{loop.index0}}">
{{user['api_key'][:4]}}*********************************{{user['api_key'][-4:]}}
</span>
<span id="uncensored_key_{{loop.index0}}" style="display: none;">
{{user['api_key']}} {{user['api_key']}}
<a class="ml-3" href="{{url_for('settings.new_token_user')}}?user_id={{user['email']}}"><i class="fa fa-random"></i></a> </span>
<input type="hidden" name="user_id" value="{{user['email']}}">
<button class="btn btn-outline-info ml-3 px-1 py-0" type="submit">
<i class="fas fa-random"></i>
</button>
<span class="btn btn-outline-secondary ml-1 px-1 py-0" id="btn_key_{{loop.index0}}" onclick="show_api_key({{loop.index0}})">
<i class="fas fa-eye"></i>
</span>
</form>
</td> </td>
<td> <td>
<a href="{{ url_for('settings.edit_user')}}?user_id={{user['email']}}"> <div class="d-flex justify-content-start">
<i class="fas fa-pencil-alt edit_icon"></i> <form action="{{ url_for('settings.edit_user') }}" id="post_edit_user" method=POST>
</a> <input type="hidden" name="user_id" value="{{user['email']}}">
<a href="{{ url_for('settings.delete_user')}}?user_id={{user['email']}}" class="ml-4"> <button class="btn btn-outline-primary ml-3 px-1 py-0" type="submit">
<i class="fas fa-trash-alt trash_icon"></i> <i class="fas fa-pencil-alt"></i>
</a> </button>
</form>
<form action="{{ url_for('settings.delete_user') }}" id="post_delete_user" method=POST>
<input type="hidden" name="user_id" value="{{user['email']}}">
<button class="btn btn-outline-danger ml-3 px-1 py-0" type="submit">
<i class="fas fa-trash-alt"></i>
</button>
</form>
</div>
</td> </td>
</tr> </tr>
{% endfor %} {% endfor %}
@ -117,6 +127,13 @@ function toggle_sidebar(){
$('#core_content').addClass('col-lg-10') $('#core_content').addClass('col-lg-10')
} }
} }
function show_api_key(key_id) {
$('#censored_key_' + key_id).hide();
$('#btn_key_' + key_id).hide();
$('#uncensored_key_' + key_id).show();
}
</script> </script>
</html> </html>

View file

@ -32,11 +32,9 @@ import Flask_config
app = Flask_config.app app = Flask_config.app
baseUrl = Flask_config.baseUrl baseUrl = Flask_config.baseUrl
r_serv_pasteName = Flask_config.r_serv_pasteName
r_serv_metadata = Flask_config.r_serv_metadata r_serv_metadata = Flask_config.r_serv_metadata
r_serv_tags = Flask_config.r_serv_tags r_serv_tags = Flask_config.r_serv_tags
r_serv_statistics = Flask_config.r_serv_statistics r_serv_statistics = Flask_config.r_serv_statistics
r_serv_onion = Flask_config.r_serv_onion
max_preview_char = Flask_config.max_preview_char max_preview_char = Flask_config.max_preview_char
max_preview_modal = Flask_config.max_preview_modal max_preview_modal = Flask_config.max_preview_modal
DiffMaxLineLength = Flask_config.DiffMaxLineLength DiffMaxLineLength = Flask_config.DiffMaxLineLength
@ -404,13 +402,6 @@ def show_item_min(requested_path , content_range=0):
# ============ ROUTES ============ # ============ ROUTES ============
@showsavedpastes.route("/showsavedpaste/") #completely shows the paste in a new tab
@login_required
@login_read_only
def showsavedpaste():
requested_path = request.args.get('paste', '')
return showpaste(0, requested_path)
@showsavedpastes.route("/showsaveditem_min/") #completely shows the paste in a new tab @showsavedpastes.route("/showsaveditem_min/") #completely shows the paste in a new tab
@login_required @login_required
@login_read_only @login_read_only

View file

@ -73,7 +73,7 @@
{% if item_parent %} {% if item_parent %}
<div class="list-group" id="item_parent"> <div class="list-group" id="item_parent">
<a href="{{ url_for('showsavedpastes.showsavedpaste')}}?paste={{item_parent}}" target="_blank">{{item_parent}}</a> <a href="{{ url_for('objects_item.showItem')}}?id={{item_parent}}" target="_blank">{{item_parent}}</a>
</div> </div>
{% endif %} {% endif %}
@ -179,7 +179,7 @@
</tr> </tr>
<tr> <tr>
<td>Father</td> <td>Father</td>
<td><a target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{ crawler_metadata['paste_father'] }}" id='paste_father'>{{ crawler_metadata['paste_father'] }}</a></td> <td><a target="_blank" href="{{ url_for('objects_item.showItem') }}?id={{ crawler_metadata['paste_father'] }}" id='paste_father'>{{ crawler_metadata['paste_father'] }}</a></td>
</tr> </tr>
<tr> <tr>
<td>Url</td> <td>Url</td>
@ -220,7 +220,7 @@
</div> </div>
<div class="col-2"> <div class="col-2">
<div class="mt-2"> <div class="mt-2">
<small><a class="text-info" href="{{ url_for('showsavedpastes.showsavedrawpaste') }}?paste={{ request.args.get('paste') }}" id='raw_paste' > [Raw content] </a></small> <small><a class="text-info" href="{{ url_for('objects_item.item_raw_content') }}?id={{ request.args.get('paste') }}" id='raw_paste' > [Raw content] </a></small>
</div> </div>
</div> </div>
</div> </div>

View file

@ -176,7 +176,7 @@
html_to_add += "<td>"+curr_data.size+"</td>"; html_to_add += "<td>"+curr_data.size+"</td>";
html_to_add += "<td>"+curr_data.lineinfo[0]+"</td>"; html_to_add += "<td>"+curr_data.lineinfo[0]+"</td>";
html_to_add += "<td>"+curr_data.lineinfo[1]+"</td>"; html_to_add += "<td>"+curr_data.lineinfo[1]+"</td>";
html_to_add += "<td><div class=\"row\"><button class=\"btn btn-xs btn-default\" data-toggle=\"popover\" data-placement=\"left\" data-content=\""+curr_data.content.replace(/\"/g, "\'")+"\">Preview content</button><a target=\"_blank\" href=\"{{ url_for('showsavedpastes.showsavedpaste') }}?paste="+curr_data.path+"&num=0\"> <button type=\"button\" class=\"btn btn-xs btn-info\">Show Paste</button></a></div></td>"; html_to_add += "<td><div class=\"row\"><button class=\"btn btn-xs btn-default\" data-toggle=\"popover\" data-placement=\"left\" data-content=\""+curr_data.content.replace(/\"/g, "\'")+"\">Preview content</button><a target=\"_blank\" href=\"{{ url_for('objects_item.showItem') }}?id="+curr_data.path+"&num=0\"> <button type=\"button\" class=\"btn btn-xs btn-info\">Show Paste</button></a></div></td>";
html_to_add += "</tr>"; html_to_add += "</tr>";
} }

View file

@ -460,7 +460,7 @@ function bindEventsForCurrentPage() {
html_to_add += "<td>"+curr_data.size+"</td>"; html_to_add += "<td>"+curr_data.size+"</td>";
html_to_add += "<td>"+curr_data.lineinfo[0]+"</td>"; html_to_add += "<td>"+curr_data.lineinfo[0]+"</td>";
html_to_add += "<td>"+curr_data.lineinfo[1]+"</td>"; html_to_add += "<td>"+curr_data.lineinfo[1]+"</td>";
html_to_add += "<td><div class=\"row\"><button class=\"btn btn-xs btn-default\" data-toggle=\"popover\" data-placement=\"left\" data-content=\""+curr_data.content.replace(/\"/g, "\'")+"\">Preview content</button><a target=\"_blank\" href=\"{{ url_for('showsavedpastes.showsavedpaste') }}?paste="+curr_data.path+"&num=0\"> <button type=\"button\" class=\"btn btn-xs btn-info\">Show Paste</button></a></div></td>"; html_to_add += "<td><div class=\"row\"><button class=\"btn btn-xs btn-default\" data-toggle=\"popover\" data-placement=\"left\" data-content=\""+curr_data.content.replace(/\"/g, "\'")+"\">Preview content</button><a target=\"_blank\" href=\"{{ url_for('objects_item.showItem') }}?id="+curr_data.path+"&num=0\"> <button type=\"button\" class=\"btn btn-xs btn-info\">Show Paste</button></a></div></td>";
html_to_add += "</tr>"; html_to_add += "</tr>";
} }

View file

@ -0,0 +1,7 @@
.hg-text{
padding-top: 0.2em;
padding-bottom: 0.2em;
padding-right: 0.15em;
padding-left: 0.15em;
background-color: #2e5;
}

View file

@ -251,7 +251,7 @@ function create_log_table(obj_json) {
msage.appendChild(document.createTextNode(message.join(" "))); msage.appendChild(document.createTextNode(message.join(" ")));
var paste_path = parsedmess[5]; var paste_path = parsedmess[5];
var url_to_saved_paste = url_showSavedPath+"?paste="+paste_path+"&num="+parsedmess[0]; var url_to_saved_paste = url_showSavedPath+"?id="+paste_path;
var action_icon_a = document.createElement("A"); var action_icon_a = document.createElement("A");
action_icon_a.setAttribute("TARGET", "_blank"); action_icon_a.setAttribute("TARGET", "_blank");

View file

@ -38,7 +38,7 @@
{% endfor %} {% endfor %}
</div> </div>
<div class="mb-2 float-right"> <div class="mb-2 float-right">
<a href="{{ url_for('showsavedpastes.showsavedpaste')}}?paste={{ dict_object["correlation_id"] }}" target="_blank" style="font-size: 15px"> <a href="{{ url_for('objects_item.showItem')}}?id={{ dict_object["correlation_id"] }}" target="_blank" style="font-size: 15px">
<button class="btn btn-info"><i class="fas fa-search"></i> Show Paste <button class="btn btn-info"><i class="fas fa-search"></i> Show Paste
</button> </button>
</a> </a>

View file

@ -92,24 +92,7 @@
</tbody> </tbody>
</table> </table>
<div class="d-flex justify-content-center my-4"> {% include 'domains/block_domains_name_search.html' %}
<div class="card border-secondary" style="max-width: 40rem;">
<div class="card-body text-dark">
<h5 class="card-title">Show Domain:</h5>
<form class="" action="{{url_for('crawler_splash.showDomain')}}" method="post">
<div class="input-group mb-3">
<input type="text" class="form-control" style="min-width: 30rem;" placeholder="Domain name" aria-label="Domain name" aria-describedby="btn_show_domain" id="in_show_domain" , name="in_show_domain">
<div class="input-group-append">
<button class="btn btn-info" type="submit" id="btn_show_domain">
<i class="fas fa-search"></i>
</button>
</div>
</div>
</form>
</div>
</div>
</div>
<hr> <hr>
<div class="row mb-3"> <div class="row mb-3">
@ -135,6 +118,10 @@
{% include 'tags/block_obj_tags_search.html' %} {% include 'tags/block_obj_tags_search.html' %}
{% endwith %} {% endwith %}
{% with object_type='domain' %}
{% include 'domains/block_languages_search.html' %}
{% endwith %}
</div> </div>
</div> </div>
</div> </div>

View file

@ -68,7 +68,7 @@
</div> </div>
{% with dict_data=dict_data, bootstrap_label=bootstrap_label %} {% with l_dict_domains=dict_data['list_elem'], bootstrap_label=bootstrap_label %}
{% include 'domains/card_img_domain.html' %} {% include 'domains/card_img_domain.html' %}
{% endwith %} {% endwith %}

View file

@ -67,6 +67,7 @@
<th>First Seen</th> <th>First Seen</th>
<th>Last Check</th> <th>Last Check</th>
<th>Ports</th> <th>Ports</th>
<th>Languages</th>
</tr> </tr>
</thead> </thead>
<tbody> <tbody>
@ -74,6 +75,11 @@
<td class="panelText">{%if "first_seen" in dict_domain%}{{ dict_domain['first_seen'] }}{%endif%}</td> <td class="panelText">{%if "first_seen" in dict_domain%}{{ dict_domain['first_seen'] }}{%endif%}</td>
<td class="panelText">{%if "last_check" in dict_domain%}{{ dict_domain['last_check'] }}{%endif%}</td> <td class="panelText">{%if "last_check" in dict_domain%}{{ dict_domain['last_check'] }}{%endif%}</td>
<td class="panelText">{%if dict_domain["ports"]%}{{ dict_domain["ports"] }}{%endif%}</td> <td class="panelText">{%if dict_domain["ports"]%}{{ dict_domain["ports"] }}{%endif%}</td>
<td class="panelText">
{% for languages in dict_domain['languages'] %}
{{languages}}
{% endfor %}
</td>
</tr> </tr>
</tbody> </tbody>
</table> </table>
@ -94,12 +100,36 @@
</button> </button>
</div> </div>
<hr> <table class="table table-sm table-responsive">
{% if dict_domain['origin_item']=='manual' or dict_domain['origin_item']=='auto' %} <thead>
<span class="badge badge-dark">{{ dict_domain['origin_item'] }}</span> <td></td>
<td>Last Origin:</td>
</thead>
<tbody>
{% if 'father' in dict_domain %}
{% if dict_domain['father']=='manual' or dict_domain['father']=='auto' %}
<tr>
<td></td>
<td><span class="badge badge-dark">{{ dict_domain['father'] }}</span></td>
</tr>
{%else%} {%else%}
Last Origin: <a class="badge" target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste', paste=dict_domain['origin_item']) }}" />{{ dict_domain['origin_item'] }}</a> <tr>
<td><i class="far fa-file"></i></td>
<td>
<a class="badge" target="_blank" href="{{ url_for('objects_item.showItem', id=dict_domain['father']['item_father']) }}" />{{ dict_domain['father']['item_father'] }}</a>
</td>
</tr>
{% if dict_domain['father']['domain_father'] %}
<td><i class="fab fa-html5"></i></td>
<td>
<a class="badge" target="_blank" href="{{ url_for('crawler_splash.showDomain', domain=dict_domain['father']['domain_father']) }}" />{{ dict_domain['father']['domain_father'] }}</a>
</td>
{%endif%} {%endif%}
{%endif%}
{%endif%}
</tbody>
</table>
{% if 'correlation_nb' in dict_domain %} {% if 'correlation_nb' in dict_domain %}
{% if dict_domain["correlation_nb"] > 0 %} {% if dict_domain["correlation_nb"] > 0 %}
@ -352,7 +382,7 @@
{% for item in dict_domain["crawler_history"]["items"] %} {% for item in dict_domain["crawler_history"]["items"] %}
<tr> <tr>
<td> <td>
<a target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{item["id"]}}" class="text-secondary"> <a target="_blank" href="{{ url_for('objects_item.showItem') }}?id={{item["id"]}}" class="text-secondary">
<div style="line-height:0.9;">{{ item["link"] }}</div> <div style="line-height:0.9;">{{ item["link"] }}</div>
</a> </a>
<div> <div>
@ -498,7 +528,7 @@ function toggle_sidebar(){
<script> <script>
var ctx = canvas.getContext('2d'), img = new Image(); var ctx = canvas.getContext('2d'), img = new Image();
var base_url = "{{ url_for('showsavedpastes.screenshot', filename="") }}"; var base_url = "{{ url_for('showsavedpastes.screenshot', filename="") }}";
var screenshot_href = "{{ url_for('showsavedpastes.showsavedpaste') }}?paste="; var screenshot_href = "{{ url_for('objects_item.showItem') }}?id=";
/// turn off image smoothing /// turn off image smoothing
ctx.webkitImageSmoothingEnabled = false; ctx.webkitImageSmoothingEnabled = false;

View file

@ -0,0 +1,50 @@
<div class="d-flex justify-content-center my-4">
<div class="card border-secondary" style="max-width: 40rem;">
<div class="card-body text-dark">
<h5 class="card-title">Search Domain by name:</h5>
<div class="input-group mb-3">
<input type="text" class="form-control" id="in_search_name" value="{{search}}" style="min-width: 30rem;" placeholder="Domain name" aria-label="Domain name" aria-describedby="btn_show_domain">
<div class="input-group-append">
<button class="btn btn-info" type="button" id="btn_search_name" onclick="searchDomainName()">
<i class="fas fa-search"></i>
</button>
</div>
</div>
<div class="mb-3">
<div class="custom-control custom-switch">
<input class="custom-control-input" type="checkbox" name="domain_onion_switch" value="" id="domain_onion_switch" {%if 'onion' in domains_types or not domains_types%}checked{%endif%}>
<label class="custom-control-label" for="domain_onion_switch">
<span class="badge badge-danger"><i class="fas fa-user-secret"></i> Onion Domains</span>
</label>
</div>
<div class="custom-control custom-switch">
<input class="custom-control-input" type="checkbox" name="domain_regular_switch" value="True" id="domain_regular_switch"{%if 'regular' in domains_types%}checked{%endif%}>
<label class="custom-control-label" for="domain_regular_switch">
<span class="badge badge-warning"><i class="fab fa-html5"></i> Web Domains</span>
</label>
</div>
</div>
</div>
</div>
</div>
<script>
function searchDomainName() {
var all_domain_types = ['onion', 'regular'] // TODO: load from flask
var l_domains_types = [];
var data = document.getElementById('in_search_name').value;
for (var i = 0; i < all_domain_types.length; i++) {
if (document.getElementById('domain_'+ all_domain_types[i] +'_switch').checked) {
l_domains_types.push(all_domain_types[i])
}
}
var parameter = "?name=" + data + "&domain_types=" + l_domains_types +"{%if page%}&page={{ page }}{%endif%}";
window.location.href = "{{ url_for('crawler_splash.domains_search_name') }}" + parameter;
}
</script>
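
searchDomainName() above builds a query string of the form ?name=<text>&domain_types=onion,regular&page=<n>. A sketch of how the receiving route could parse it; the endpoint name comes from the template, but the URL and the body are assumptions, not the project's implementation:

    # Assumption-only sketch of a route like crawler_splash.domains_search_name.
    from flask import Flask, request, jsonify

    app = Flask(__name__)

    @app.route('/domains/explorer/name')          # illustrative URL
    def domains_search_name():
        name = request.args.get('name', '')
        domain_types = request.args.get('domain_types', 'onion').split(',')
        page = int(request.args.get('page', 1))
        # the real view would query the domain index here
        return jsonify({'search': name, 'domains_types': domain_types, 'page': page})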

View file

@ -0,0 +1,73 @@
<div class="card mb-3 mt-1">
<div class="card-header text-white bg-dark">
<h5 class="card-title mb-0">
<i class="fas fa-language" style="font-size: 1.8rem;"></i> Domains by Languages :
</h5>
</div>
<div class="card-body">
<div class="input-group">
<div class="input-group-prepend">
<button class="btn btn-outline-danger" type="button" id="button-clear" style="z-index: 1;" onclick="emptySearch()">
<i class="fas fa-eraser"></i>
</button>
</div>
<input id="llanguages" name="llanguages" type="text" class="form-control" aria-describedby="button-clear" autocomplete="off">
</div>
<div class="mb-3">
<div class="custom-control custom-switch">
<input class="custom-control-input" type="checkbox" name="domain_onion_switch" value="" id="domain_onion_switch" {%if 'onion' in domains_types%}checked{%endif%}>
<label class="custom-control-label" for="domain_onion_switch">
<span class="badge badge-danger"><i class="fas fa-user-secret"></i> Onion Domains</span>
</label>
</div>
<div class="custom-control custom-switch">
<input class="custom-control-input" type="checkbox" name="domain_regular_switch" value="True" id="domain_regular_switch"{%if 'regular' in domains_types%}checked{%endif%}>
<label class="custom-control-label" for="domain_regular_switch">
<span class="badge badge-warning"><i class="fab fa-html5"></i> Web Domains</span>
</label>
</div>
</div>
<button class="btn btn-primary" type="button" id="button-search" onclick="searchLanguages()">
<i class="fas fa-search"></i> Search
</button>
</div>
</div>
<link href="{{ url_for('static', filename='css/tags.css') }}" rel="stylesheet" type="text/css" />
<script src="{{ url_for('static', filename='js/tags.js') }}"></script>
<script>
var llanguages;
$.getJSON("{{ url_for('crawler_splash.domains_all_languages_json') }}", //?object_type={{ object_type }}"
function(data) {
llanguages = $('#llanguages').tagSuggest({
data: data,
value: [{%if current_languages%}{% for language in current_languages %}'{{language|safe}}',{%endfor%}{%endif%}],
sortOrder: 'name',
maxDropHeight: 200,
name: 'llanguages'
});
});
function searchLanguages() {
var all_domain_types = ['onion', 'regular'] // TODO: load from flask
var l_domains_types = [];
var data = llanguages.getValue();
for (var i = 0; i < all_domain_types.length; i++) {
if (document.getElementById('domain_'+ all_domain_types[i] +'_switch').checked) {
l_domains_types.push(all_domain_types[i])
}
}
var parameter = "?languages=" + data + "&domain_types=" + l_domains_types +"{%if page%}&page={{ page }}{%endif%}";
window.location.href = "{{ url_for('crawler_splash.domains_search_languages_get') }}" + parameter;
}
function emptySearch() {
llanguages.clear();
}
</script>

View file

@ -1,27 +1,23 @@
{% for dict_domain in dict_data['list_elem'] %} {% for dict_domain in l_dict_domains %}
{% if loop.index0 % 4 == 0 %} {% if loop.index0 % 4 == 0 %}
<div class="card-deck mt-3"> <div class="card-deck mt-3">
{% endif %} {% endif %}
<div class="card"> <div class="card {% if dict_domain["status"] %}border-success{% else %}border-danger{% endif %}">
<div class="text-center"> <div class="text-center">
<canvas id="canvas_{{loop.index0}}" style="max-height: 400px; max-width: 100%;"></canvas> <canvas id="canvas_{{loop.index0}}" style="max-height: 400px; max-width: 100%;"></canvas>
</div> </div>
<div class="card-body pb-0"> <div class="card-body pb-0">
<h5 class="card-title"> <h5 class="card-title">
<a target="_blank" href="{{ url_for('crawler_splash.showDomain') }}?domain={{dict_domain["id"]}}"> <a target="_blank" href="{{ url_for('crawler_splash.showDomain') }}?domain={{dict_domain["id"]}}">
{% if 'hl-start' in dict_domain %}
{{dict_domain["id"][:dict_domain['hl-start']]}}<span class="hg-text">{{dict_domain["id"][dict_domain['hl-start']:dict_domain['hl-end']]}}</span>{{dict_domain["id"][dict_domain['hl-end']:]}}
{% else %}
{{dict_domain["id"]}} {{dict_domain["id"]}}
{% endif %}
</a> </a>
</h5>
<p class="card-text">
<small class="text-muted">
First seen: {{dict_domain["first_seen"]}}<br>
Last_seen: {{dict_domain["first_seen"]}}<br>
Ports: {{dict_domain["ports"]}}
</small>
</p>
<small class="text-muted">Status: </small>
{% if dict_domain["status"] %} {% if dict_domain["status"] %}
<span style="color:Green;"> <span style="color:Green;">
<i class="fas fa-check-circle"></i> UP <i class="fas fa-check-circle"></i> UP
@ -31,6 +27,36 @@
<i class="fas fa-times-circle"></i> DOWN <i class="fas fa-times-circle"></i> DOWN
</span> </span>
{% endif %} {% endif %}
</h5>
<div>
<span class="badge badge-dark">
<span data-toggle="tooltip" data-placement="top" title="Tooltip on top">
<span class="badge badge-info" style="font-size: 0.8rem;">
<i class="fas fa-hourglass-start"></i>
</span>
{{dict_domain["first_seen"]}}
</span>
<span class="badge badge-light mx-1" style="font-size: 1rem;">
<i class="far fa-calendar-alt"></i>
</span>
{{dict_domain["first_seen"]}}
<span class="badge badge-secondary" style="font-size: 0.8rem;">
<i class="fas fa-hourglass-end"></i>
</span>
</span>
</div>
<p class="card-text">
<small class="text-muted">
Ports: {{dict_domain["ports"]}}<br>
{% if dict_domain['languages'] %}
Languages:
{% for language in dict_domain['languages'] %}
<span class="badge badge-secondary" style="font-size: 0.75rem;">{{ language }}</span>
{% endfor %}
{% endif %}
</small>
</p>
<div> <div>
{% for tag in dict_domain['tags'] %} {% for tag in dict_domain['tags'] %}
<a href="{{ url_for('tags_ui.get_obj_by_tags') }}?object_type=domain&ltags={{ tag }}"> <a href="{{ url_for('tags_ui.get_obj_by_tags') }}?object_type=domain&ltags={{ tag }}">
@ -50,6 +76,6 @@
{% endfor %} {% endfor %}
{% if dict_data['list_elem']|length % 4 != 0 %} {% if l_dict_domains|length % 4 != 0 %}
</div> </div>
{% endif %} {% endif %}

View file

@ -0,0 +1,192 @@
<!DOCTYPE html>
<html>
<head>
<title>Domains by Languages - AIL</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js') }}"></script>
<style>
.card-columns {
column-count: 4;
}
</style>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'crawler/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="row">
<div class="col-12 col-lg-6">
{% include 'domains/block_languages_search.html' %}
</div>
<div class="col-12 col-xl-6">
<div class="card my-2 border-secondary" >
<div class="card-body py-2">
<div class="row">
<div class="col-md-3 text-center">
<button class="btn btn-primary" onclick="blocks.value=0;pixelate_all();">
<i class="fas fa-eye-slash"></i>
<span class="label-icon">Hide</span>
</button>
</div>
<div class="col-md-6">
<input class="custom-range mt-2" id="blocks" type="range" min="1" max="50" value="5">
</div>
<div class="col-md-3 text-center">
<button class="btn btn-primary" onclick="blocks.value=50;pixelate_all();">
<i class="fas fa-plus-square"></i>
<span class="label-icon">Full resolution</span>
</button>
</div>
</div>
</div>
</div>
</div>
</div>
{% with l_dict_domains=l_dict_domains['list_elem'], bootstrap_label=bootstrap_label %}
{% include 'domains/card_img_domain.html' %}
{% endwith %}
<br>
<br>
{%if l_dict_domains['list_elem']%}
{% with page=l_dict_domains['page'], nb_page_max=l_dict_domains['nb_pages'], nb_first_elem=l_dict_domains['nb_first_elem'], nb_last_elem=l_dict_domains['nb_last_elem'], nb_all_elem=l_dict_domains['nb_all_elem'] %}
{% set target_url=url_for('crawler_splash.domains_search_languages_get') + "?languages=" + ','.join(current_languages)%}
{%if domains_types %}
{% set target_url = target_url + '&domain_types=' + ','.join(domains_types)%}
{%endif%}
{% include 'pagination.html' %}
{% endwith %}
{%endif%}
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_title_domains_explorer').removeClass("text-muted");
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>
<script>
// img_url
// ctx
// canevas_id
var dict_canevas_blurr_img = {}
function init_canevas_blurr_img(canevas_id, img_url){
// ctx, turn off image smoothing
dict_canevas_blurr_img[canevas_id] = {}
var canvas_container = document.getElementById(canevas_id);
var ctx = canvas_container.getContext('2d');
ctx.webkitImageSmoothingEnabled = false;
ctx.imageSmoothingEnabled = false;
dict_canevas_blurr_img[canevas_id]["ctx"] = ctx;
// img
dict_canevas_blurr_img[canevas_id]["img"] = new Image();
dict_canevas_blurr_img[canevas_id]["img"].onload = function() {pixelate_img(canevas_id);};
dict_canevas_blurr_img[canevas_id]["img"].addEventListener("error", function() {img_error(canevas_id);});
dict_canevas_blurr_img[canevas_id]["img"].src = img_url;
}
function pixelate_all(){
Object.entries(dict_canevas_blurr_img).forEach(([key, value]) => {
pixelate_img(key);
});
}
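// How the pixelation below works: the source image is first drawn scaled down
// (slider values 1-49 map to a 0.01-0.49 scale factor, 50 means full resolution),
// then the small result is drawn back onto the canvas at full size with image
// smoothing disabled, so every remaining pixel is enlarged into a visible block.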
function pixelate_img(canevas_id) {
if (typeof canevas_id !== 'undefined') {
var canevas_to_blurr = document.getElementById(canevas_id);
/// use slider value
if( blocks.value == 50 ){
size = 1;
} else {
var size = (blocks.value) * 0.01;
}
canevas_to_blurr.width = dict_canevas_blurr_img[canevas_id]["img"].width;
canevas_to_blurr.height = dict_canevas_blurr_img[canevas_id]["img"].height;
/// cache scaled width and height
w = canevas_to_blurr.width * size;
h = canevas_to_blurr.height * size;
/// draw original image to the scaled size
dict_canevas_blurr_img[canevas_id]["ctx"].drawImage(dict_canevas_blurr_img[canevas_id]["img"], 0, 0, w, h);
/// pixelated
dict_canevas_blurr_img[canevas_id]["ctx"].drawImage(canevas_to_blurr, 0, 0, w, h, 0, 0, canevas_to_blurr.width, canevas_to_blurr.height);
}
}
function img_error(canevas_id) {
dict_canevas_blurr_img[canevas_id]["img"].onerror=null;
dict_canevas_blurr_img[canevas_id]["img"].src="{{ url_for('static', filename='image/AIL.png') }}";
}
blocks.addEventListener('change', pixelate_all, false);
{% for dict_domain in l_dict_domains['list_elem'] %}
{% if 'screenshot' in dict_domain %}
{% if dict_domain['is_tags_safe'] %}
var screenshot_url = "{{ url_for('showsavedpastes.screenshot', filename="") }}{{dict_domain['screenshot']}}";
{% else %}
var screenshot_url = "{{ url_for('static', filename='image/AIL.png') }}";
{% endif %}
init_canevas_blurr_img("canvas_{{loop.index0}}", screenshot_url);
{% endif %}
{% endfor %}
</script>
</html>
View file
@ -0,0 +1,195 @@
<!DOCTYPE html>
<html>
<head>
<title>Domain Search - AIL</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/ail-project.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js') }}"></script>
<style>
.card-columns {
column-count: 4;
}
</style>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="container-fluid">
<div class="row">
{% include 'crawler/menu_sidebar.html' %}
<div class="col-12 col-lg-10" id="core_content">
<div class="row">
<div class="col-12 col-lg-6">
{% with page=l_dict_domains['page'], search=l_dict_domains['search'] %}
{% include 'domains/block_domains_name_search.html' %}
{% endwith %}
</div>
<div class="col-12 col-xl-6">
<div class="card my-2 border-secondary" >
<div class="card-body py-2">
<div class="row">
<div class="col-md-3 text-center">
<button class="btn btn-primary" onclick="blocks.value=0;pixelate_all();">
<i class="fas fa-eye-slash"></i>
<span class="label-icon">Hide</span>
</button>
</div>
<div class="col-md-6">
<input class="custom-range mt-2" id="blocks" type="range" min="1" max="50" value="5">
</div>
<div class="col-md-3 text-center">
<button class="btn btn-primary" onclick="blocks.value=50;pixelate_all();">
<i class="fas fa-plus-square"></i>
<span class="label-icon">Full resolution</span>
</button>
</div>
</div>
</div>
</div>
</div>
</div>
{% with l_dict_domains=l_dict_domains['list_elem'], bootstrap_label=bootstrap_label %}
{% include 'domains/card_img_domain.html' %}
{% endwith %}
<br>
<br>
{%if l_dict_domains['list_elem']%}
{% with page=l_dict_domains['page'], nb_page_max=l_dict_domains['nb_pages'], nb_first_elem=l_dict_domains['nb_first_elem'], nb_last_elem=l_dict_domains['nb_last_elem'], nb_all_elem=l_dict_domains['nb_all_elem'] %}
{% set target_url=url_for('crawler_splash.domains_search_name') + "?name=" + l_dict_domains['search']%}
{%if domains_types %}
{% set target_url = target_url + '&domain_types=' + ','.join(domains_types)%}
{%endif%}
{% include 'pagination.html' %}
{% endwith %}
{%endif%}
</div>
</div>
</div>
</body>
<script>
$(document).ready(function(){
$('#nav_title_domains_explorer').removeClass("text-muted");
});
function toggle_sidebar(){
if($('#nav_menu').is(':visible')){
$('#nav_menu').hide();
$('#side_menu').removeClass('border-right')
$('#side_menu').removeClass('col-lg-2')
$('#core_content').removeClass('col-lg-10')
}else{
$('#nav_menu').show();
$('#side_menu').addClass('border-right')
$('#side_menu').addClass('col-lg-2')
$('#core_content').addClass('col-lg-10')
}
}
</script>
<script>
// dict_canevas_blurr_img maps a canvas id (canevas_id) to its 2D context ("ctx") and its Image object ("img", loaded from img_url)
var dict_canevas_blurr_img = {};
function init_canevas_blurr_img(canevas_id, img_url){
// get the 2D context and turn off image smoothing
dict_canevas_blurr_img[canevas_id] = {}
var canvas_container = document.getElementById(canevas_id);
var ctx = canvas_container.getContext('2d');
ctx.webkitImageSmoothingEnabled = false;
ctx.imageSmoothingEnabled = false;
dict_canevas_blurr_img[canevas_id]["ctx"] = ctx;
// img
dict_canevas_blurr_img[canevas_id]["img"] = new Image();
dict_canevas_blurr_img[canevas_id]["img"].onload = function() {pixelate_img(canevas_id);};
dict_canevas_blurr_img[canevas_id]["img"].addEventListener("error", function() {img_error(canevas_id);});
dict_canevas_blurr_img[canevas_id]["img"].src = img_url;
}
function pixelate_all(){
Object.entries(dict_canevas_blurr_img).forEach(([key, value]) => {
pixelate_img(key);
});
}
function pixelate_img(canevas_id) {
if (typeof canevas_id !== 'undefined') {
var canevas_to_blurr = document.getElementById(canevas_id);
/// use slider value
if( blocks.value == 50 ){
size = 1;
} else {
var size = (blocks.value) * 0.01;
}
canevas_to_blurr.width = dict_canevas_blurr_img[canevas_id]["img"].width;
canevas_to_blurr.height = dict_canevas_blurr_img[canevas_id]["img"].height;
/// cache scaled width and height
w = canevas_to_blurr.width * size;
h = canevas_to_blurr.height * size;
/// draw original image to the scaled size
dict_canevas_blurr_img[canevas_id]["ctx"].drawImage(dict_canevas_blurr_img[canevas_id]["img"], 0, 0, w, h);
/// pixelated
dict_canevas_blurr_img[canevas_id]["ctx"].drawImage(canevas_to_blurr, 0, 0, w, h, 0, 0, canevas_to_blurr.width, canevas_to_blurr.height);
}
}
function img_error(canevas_id) {
dict_canevas_blurr_img[canevas_id]["img"].onerror=null;
dict_canevas_blurr_img[canevas_id]["img"].src="{{ url_for('static', filename='image/AIL.png') }}";
}
blocks.addEventListener('change', pixelate_all, false);
{% for dict_domain in l_dict_domains['list_elem'] %}
{% if 'screenshot' in dict_domain %}
{% if dict_domain['is_tags_safe'] %}
var screenshot_url = "{{ url_for('showsavedpastes.screenshot', filename="") }}{{dict_domain['screenshot']}}";
{% else %}
var screenshot_url = "{{ url_for('static', filename='image/AIL.png') }}";
{% endif %}
init_canevas_blurr_img("canvas_{{loop.index0}}", screenshot_url);
{% endif %}
{% endfor %}
</script>
</html>
View file
@ -0,0 +1,120 @@
<div id="create_hive_case_modal" class="modal fade" role="dialog">
<div class="modal-dialog modal-lg">
<div id="create_hive_case_modal_content" class="modal-content">
<div class="modal-header justify-content-center">
<img id="hive-logo" src="{{url_for('static', filename='image/thehive-logo.png') }}" width="500" >
</div>
<div class="modal-body">
<form method="post" action="{{ url_for('PasteSubmit.create_hive_case') }}" target="_blank">
<div class="input clear required">
<label for="EventThreatLevelId">Threat Level</label>
<select name="threat_level_hive" id="EventThreatLevelId" required="required">
<option value="1">High</option>
<option value="2" selected="selected">Medium</option>
<option value="3">Low</option>
</select>
</div>
<div class="input select required">
<label for="TLP">TLP</label>
<select name="hive_tlp" id="hive_tlp" required="required" class="selectpicker">
<option value="0">White</option>
<option value="1">Green</option>
<option value="2" selected="selected">Amber</option>
<option value="3">Red</option>
</select>
</div>
<div class="clear required">
<label for="hive_case_title">Title</label>
<input name="hive_case_title" class="form-control span6" placeholder="Title" type="text" id="hive_case_title" required="required"/>
</div>
<div class="clear required">
<label for="hive_description">Description</label>
<input name="hive_description" class="form-control span6" placeholder="Quick Case Description" type="text" id="hive_description" required="required"/>
</div>
<input type="hidden" id="paste" name="paste" value="{{ dict_item['id'] }}">
</div>
<div class="modal-footer">
<button class="btn btn-primary" type="submit">
<i class="fas fa-plus"></i>
Create Case
</button>
</form>
<button type="button" class="btn btn-default" data-dismiss="modal" >Close</button>
</div>
</div>
</div>
</div>
<script> // TODO: add tags to PASTE or DOMAIN
var ltags;
var ltagsgalaxies;
$.getJSON("{{ url_for('Tags.get_all_tags_taxonomies') }}",
function(data) {
ltags = $('#ltags').tagSuggest({
data: data,
maxDropHeight: 200,
name: 'ltags'
});
});
$.getJSON("{{ url_for('Tags.get_all_tags_galaxy') }}",
function(data) {
ltagsgalaxies = $('#ltagsgalaxies').tagSuggest({
data: data,
maxDropHeight: 200,
name: 'ltagsgalaxies'
});
});
jQuery("#all-tags-taxonomies").click(function(e){
//change input tags list
$.getJSON("{{ url_for('Tags.get_all_tags_taxonomies') }}",
function(data) {
ltags.setData(data)
});
});
jQuery("#all-tags-galaxies").click(function(e){
$.getJSON("{{ url_for('Tags.get_all_tags_galaxy') }}",
function(data) {
ltagsgalaxies.setData(data)
});
});
{% for taxo in modal_add_tags['active_taxonomies'] %}
jQuery("#{{ taxo }}-id{{ loop.index0 }}").click(function(e){
$.getJSON("{{ url_for('Tags.get_tags_taxonomie') }}?taxonomie={{ taxo }}",
function(data) {
ltags.setData(data)
});
});
{% endfor %}
{% for galaxy in modal_add_tags['active_galaxies'] %}
jQuery("#{{ galaxy }}-idgalax{{ loop.index0 }}").click(function(e){
$.getJSON("{{ url_for('Tags.get_tags_galaxy') }}?galaxy={{ galaxy }}",
function(data) {
ltagsgalaxies.setData(data)
});
});
{% endfor %}
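// addTags() collects the selected taxonomy and galaxy tags and redirects to the
// tags_ui.add_tags endpoint for the current object (object_id / object_type come
// from the modal_add_tags context passed to this template).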
function addTags() {
var tags = ltags.getValue()
var tagsgalaxy = ltagsgalaxies.getValue()
window.location.replace("{{ url_for('tags_ui.add_tags') }}?tags=" + tags + "&tagsgalaxies=" + tagsgalaxy + "&object_id={{ modal_add_tags['object_id'] }}&object_type={{ modal_add_tags['object_type'] }}");
}
</script>
View file
@ -86,7 +86,7 @@ function get_html_and_update_modal(event, truemodal) {
button.tooltip(button); button.tooltip(button);
$("#container-show-more").append(button); $("#container-show-more").append(button);
$("#modal_show_min_item_button_show_item").attr('href', '{{ url_for('showsavedpastes.showsavedpaste') }}?paste=' + $(modal).attr('data-path')); $("#modal_show_min_item_button_show_item").attr('href', '{{ url_for('objects_item.showItem') }}?id=' + $(modal).attr('data-path'));
$("#modal_show_min_item_button_show_item").show('fast'); $("#modal_show_min_item_button_show_item").show('fast');
$("#loading-gif-modal").css("visibility", "hidden"); // Hide the loading GIF $("#loading-gif-modal").css("visibility", "hidden"); // Hide the loading GIF
if ($("[data-initsize]").attr('data-initsize') < char_to_display) { // All the content is displayed if ($("[data-initsize]").attr('data-initsize') < char_to_display) { // All the content is displayed
View file
@ -0,0 +1,446 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Show Item Object - AIL</title>
<link rel="icon" href="{{ url_for('static', filename='image/ail-icon.png') }}">
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="{{ url_for('static', filename='css/bootstrap4.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/font-awesome.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/tags.css') }}" rel="stylesheet" type="text/css" />
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap4.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/tags.js') }}"></script>
<style>
.rotate{
-moz-transition: all 0.1s linear;
-webkit-transition: all 0.1s linear;
transition: all 0.1s linear;
}
.rotate.down{
-moz-transform:rotate(180deg);
-webkit-transform:rotate(180deg);
transform:rotate(180deg);
}
</style>
</head>
<body>
{% include 'nav_bar.html' %}
<div class="card my-2 mx-2">
<div class="card-header bg-dark">
<h3 class="text-white text-center" >{{ dict_item['name'] }}</h3>
</div>
<div class="card-body pb-1">
<table class="table table-condensed">
<thead class="">
<tr>
<th>Date</th>
<th>Source</th>
<th>Encoding</th>
<th>Size (KB)</th>
<th>Number of lines</th>
<th>Max line length</th>
</tr>
</thead>
<tbody>
<tr>
<td>{{ dict_item['metadata']['date'] }}</td>
<td>{{ dict_item['metadata']['source'] }}</td>
<td>{{ dict_item['metadata']['encoding'] }}</td>
<td>{{ dict_item['metadata']['size'] }}</td>
<td>{{ dict_item['metadata']['lines']['nb'] }}</td>
<td>{{ dict_item['metadata']['lines']['max_length'] }}</td>
</tr>
</tbody>
</table>
<div>
<h5>
<div>
{% include 'modals/edit_tag.html' %}
{% for tag in dict_item['tags'] %}
<button class="btn btn-{{ bootstrap_label[loop.index0 % 5] }}" data-toggle="modal" data-target="#edit_tags_modal"
data-tagid="{{ tag }}" data-objtype="item" data-objid="{{ dict_item['id'] }}">
{{ tag }}
</button>
{% endfor %}
<br>
{% include 'modals/add_tags.html' %}
<button type="button" class="btn btn-light" data-toggle="modal" data-target="#add_tags_modal">
<i class="far fa-plus-square"></i>
</button>
</div>
</h5>
</div>
{% if dict_item['father'] %}
<div class="mt-3">
Father: <a href="{{ url_for('objects_item.showItem')}}?id={{dict_item['father']}}" target="_blank">{{dict_item['father']}}</a>
</div>
{% endif %}
<div class="d-flex flex-row-reverse bd-highlight">
<div>
<a href="{{ url_for('correlation.show_correlation')}}?object_type=paste&correlation_id={{ dict_item['id'] }}&correlation_objects=paste" target="_blank">
<button class="btn btn-lg btn-info"><i class="fas fa-project-diagram"></i> Correlations Graph
</button>
</a>
</div>
<div class="mx-2">
{% with obj_type='item', obj_id=dict_item['id'], obj_lvl=0%}
{% include 'import_export/block_add_user_object_to_export.html' %}
{% endwith %}
</div>
{% if is_hive_connected %}
<div>
{% include 'modals/create_hive_case.html' %}
<button type="button" class="btn btn-lg btn-outline-dark mx-1" data-toggle="modal" data-target="#create_hive_case_modal">
Create
<img id="thehive-icon" src="{{url_for('static', filename='image/thehive_icon.png') }}">
Case
</button>
</div>
{% endif %}
</div>
</div>
</div>
{% if misp_eventid %}
<div class="list-group" id="misp_event">
<li class="list-group-item active">MISP Events already Created</li>
<a target="_blank" href="{{ misp_url }}" class="list-group-item">{{ misp_url }}</a>
</div>
{% endif %}
{% if dict_item['hive_case'] %}
<div class="list-group" id="misp_event">
<li class="list-group-item active">The Hive Case already Created</li>
<a target="_blank" href="{{ hive_url }}" class="list-group-item">{{ hive_url }}</a>
</div>
{% endif %}
{% if dict_item['duplicates'] %}
<div id="accordionDuplicate" class="mb-2 mx-3">
<div class="card">
<div class="card-header py-1" id="headingDuplicate">
<div class="row">
<div class="col-11">
<div class="mt-2">
<i class="far fa-clone"></i> duplicates&nbsp;&nbsp;
<div class="badge badge-warning">{{dict_item['duplicates']|length}}</div>
</div>
</div>
<div class="col-1">
<button class="btn btn-link btn-lg py-2 float-right rotate" data-toggle="collapse" data-target="#collapseDuplicate" aria-expanded="true" aria-controls="collapseDuplicate">
<i class="fas fa-chevron-circle-down"></i>
</button>
</div>
</div>
</div>
<div id="collapseDuplicate" class="collapse" aria-labelledby="headingDuplicate" data-parent="#accordionDuplicate">
<div class="card-body">
<table class="table" id="tableDup">
<thead class="thead-dark">
<tr>
<th>Date</th>
<th>Similarity</th>
<th>Item</th>
<th>Diff</th>
</tr>
</thead>
<tbody>
{% for duplicate_id in dict_item['duplicates'] %}
<tr>
<td>{{dict_item['duplicates'][duplicate_id]['date']}}</td>
<td class="py-0">
<table class="table table-borderless table-sm my-0">
<tbody>
{%for algo in dict_item['duplicates'][duplicate_id]['algo']|sort()%}
<tr>
<td class="py-0">{{algo}}</td>
<td class="w-100 py-0">
<div class="progress mt-1">
<div class="progress-bar progress-bar-striped {%if algo=='tlsh'%}bg-secondary{%endif%}" role="progressbar" style="width: {{dict_item['duplicates'][duplicate_id]['algo'][algo]}}%;" aria-valuenow="{{dict_item['duplicates'][duplicate_id]['algo'][algo]}}" aria-valuemin="0" aria-valuemax="100">
{{dict_item['duplicates'][duplicate_id]['algo'][algo]}}%
</div>
</div>
</td>
</tr>
{%endfor%}
</tbody>
</table>
</td>
<td>
<a href="{{ url_for('objects_item.showItem')}}?id={{duplicate_id}}" target="_blank">
{{duplicate_id}}
</a>
</td>
<td>
<a target="_blank" href="{{ url_for('showsavedpastes.showDiff') }}?s1={{dict_item['id']}}&s2={{duplicate_id}}" class="fa fa-columns" title="Show diff"></a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
{% endif %}
{% if l_64|length != 0 %}
<div id="accordionDecoded" class="mb-3">
<div class="card">
<div class="card-header py-1" id="headingDecoded">
<div class="row">
<div class="col-11">
<div class="mt-2">
<i class="fas fa-lock-open"></i> Decoded Files&nbsp;&nbsp;
<div class="badge badge-warning">{{l_64|length}}</div>
</div>
</div>
<div class="col-1">
<button class="btn btn-link py-2 float-right rotate" data-toggle="collapse" data-target="#collapseDecoded" aria-expanded="true" aria-controls="collapseDecoded">
<i class="fas fa-chevron-circle-down"></i>
</button>
</div>
</div>
</div>
<div id="collapseDecoded" class="collapse show" aria-labelledby="headingDecoded" data-parent="#accordionDecoded">
<div class="card-body">
<table id="tableb64" class="red_table table table-striped">
<thead>
<tr>
<th>estimated type</th>
<th>hash</th>
</tr>
</thead>
<tbody>
{% for b64 in l_64 %}
<tr>
<td><i class="fas {{ b64[0] }}"></i>&nbsp;&nbsp;{{ b64[1] }}</td>
<td><a target="_blank" href="{{ url_for('correlation.show_correlation') }}?object_type=decoded&correlation_id={{ b64[2] }}&correlation_objects=paste"</a> {{b64[2]}} ({{ b64[4] }})</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
{% endif %}
{% if dict_item['crawler'] %}
<div id="accordionCrawler" class="mb-3 mx-3">
<div class="card">
<div class="card-header py-1" id="headingCrawler">
<div class="row">
<div class="col-11">
<div class="mt-2">
<i class="fas fa-spider"></i> Crawler
</div>
</div>
<div class="col-1">
<button class="btn btn-link btn-lg py-2 float-right rotate down" data-toggle="collapse" data-target="#collapseCrawler" aria-expanded="true" aria-controls="collapseCrawler">
<i class="fas fa-chevron-circle-down"></i>
</button>
</div>
</div>
</div>
<div id="collapseCrawler" class="collapse show" aria-labelledby="headingCrawler" data-parent="#accordionCrawler">
<div class="row mx-0">
<div class="col-md-5">
<div class="my-2">
<table class="table table-sm">
<thead>
<tr>
<th></th>
<th>Last Origin:</th>
</tr>
</thead>
<tbody>
<tr>
<td><i class="far fa-file"></i></td>
<td>
<a class="badge" target="_blank" href="{{ url_for('objects_item.showItem', paste=dict_item['father']) }}" />{{ dict_item['father'] }}</a>
</td>
</tr>
<td><i class="fab fa-html5"></i></td>
<td>
<a class="badge" target="_blank" href="{{ url_for('crawler_splash.showDomain', domain=dict_item['crawler']['domain']) }}" />{{ dict_item['crawler']['domain'] }}</a>
</td>
</tr>
<tr>
<td>url</td>
<td>
{{ dict_item['crawler']['url'] }}
</td>
</tr>
</tbody>
</table>
</div>
</div>
<div class="col-md-7">
<div class="card my-2" style="background-color:#ecf0f1;">
<div class="card-body py-2">
<div class="row">
<div class="col-md-8">
<input class="custom-range mt-2" id="blocks" type="range" min="1" max="50" value="{%if dict_item['crawler']['is_tags_safe']%}13{%else%}0{%endif%}">
</div>
<div class="col-md-4">
<button class="btn {%if dict_item['crawler']['is_tags_safe']%}btn-primary{%else%}btn-danger{%endif%}" onclick="blocks.value=50;pixelate();">
{%if dict_item['crawler']['is_tags_safe']%}
<i class="fas fas fa-plus-square"></i>
{%else%}
<i class="fas fa-exclamation-triangle"></i>
{%endif%}
<span class="label-icon">Full resolution</span>
</button>
</div>
</div>
</div>
</div>
<canvas id="canvas" style="width:100%;"></canvas>
</div>
</div>
</div>
</div>
</div>
{% endif %}
<!-- nav-pills nav-justified nav-tabs-->
<div class="card">
<div class="card-body pt-0 py-1 px-2">
<ul class="nav nav-pills" id="pills-tab" role="tablist">
<li class="nav-item">
<a class="nav-link active" id="pills-content-tab" data-toggle="pill" href="#pills-content" role="tab" aria-controls="pills-content" aria-selected="true">Content</a>
</li>
<li class="nav-item">
<a class="nav-link" id="pills-html2text-tab" data-toggle="pill" href="#pills-html2text" role="tab" aria-controls="pills-html2text" aria-selected="false">html2text</a>
</li>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" data-toggle="dropdown" href="#">Others</a>
<div class="dropdown-menu">
<a class="dropdown-item" href="{{ url_for('objects_item.item_raw_content', id=dict_item['id']) }}"><i class="far fa-file"></i> &nbsp;Raw Content</a>
<a class="dropdown-item" href="{{ url_for('objects_item.item_download', id=dict_item['id']) }}"><i class="fas fa-download"></i> &nbsp;Download</i></a>
</div>
</li>
</ul>
<div class="tab-content" id="pills-tabContent">
<div class="tab-pane fade show active" id="pills-content" role="tabpanel" aria-labelledby="pills-content-tab">
<p class="my-0"> <pre class="border">{{ dict_item['content'] }}</pre></p>
</div>
<div class="tab-pane fade" id="pills-html2text" role="tabpanel" aria-labelledby="pills-html2text-tab">
<p class="my-0"> <pre id="html2text-container" class="border"></pre></p>
</div>
</div>
</div>
</div>
<script>
var ltags
var ltagsgalaxies
$(document).ready(function(){
$('#tableDup').DataTable();
// $('#tableb64').DataTable({
// "aLengthMenu": [[5, 10, 15, -1], [5, 10, 15, "All"]],
// "iDisplayLength": 5,
// "order": [[ 1, "asc" ]]
// });
$(".rotate").click(function(){
$(this).toggleClass("down") ;
})
});
$('#pills-html2text-tab').on('shown.bs.tab', function (e) {
if ($('#html2text-container').is(':empty')){
$.get("{{ url_for('objects_item.html2text') }}?id={{ dict_item['id'] }}").done(function(data){
$('#html2text-container').text(data);
});
}
});
</script>
{% if dict_item['crawler'] %}
<script>
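// Screenshot preview: the crawler screenshot is rendered on a <canvas> using the same
// downscale/upscale pixelation trick as in the domain explorer templates; the slider
// starts at 13 when the domain tags are flagged safe and at 0 (clamped to the range
// minimum, i.e. near-maximum pixelation) otherwise, so unvetted content is blurred by default.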
var ctx = canvas.getContext('2d'), img = new Image();
/// turn off image smoothing
ctx.webkitImageSmoothingEnabled = false;
ctx.imageSmoothingEnabled = false;
img.onload = pixelate;
img.addEventListener("error", img_error);
var draw_img = false;
img.src = "{{ url_for('showsavedpastes.screenshot', filename=dict_item['crawler']['screenshot']) }}";
function pixelate() {
/// use slider value
if( blocks.value == 50 ){
size = 1;
} else {
var size = (blocks.value) * 0.01;
}
canvas.width = img.width;
canvas.height = img.height;
/// cache scaled width and height
w = canvas.width * size;
h = canvas.height * size;
/// draw original image to the scaled size
ctx.drawImage(img, 0, 0, w, h);
/// pixelated
ctx.drawImage(canvas, 0, 0, w, h, 0, 0, canvas.width, canvas.height);
}
function img_error() {
img.onerror=null;
img.src="{{ url_for('static', filename='image/AIL.png') }}";
blocks.value = 50;
pixelate();
}
blocks.addEventListener('change', pixelate, false);
</script>
{% endif %}
</body>
</html>
View file
@ -129,7 +129,7 @@
<tr> <tr>
<td class="pb-0">{{ dict_obj['date'] }}</td> <td class="pb-0">{{ dict_obj['date'] }}</td>
<td class="pb-0"> <td class="pb-0">
<a target="_blank" href="{{ url_for('showsavedpastes.showsavedpaste') }}?paste={{dict_obj['id']}}" class="text-secondary"> <a target="_blank" href="{{ url_for('objects_item.showItem') }}?id={{dict_obj['id']}}" class="text-secondary">
<div style="line-height:0.9;">{{ dict_obj['id'] }}</div> <div style="line-height:0.9;">{{ dict_obj['id'] }}</div>
</a> </a>
<div class="mb-2"> <div class="mb-2">
View file
@ -91,9 +91,7 @@ wget -q https://raw.githubusercontent.com/flot/flot/958e5fd43c6dff4bab3e1fd5cb61
#Ressources for sparkline and canvasJS and slider #Ressources for sparkline and canvasJS and slider
wget -q http://omnipotent.net/jquery.sparkline/2.1.2/jquery.sparkline.min.js -O ./static/js/jquery.sparkline.min.js wget -q http://omnipotent.net/jquery.sparkline/2.1.2/jquery.sparkline.min.js -O ./static/js/jquery.sparkline.min.js
wget -q http://canvasjs.com/fdm/chart/ -O temp/canvasjs.zip wget -q https://canvasjs.com/assets/script/canvasjs.min.js -O ./static/js/jquery.canvasjs.min.js
unzip -qq temp/canvasjs.zip -d temp/
mv temp/Chart\ 2.3.2\ GA\ -\ Stable/jquery.canvasjs.min.js ./static/js/jquery.canvasjs.min.js
wget -q https://jqueryui.com/resources/download/jquery-ui-1.12.1.zip -O temp/jquery-ui.zip wget -q https://jqueryui.com/resources/download/jquery-ui-1.12.1.zip -O temp/jquery-ui.zip
unzip -qq temp/jquery-ui.zip -d temp/ unzip -qq temp/jquery-ui.zip -d temp/