Repository: ail-framework (mirror of https://github.com/ail-project/ail-framework.git)

Commit c2bc72c72e (parent ce6475e96e)
chg: [Tags alertHandler] remove alertHandler + add quick tag search sidebar + use tag last_seen to display tagged items

19 changed files with 199 additions and 299 deletions

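For orientation, every detection module touched below converges on the same publishing pattern: build one taxonomy message of the form infoleak:automatic-detection="<type>";<item_path>, push it to the 'Tags' queue (and the item path to 'Duplicate'), and stop sending a second module-specific message to the removed 'alertHandler' queue. The sketch below is illustrative only: ProcessStub and the example item path are invented stand-ins, while the message format and queue names are taken from the hunks that follow.

class ProcessStub:
    # Stand-in for AIL's Helper.Process; it only echoes what would be queued.
    def populate_set_out(self, msg, queue_name):
        print('[{}] {}'.format(queue_name, msg))

def tag_item(p, detection_type, item_path):
    # One taxonomy tag message to 'Tags', the raw item path to 'Duplicate'.
    msg = 'infoleak:automatic-detection="{}";{}'.format(detection_type, item_path)
    p.populate_set_out(msg, 'Tags')
    #Send to duplicate
    p.populate_set_out(item_path, 'Duplicate')

if __name__ == '__main__':
    # Hypothetical item path, for demonstration only.
    tag_item(ProcessStub(), 'api-key', 'archive/2018/11/10/example.gz')
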
@@ -57,8 +57,6 @@ def search_api_key(message):
        msg = 'infoleak:automatic-detection="api-key";{}'.format(filename)
        p.populate_set_out(msg, 'Tags')

-        msg = 'apikey;{}'.format(filename)
-        p.populate_set_out(msg, 'alertHandler')
        #Send to duplicate
        p.populate_set_out(filename, 'Duplicate')

@@ -86,7 +84,7 @@ if __name__ == "__main__":

        if message is not None:

            search_api_key(message)

        else:
            publisher.debug("Script ApiKey is Idling 10s")

@@ -32,7 +32,7 @@ def decode_base58(bc, length):
    for char in bc:
        n = n * 58 + digits58.index(char)
    return n.to_bytes(length, 'big')

def check_bc(bc):
    try:
        bcbytes = decode_base58(bc, 25)

@@ -62,8 +62,6 @@ def search_key(content, message, paste):
        to_print = 'Bitcoin found: {} address and {} private Keys'.format(len(bitcoin_address), len(bitcoin_private_key))
        print(to_print)
        publisher.warning(to_print)
-        msg = ('bitcoin;{}'.format(message))
-        p.populate_set_out( msg, 'alertHandler')

        msg = 'infoleak:automatic-detection="bitcoin-address";{}'.format(message)
        p.populate_set_out(msg, 'Tags')

@@ -107,9 +107,6 @@ if __name__ == "__main__":
            publisher.warning(to_print)
            #Send to duplicate
            p.populate_set_out(filepath, 'Duplicate')
-            #Send to alertHandler
-            msg = 'credential;{}'.format(filepath)
-            p.populate_set_out(msg, 'alertHandler')

            msg = 'infoleak:automatic-detection="credential";{}'.format(filepath)
            p.populate_set_out(msg, 'Tags')

@@ -82,9 +82,6 @@ if __name__ == "__main__":
                to_print, len(creditcard_set), paste.p_path))
            #Send to duplicate
            p.populate_set_out(filename, 'Duplicate')
-            #send to Browse_warning_paste
-            msg = 'creditcard;{}'.format(filename)
-            p.populate_set_out(msg, 'alertHandler')

            msg = 'infoleak:automatic-detection="credit-card";{}'.format(filename)
            p.populate_set_out(msg, 'Tags')

@@ -31,10 +31,6 @@ def search_cve(message):
        print('{} contains CVEs'.format(paste.p_name))
        publisher.warning('{} contains CVEs'.format(paste.p_name))

-        #send to Browse_warning_paste
-        msg = 'cve;{}'.format(filepath)
-        p.populate_set_out(msg, 'alertHandler')
-
        msg = 'infoleak:automatic-detection="cve";{}'.format(filepath)
        p.populate_set_out(msg, 'Tags')
        #Send to duplicate

@@ -147,9 +147,6 @@ def set_out_paste(decoder_name, message):
    publisher.warning(decoder_name+' decoded')
    #Send to duplicate
    p.populate_set_out(message, 'Duplicate')
-    #send to Browse_warning_paste
-    msg = (decoder_name+';{}'.format(message))
-    p.populate_set_out( msg, 'alertHandler')

    msg = 'infoleak:automatic-detection="'+decoder_name+'";{}'.format(message)
    p.populate_set_out(msg, 'Tags')

@@ -112,10 +112,7 @@ def search_key(paste):

        #Send to duplicate
        p.populate_set_out(message, 'Duplicate')
-        #send to Browse_warning_paste
-        msg = ('keys;{}'.format(message))
        print(message)
-        p.populate_set_out( msg, 'alertHandler')


if __name__ == '__main__':

@@ -206,8 +206,6 @@ function launching_scripts {
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "LibInjection" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./LibInjection.py; read x"
    sleep 0.1
-    screen -S "Script_AIL" -X screen -t "alertHandler" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./alertHandler.py; read x"
-    sleep 0.1
    screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./MISP_The_Hive_feeder.py; read x"
    sleep 0.1
    screen -S "Script_AIL" -X screen -t "Tags" bash -c "cd ${AIL_BIN}; ${ENV_PY} ./Tags.py; read x"

@@ -51,8 +51,7 @@ def analyse(url, path):
        publisher.warning(to_print)
        #Send to duplicate
        p.populate_set_out(path, 'Duplicate')
-        #send to Browse_warning_paste
-        p.populate_set_out('sqlinjection;{}'.format(path), 'alertHandler')
        msg = 'infoleak:automatic-detection="sql-injection";{}'.format(path)
        p.populate_set_out(msg, 'Tags')

@@ -83,7 +83,6 @@ if __name__ == "__main__":
            publisher.warning(to_print)
            #Send to duplicate
            p.populate_set_out(filename, 'Duplicate')
-            p.populate_set_out('mail;{}'.format(filename), 'alertHandler')

            msg = 'infoleak:automatic-detection="mail";{}'.format(filename)
            p.populate_set_out(msg, 'Tags')

@@ -235,7 +235,6 @@ if __name__ == "__main__":
            else:
                for url in fetch(p, r_cache, urls, domains_list, path):
                    publisher.info('{}Checked {};{}'.format(to_print, url, PST.p_path))
-                #p.populate_set_out('onion;{}'.format(PST.p_path), 'alertHandler')

                # TAG Item
                msg = 'infoleak:automatic-detection="onion";{}'.format(PST.p_path)

@@ -32,14 +32,11 @@ def search_phone(message):
    if len(results) > 4:
        print(results)
        publisher.warning('{} contains PID (phone numbers)'.format(paste.p_name))
-        #send to Browse_warning_paste
-        msg = 'phone;{}'.format(message)
-        p.populate_set_out(msg, 'alertHandler')
-        #Send to duplicate

        msg = 'infoleak:automatic-detection="phone-number";{}'.format(message)
        p.populate_set_out(msg, 'Tags')

+        #Send to duplicate
        p.populate_set_out(message, 'Duplicate')
        stats = {}
        for phone_number in results:

@@ -82,8 +82,6 @@ def analyse(url, path):
        publisher.warning(to_print)
        #Send to duplicate
        p.populate_set_out(path, 'Duplicate')
-        #send to Browse_warning_paste
-        p.populate_set_out('sqlinjection;{}'.format(path), 'alertHandler')

        msg = 'infoleak:automatic-detection="sql-injection";{}'.format(path)
        p.populate_set_out(msg, 'Tags')

@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-# -*-coding:UTF-8 -*
-
-"""
-The Browse_warning_paste module
-====================
-
-This module saved signaled paste (logged as 'warning') in redis for further usage
-like browsing by category
-
-Its input comes from other modules, namely:
-Credential, CreditCard, SQLinjection, CVE, Keys, Mail and Phone
-
-"""
-
-import redis
-import time
-from datetime import datetime, timedelta
-from packages import Paste
-from pubsublogger import publisher
-from Helper import Process
-
-import sys
-sys.path.append('../')
-
-flag_misp = False
-
-if __name__ == "__main__":
-    publisher.port = 6380
-    publisher.channel = "Script"
-
-    config_section = 'alertHandler'
-
-    p = Process(config_section)
-
-    # port generated automatically depending on the date
-    curYear = datetime.now().year
-    server = redis.StrictRedis(
-        host=p.config.get("ARDB_DB", "host"),
-        port=p.config.get("ARDB_DB", "port"),
-        db=curYear,
-        decode_responses=True)
-
-    # FUNCTIONS #
-    publisher.info("Script duplicate started")
-
-    while True:
-        message = p.get_from_set()
-        if message is not None:
-            module_name, p_path = message.split(';')
-            print("new alert : {}".format(module_name))
-            #PST = Paste.Paste(p_path)
-        else:
-            publisher.debug("Script Attribute is idling 10s")
-            time.sleep(10)
-            continue
-
-        # Add in redis for browseWarningPaste
-        # Format in set: WARNING_moduleName -> p_path
-        key = "WARNING_" + module_name
-        server.sadd(key, p_path)
-
-        publisher.info('Saved warning paste {}'.format(p_path))

@@ -1,77 +0,0 @@
-#!/bin/bash
-
-set -e
-set -x
-
-[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
-[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
-[ -z "$AIL_LEVELDB" ] && echo "Needs the env var AIL_LEVELDB. Run the script from the virtual environment." && exit 1;
-
-echo -e "\t* Checking configuration"
-bash -c "./Update-conf.py"
-exitStatus=$?
-if [ $exitStatus -ge 1 ]; then
-    echo -e $RED"\t* Configuration not up-to-date"$DEFAULT
-    exit
-fi
-echo -e $GREEN"\t* Configuration up-to-date"$DEFAULT
-
-screen -dmS "Script"
-sleep 0.1
-echo -e $GREEN"\t* Launching ZMQ scripts"$DEFAULT
-
-screen -S "Script" -X screen -t "ModuleInformation" bash -c './ModulesInformationV2.py -k 0 -c 1; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Mixer" bash -c './Mixer.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Global" bash -c './Global.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Duplicates" bash -c './Duplicates.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Attributes" bash -c './Attributes.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Lines" bash -c './Lines.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "DomClassifier" bash -c './DomClassifier.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Categ" bash -c './Categ.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Tokenize" bash -c './Tokenize.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "CreditCards" bash -c './CreditCards.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Onion" bash -c './Onion.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Mail" bash -c './Mail.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Web" bash -c './Web.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Credential" bash -c './Credential.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Curve" bash -c './Curve.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "CurveManageTopSets" bash -c './CurveManageTopSets.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "RegexForTermsFrequency" bash -c './RegexForTermsFrequency.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "SetForTermsFrequency" bash -c './SetForTermsFrequency.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Indexer" bash -c './Indexer.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Keys" bash -c './Keys.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Phone" bash -c './Phone.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Release" bash -c './Release.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "Cve" bash -c './Cve.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "WebStats" bash -c './WebStats.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "alertHandler" bash -c './alertHandler.py; read x'
-sleep 0.1
-screen -S "Script" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'

@@ -51,7 +51,7 @@ publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Re

[CreditCards]
subscribe = Redis_CreditCards
-publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_ModuleStats,Redis_Tags

[BankAccount]
subscribe = Redis_Global

@@ -59,12 +59,12 @@ publish = Redis_Duplicate,Redis_Tags

[Mail]
subscribe = Redis_Mail
-publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_ModuleStats,Redis_Tags

[Onion]
subscribe = Redis_Onion
-publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_alertHandler,Redis_Tags,Redis_Crawler
+publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_Tags,Redis_Crawler
-#publish = Redis_Global,Redis_ValidOnion,ZMQ_FetchedOnion,Redis_alertHandler
+#publish = Redis_Global,Redis_ValidOnion,ZMQ_FetchedOnion

[DumpValidOnion]
subscribe = Redis_ValidOnion

@@ -78,18 +78,15 @@ subscribe = Redis_Url

[LibInjection]
subscribe = Redis_Url
-publish = Redis_alertHandler,Redis_Duplicate,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[SQLInjectionDetection]
subscribe = Redis_Url
-publish = Redis_alertHandler,Redis_Duplicate,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[ModuleStats]
subscribe = Redis_ModuleStats

-[alertHandler]
-subscribe = Redis_alertHandler
-
[Tags]
subscribe = Redis_Tags
publish = Redis_Tags_feed

@@ -99,7 +96,7 @@ subscribe = Redis_Tags_feed

#[send_to_queue]
#subscribe = Redis_Cve
-#publish = Redis_alertHandler,Redis_Tags
+#publish = Redis_Tags

[SentimentAnalysis]
subscribe = Redis_Global

@@ -109,31 +106,31 @@ subscribe = Redis_Global

[Credential]
subscribe = Redis_Credential
-publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_ModuleStats,Redis_Tags

[Cve]
subscribe = Redis_Cve
-publish = Redis_alertHandler,Redis_Duplicate,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[Phone]
subscribe = Redis_Global
-publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[Keys]
subscribe = Redis_Global
-publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[ApiKey]
subscribe = Redis_ApiKey
-publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[Decoder]
subscribe = Redis_Global
-publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[Bitcoin]
subscribe = Redis_Global
-publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
+publish = Redis_Duplicate,Redis_Tags

[submit_paste]
subscribe = Redis

@@ -142,4 +139,3 @@ publish = Redis_Mixer

[Crawler]
subscribe = Redis_Crawler
publish = Redis_Mixer,Redis_Tags
-

@@ -118,6 +118,16 @@ def get_all_dates_range(date_from, date_to):
    all_dates['date_range'] = date_range
    return all_dates

+def get_last_seen_from_tags_list(list_tags):
+    min_last_seen = 99999999
+    for tag in list_tags:
+        tag_last_seen = r_serv_tags.hget('tag_metadata:{}'.format(tag), 'last_seen')
+        if tag_last_seen:
+            tag_last_seen = int(tag_last_seen)
+            if tag_last_seen < min_last_seen:
+                min_last_seen = tag_last_seen
+    return str(min_last_seen)
+
def add_item_tag(tag, item_path):
    item_date = int(get_item_date(item_path))

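The helper added above walks each tag's 'tag_metadata:<tag>' hash, reads its 'last_seen' day and keeps the smallest value; Tags_page (next hunk) then uses that day as both date_from and date_to when the request carries tags but no date filter, so for a single tag the page defaults to the day the tag was last seen. A minimal standalone sketch of that fallback, with a plain dict standing in for the Redis/ARDB hashes (the stub and the sample dates are illustrative, not AIL data):

# Stand-in for the 'tag_metadata:<tag>' hashes queried with hget(..., 'last_seen').
TAG_METADATA = {
    'tag_metadata:infoleak:automatic-detection="mail"': {'last_seen': '20181110'},
    'tag_metadata:infoleak:automatic-detection="cve"': {'last_seen': '20181108'},
}

def get_last_seen_from_tags_list(list_tags):
    # Keep the oldest last_seen among the selected tags (same logic as the hunk above).
    min_last_seen = 99999999
    for tag in list_tags:
        tag_last_seen = TAG_METADATA.get('tag_metadata:{}'.format(tag), {}).get('last_seen')
        if tag_last_seen:
            tag_last_seen = int(tag_last_seen)
            if tag_last_seen < min_last_seen:
                min_last_seen = tag_last_seen
    return str(min_last_seen)

if __name__ == '__main__':
    date_from = get_last_seen_from_tags_list(['infoleak:automatic-detection="mail"',
                                              'infoleak:automatic-detection="cve"'])
    date_to = date_from  # same single-day default as Tags_page
    print(date_from, date_to)  # -> 20181108 20181108
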
@@ -211,123 +221,133 @@ def update_tag_last_seen(tag, tag_first_seen, tag_last_seen):
def Tags_page():
    date_from = request.args.get('date_from')
    date_to = request.args.get('date_to')
+    tags = request.args.get('ltags')
+
+    if tags is None:
+        dates = get_all_dates_range(date_from, date_to)
+        return render_template("Tags.html", date_from=dates['date_from'], date_to=dates['date_to'])
+
+    # unpack tags
+    list_tags = tags.split(',')
+    list_tag = []
+    for tag in list_tags:
+        list_tag.append(tag.replace('"','\"'))
+
+    #no search by date, use last_seen for date_from/date_to
+    if date_from is None and date_to is None and tags is not None:
+        date_from = get_last_seen_from_tags_list(list_tags)
+        date_to = date_from
+
+    # TODO verify input
+
    dates = get_all_dates_range(date_from, date_to)

-    tags = request.args.get('ltags')
-
-    if tags is None:
-        return render_template("Tags.html", date_from=dates['date_from'], date_to=dates['date_to'])
-
-    tags = request.args.get('ltags')
-
-    list_tags = tags.split(',')
-    list_tag = []
-    for tag in list_tags:
-        list_tag.append(tag.replace('"','\"'))
-
-    # TODO verify input
-
    if(type(list_tags) is list):
        # no tag
        if list_tags is False:
            print('empty')
        # 1 tag
        elif len(list_tags) < 2:
            tagged_pastes = []
            for date in dates['date_range']:
                tagged_pastes.extend(r_serv_tags.smembers('{}:{}'.format(list_tags[0], date)))

        # 2 tags or more
        else:
            tagged_pastes = []
            for date in dates['date_range']:
                tag_keys = []
                for tag in list_tags:
                    tag_keys.append('{}:{}'.format(tag, date))

                if len(tag_keys) > 1:
                    daily_items = r_serv_tags.sinter(tag_keys[0], *tag_keys[1:])
                else:
                    daily_items = r_serv_tags.sinter(tag_keys[0])
                tagged_pastes.extend(daily_items)

    else :
        return 'INCORRECT INPUT'

    all_content = []
    paste_date = []
    paste_linenum = []
    all_path = []
    allPastes = list(tagged_pastes)
    paste_tags = []

    try:
        page = int(request.args.get('page'))
    except:
        page = 1
    if page <= 0:
        page = 1
    nb_page_max = len(tagged_pastes)/(max_tags_result)
    if not nb_page_max.is_integer():
        nb_page_max = int(nb_page_max)+1
    else:
        nb_page_max = int(nb_page_max)
    if page > nb_page_max:
        page = nb_page_max
    start = max_tags_result*(page -1)
    stop = max_tags_result*page

-    for path in allPastes[start:stop]: ######################moduleName
+    for path in allPastes[start:stop]:
        all_path.append(path)
        paste = Paste.Paste(path)
        content = paste.get_p_content()
        content_range = max_preview_char if len(content)>max_preview_char else len(content)-1
        all_content.append(content[0:content_range].replace("\"", "\'").replace("\r", " ").replace("\n", " "))
        curr_date = str(paste._get_p_date())
        curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:]
        paste_date.append(curr_date)
        paste_linenum.append(paste.get_lines_info()[0])
        p_tags = r_serv_metadata.smembers('tag:'+path)
        complete_tags = []
        l_tags = []
        for tag in p_tags:
            complete_tag = tag

            tag = tag.split('=')
            if len(tag) > 1:
                if tag[1] != '':
                    tag = tag[1][1:-1]
                # no value
                else:
                    tag = tag[0][1:-1]
            # use for custom tags
            else:
                tag = tag[0]

            l_tags.append( (tag,complete_tag) )

        paste_tags.append(l_tags)

    if len(allPastes) > 10:
        finished = False
    else:
        finished = True

+    if len(list_tag) == 1:
+        tag_nav=tags.replace('"', '').replace('=', '').replace(':', '')
+    else:
+        tag_nav='empty'
+
    return render_template("Tags.html",
                            all_path=all_path,
                            tags=tags,
+                            tag_nav=tag_nav,
                            list_tag = list_tag,
                            date_from=dates['date_from'],
                            date_to=dates['date_to'],
                            page=page, nb_page_max=nb_page_max,
                            paste_tags=paste_tags,
                            bootstrap_label=bootstrap_label,
                            content=all_content,
                            paste_date=paste_date,
                            paste_linenum=paste_linenum,
                            char_to_display=max_preview_modal,
                            finished=finished)


@Tags.route("/Tags/get_all_tags")

@@ -217,7 +217,8 @@ var last_clicked_paste;
var can_change_modal_content = true;

$(document).ready(function(){
-    $("#page-Browse-Items").addClass("active");
+    $("#nav_quick_search").removeClass("text-muted");
+    $("#nav_tag_{{tag_nav}}").addClass("active");
    search_table = $('#myTable_').DataTable({ "order": [[ 0, "asc" ]] });

    // Use to bind the button with the new displayed data

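How the two template changes fit together: Tags_page builds tag_nav by stripping double quotes, '=' and ':' from the requested ltags value, and each quick-search link in the sidebar hunk below carries an id of the form nav_tag_<sanitised tag>, so the selector above can highlight the entry that was clicked. A quick, framework-free check of that sanitisation (the tag value is one of the sidebar links; everything else is illustrative):

# Reproduce the tag_nav sanitisation from Tags_page and the matching sidebar id.
tag = 'infoleak:automatic-detection="credential"'
tag_nav = tag.replace('"', '').replace('=', '').replace(':', '')
assert tag_nav == 'infoleakautomatic-detectioncredential'
# This is the element targeted by $("#nav_tag_{{tag_nav}}").addClass("active")
print('#nav_tag_' + tag_nav)
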
@@ -34,5 +34,58 @@
        </a>
      </li>
    </ul>
+    <h5 class="d-flex text-muted w-100" id="nav_quick_search">
+      <span>Quick Search </span>
+    </h5>
+    <ul class="nav flex-md-column flex-row navbar-nav justify-content-between w-100">
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="credential"' id='nav_tag_infoleakautomatic-detectioncredential'>
+          <i class="fas fa-unlock-alt"></i>
+          Credentials
+        </a>
+      </li>
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="credit-card"' id='nav_tag_infoleakautomatic-detectioncredit-card'>
+          <i class="far fa-credit-card"></i>
+          Credit cards
+        </a>
+      </li>
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="mail"' id='nav_tag_infoleakautomatic-detectionmail'>
+          <i class="fas fa-envelope"></i>
+          Mails
+        </a>
+      </li>
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="cve"' id='nav_tag_infoleakautomatic-detectioncve'>
+          <i class="fas fa-bug"></i>
+          CVEs
+        </a>
+      </li>
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="onion"' id='nav_tag_infoleakautomatic-detectiononion'>
+          <i class="fas fa-user-secret"></i>
+          Onions
+        </a>
+      </li>
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="bitcoin-address"' id='nav_tag_infoleakautomatic-detectionbitcoin-address'>
+          <i class="fab fa-bitcoin"></i>
+          Bitcoin
+        </a>
+      </li>
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="base64"' id='nav_tag_infoleakautomatic-detectionbase64'>
+          <i class="fas fa-lock-open"></i>
+          Base64
+        </a>
+      </li>
+      <li class="nav-item">
+        <a class="nav-link" href='{{url_for('Tags.Tags_page')}}?ltags=infoleak:automatic-detection="phone-number"' id='nav_tag_infoleakautomatic-detectionphone-number'>
+          <i class="fas fa-phone"></i>
+          Phones
+        </a>
+      </li>
+    </ul>
  </nav>
</div>