add Base64 module

Terrtia 2018-04-20 10:48:44 +02:00
parent 41eb250278
commit 3fc48db903
4 changed files with 105 additions and 1 deletion

bin/Base64.py (new executable file, 91 lines added)

@@ -0,0 +1,91 @@
#!/usr/bin/env python3.5
# -*-coding:UTF-8 -*
"""
Base64 module

Detect Base64 strings, decode them and send the decoded content to XXX for reprocessing.
"""
import time
import os
from pubsublogger import publisher
from Helper import Process
from packages import Paste
import re
import base64
from hashlib import sha1
import magic


def search_base64(content):
    # Look for Base64-like substrings in the paste content
    base64_list = re.findall(regex_base64, content)
    if len(base64_list) > 0:
        for b64 in base64_list:
            # Skip short matches to limit false positives
            if len(b64) >= 40:
                decode = base64.b64decode(b64)
                # Guess the MIME type of the decoded payload
                type = magic.from_buffer(decode, mime=True)
                #print(type)
                #print(decode)
                save_base64_as_file(decode, type)


def save_base64_as_file(decode, type):
    # Store the decoded payload under <AIL_HOME>/<base64 dir>/<mime type>/<sha1[:2]>/<sha1>
    hash = sha1(decode).hexdigest()
    filename = os.path.join(os.environ['AIL_HOME'],
                            p.config.get("Directories", "base64"), type, hash[:2], hash)

    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(filename, 'wb') as f:
        f.write(decode)


if __name__ == '__main__':
    # If you wish to use another port or channel, do not forget to run a subscriber accordingly (see launch_logs.sh)

    # Port of the redis instance used by pubsublogger
    publisher.port = 6380
    # Script is the default channel used for the modules.
    publisher.channel = 'Script'

    # Section name in bin/packages/modules.cfg
    config_section = 'Base64'

    # Setup the I/O queues
    p = Process(config_section)

    # Send a description of the module to the logger
    publisher.info("Base64 started")

    regex_base64 = '(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)'
    re.compile(regex_base64)

    # Endless loop getting messages from the input queue
    while True:
        # Get one message from the input queue
        message = p.get_from_set()
        if message is None:
            publisher.debug("{} queue is empty, waiting".format(config_section))
            time.sleep(1)
            continue

        # Do something with the message from the queue
        filename = message
        paste = Paste.Paste(filename)
        content = paste.get_p_content()
        #print(filename)
        search_base64(content)

        # (Optional) Send that thing to the next queue
        #p.populate_set_out(something_has_been_done)
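
A quick, standalone sketch of the detection logic above: the regex only matches padded Base64 runs, and anything shorter than 40 characters is skipped to limit false positives. The sample paste content below is made up for illustration.

import re
import base64

regex_base64 = '(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)'

# Hypothetical paste content containing one Base64-encoded payload
blob = base64.b64encode(b'a hidden payload inside a paste!').decode()
content = 'some paste text ' + blob + ' more text'

for b64 in re.findall(regex_base64, content):
    if len(b64) >= 40:
        print(base64.b64decode(b64))   # b'a hidden payload inside a paste!'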

@@ -142,6 +142,8 @@ function launching_scripts {
  sleep 0.1
  screen -S "Script_AIL" -X screen -t "Mail" bash -c './Mail.py; read x'
  sleep 0.1
+ #screen -S "Script_AIL" -X screen -t "Dox" bash -c './Dox.py; read x'
+ sleep 0.1
  screen -S "Script_AIL" -X screen -t "Web" bash -c './Web.py; read x'
  sleep 0.1
  screen -S "Script_AIL" -X screen -t "Credential" bash -c './Credential.py; read x'
@@ -158,6 +160,8 @@ function launching_scripts {
  sleep 0.1
  screen -S "Script_AIL" -X screen -t "Keys" bash -c './Keys.py; read x'
  sleep 0.1
+ screen -S "Script_AIL" -X screen -t "Base64" bash -c './Base64.py; read x'
+ sleep 0.1
  screen -S "Script_AIL" -X screen -t "Phone" bash -c './Phone.py; read x'
  sleep 0.1
  screen -S "Script_AIL" -X screen -t "Release" bash -c './Release.py; read x'

@@ -2,6 +2,7 @@
  bloomfilters = Blooms
  dicofilters = Dicos
  pastes = PASTES
+ base64 = BASE64
  wordtrending_csv = var/www/static/csv/wordstrendingdata
  wordsfile = files/wordfile
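
The new base64 entry names the directory, relative to AIL_HOME, where save_base64_as_file() writes decoded payloads (the module reads it via p.config.get("Directories", "base64")). A minimal sketch of how that storage path is built, using a hypothetical payload, MIME type, and a made-up default install directory:

import os
from hashlib import sha1

ail_home = os.environ.get('AIL_HOME', '/opt/AIL')   # assumption: AIL_HOME points at the install directory
base64_dir = 'BASE64'                               # value of the new "base64" entry

decode = b'%PDF-1.4 ...'                            # hypothetical decoded payload
mime = 'application/pdf'                            # hypothetical MIME type from python-magic

digest = sha1(decode).hexdigest()
# The slash in the MIME type simply adds one more directory level
path = os.path.join(ail_home, base64_dir, mime, digest[:2], digest)
print(path)   # <AIL_HOME>/BASE64/application/pdf/<first two hex chars>/<full sha1>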

bin/packages/modules.cfg

@@ -45,7 +45,7 @@ subscribe = Redis_CurveManageTopSets
  [Categ]
  subscribe = Redis_Global
- publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Redis_SourceCode,Redis_Cve
+ publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Redis_SourceCode,Redis_Cve,Redis_Dox
  [CreditCards]
  subscribe = Redis_CreditCards
@@ -74,6 +74,10 @@ subscribe = Redis_Url
  subscribe = Redis_Url
  publish = Redis_alertHandler,Redis_Duplicate
+ [Dox]
+ subscribe = Redis_Dox
+ publish = Redis_Duplicate,Redis_alertHandler,Redis_ModuleStats
  [ModuleStats]
  subscribe = Redis_ModuleStats
@@ -105,3 +109,7 @@ publish = Redis_Duplicate,Redis_alertHandler
  [Keys]
  subscribe = Redis_Global
  publish = Redis_Duplicate,Redis_alertHandler
+ [Base64]
+ subscribe = Redis_Global
+ #publish = ZMQ_Global
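
The new [Base64] section wires the module into the queueing system: it consumes pastes from Redis_Global and, with the publish line commented out, forwards nothing downstream. A minimal sketch (not the AIL Helper itself) of how such a section can be read with the standard library, assuming it is run from the repository root:

import configparser

cfg = configparser.ConfigParser()
cfg.read('bin/packages/modules.cfg')

section = 'Base64'
subscribe = cfg.get(section, 'subscribe')              # 'Redis_Global'
publish = cfg.get(section, 'publish', fallback=None)   # None: the publish line is commented out
print(subscribe, publish)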