diff --git a/.gitignore b/.gitignore
index 4a266743..e74906ae 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,6 +17,7 @@ BASE64
DATA_ARDB
indexdir/
logs/
+old/
# Webstuff
var/www/static/
@@ -26,9 +27,11 @@ var/www/static/
!var/www/static/js/plot-graph.js
!var/www/static/js/trendingchart.js
var/www/templates/header.html
+var/www/submitted
# Local config
bin/packages/config.cfg
+configs/keys
# installed files
nltk_data/
diff --git a/README.md b/README.md
index 6e6bfdec..fd52b2c3 100644
--- a/README.md
+++ b/README.md
@@ -36,6 +36,9 @@ Features
* Detect Bitcoin address and Bitcoin private keys
* Detect private keys and certificate
* Tagging system with [MISP Galaxy](https://github.com/MISP/misp-galaxy) and [MISP Taxonomies](https://github.com/MISP/misp-taxonomies) tags
+* UI paste submission
+* Create events on [MISP](https://github.com/MISP/MISP) and cases on [The Hive](https://github.com/TheHive-Project/TheHive)
+* Automatic paste export on [MISP](https://github.com/MISP/MISP) (events) and [The Hive](https://github.com/TheHive-Project/TheHive) (alerts) on selected tags
Installation
------------
@@ -154,6 +157,16 @@ Tagging system
![Tags](./doc/screenshots/tags.png?raw=true "AIL framework tags")
+MISP and The Hive, automatic events and alerts creation
+--------
+
+![paste_submit](./doc/screenshots/tag_auto_export.png?raw=true "AIL framework MISP and Hive auto export")
+
+Paste submission
+--------
+
+![paste_submit](./doc/screenshots/paste_submit.png?raw=true "AIL framework paste submission")
+
Sentiment analysis
------------------
diff --git a/bin/Global.py b/bin/Global.py
index 6115a53f..32a3656b 100755
--- a/bin/Global.py
+++ b/bin/Global.py
@@ -57,7 +57,6 @@ if __name__ == '__main__':
while True:
message = p.get_from_set()
- #print(message)
# Recovering the streamed message informations.
if message is not None:
splitted = message.split()
diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh
index ed05f676..1f987479 100755
--- a/bin/LAUNCH.sh
+++ b/bin/LAUNCH.sh
@@ -160,9 +160,13 @@ function launching_scripts {
sleep 0.1
screen -S "Script_AIL" -X screen -t "alertHandler" bash -c './alertHandler.py; read x'
sleep 0.1
+ screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c './MISP_The_Hive_feeder.py; read x'
+ sleep 0.1
screen -S "Script_AIL" -X screen -t "Tags" bash -c './Tags.py; read x'
sleep 0.1
screen -S "Script_AIL" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x'
+ sleep 0.1
+ screen -S "Script_AIL" -X screen -t "SubmitPaste" bash -c './submit_paste.py; read x'
}
diff --git a/bin/MISP_The_Hive_feeder.py b/bin/MISP_The_Hive_feeder.py
new file mode 100755
index 00000000..b11c44cb
--- /dev/null
+++ b/bin/MISP_The_Hive_feeder.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+"""
+module
+====================
+
+This module sends tagged pastes to MISP or The Hive Project
+
+"""
+
+import redis
+import sys
+import os
+import time
+import json
+import configparser
+
+from pubsublogger import publisher
+from Helper import Process
+from packages import Paste
+import ailleakObject
+
+import uuid
+
+from pymisp import PyMISP
+
+sys.path.append('../configs/keys')
+
+# import MISP KEYS
+try:
+ from mispKEYS import misp_url, misp_key, misp_verifycert
+ flag_misp = True
+except:
+ print('Misp keys not present')
+ flag_misp = False
+
+# import The Hive Keys
+try:
+ from theHiveKEYS import the_hive_url, the_hive_key, the_hive_verifycert
+ if the_hive_url == '':
+ flag_the_hive = False
+ else:
+ flag_the_hive = True
+except:
+ print('The HIVE keys not present')
+ flag_the_hive = False
+
+from thehive4py.api import TheHiveApi
+import thehive4py.exceptions
+from thehive4py.models import Alert, AlertArtifact
+from thehive4py.models import Case, CaseTask, CustomFieldHelper
+
+
+
+def create_the_hive_alert(source, path, content, tag):
+ tags = list(r_serv_metadata.smembers('tag:'+path))
+
+ artifacts = [
+ AlertArtifact( dataType='uuid-ail', data=r_serv_db.get('ail:uuid') ),
+ AlertArtifact( dataType='file', data=path, tags=tags )
+ ]
+
+ l_tags = tag.split(',')
+ print(tag)
+
+ # Prepare the sample Alert
+ sourceRef = str(uuid.uuid4())[0:6]
+ alert = Alert(title='AIL Leak',
+ tlp=3,
+ tags=l_tags,
+ description='infoleak',
+ type='ail',
+ source=source,
+ sourceRef=sourceRef,
+ artifacts=artifacts)
+
+ # Create the Alert
+ id = None
+ try:
+ response = HiveApi.create_alert(alert)
+ if response.status_code == 201:
+ #print(json.dumps(response.json(), indent=4, sort_keys=True))
+ print('Alert Created')
+ print('')
+ id = response.json()['id']
+ else:
+ print('ko: {}/{}'.format(response.status_code, response.text))
+ return 0
+ except:
+ print('hive connection error')
+
+if __name__ == "__main__":
+
+ publisher.port = 6380
+ publisher.channel = "Script"
+
+ config_section = 'misp_the_hive_feeder'
+
+ configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
+ if not os.path.exists(configfile):
+ raise Exception('Unable to find the configuration file. \
+ Did you set environment variables? \
+ Or activate the virtualenv.')
+
+ cfg = configparser.ConfigParser()
+ cfg.read(configfile)
+
+ r_serv_db = redis.StrictRedis(
+ host=cfg.get("ARDB_DB", "host"),
+ port=cfg.getint("ARDB_DB", "port"),
+ db=cfg.getint("ARDB_DB", "db"),
+ decode_responses=True)
+
+ r_serv_metadata = redis.StrictRedis(
+ host=cfg.get("ARDB_Metadata", "host"),
+ port=cfg.getint("ARDB_Metadata", "port"),
+ db=cfg.getint("ARDB_Metadata", "db"),
+ decode_responses=True)
+
+ uuid_ail = r_serv_db.get('ail:uuid')
+ if uuid_ail is None:
+        uuid_ail = str(uuid.uuid4()); r_serv_db.set('ail:uuid', uuid_ail)
+
+ config_section = 'misp_the_hive_feeder'
+
+ p = Process(config_section)
+ # create MISP connection
+ if flag_misp:
+ try:
+ pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
+ except:
+ flag_misp = False
+ r_serv_db.set('ail:misp', False)
+ print('Not connected to MISP')
+
+ if flag_misp:
+ try:
+ misp_wrapper = ailleakObject.ObjectWrapper(pymisp)
+ r_serv_db.set('ail:misp', True)
+ print('Connected to MISP:', misp_url)
+        except Exception as e:
+ flag_misp = False
+ r_serv_db.set('ail:misp', False)
+ print(e)
+ print('Not connected to MISP')
+
+ # create The HIVE connection
+ if flag_the_hive:
+ try:
+ HiveApi = TheHiveApi(the_hive_url, the_hive_key, cert = the_hive_verifycert)
+ r_serv_db.set('ail:thehive', True)
+ except:
+ HiveApi = False
+ flag_the_hive = False
+ r_serv_db.set('ail:thehive', False)
+ print('Not connected to The HIVE')
+
+ if HiveApi != False and flag_the_hive:
+ try:
+ HiveApi.get_alert(0)
+ print('Connected to The HIVE:', the_hive_url)
+ except thehive4py.exceptions.AlertException:
+ HiveApi = False
+ flag_the_hive = False
+ print('Not connected to The HIVE')
+
+ while True:
+
+ # Get one message from the input queue
+ message = p.get_from_set()
+ if message is None:
+ publisher.debug("{} queue is empty, waiting 1s".format(config_section))
+ time.sleep(1)
+ continue
+ else:
+
+ if flag_the_hive or flag_misp:
+ tag, path = message.split(';')
+ paste = Paste.Paste(path)
+ source = '/'.join(paste.p_path.split('/')[-6:])
+
+ full_path = os.path.join(os.environ['AIL_HOME'],
+ p.config.get("Directories", "pastes"), path)
+
+
+ if HiveApi != False:
+ if int(r_serv_db.get('hive:auto-alerts')) == 1:
+ whitelist_hive = r_serv_db.scard('whitelist_hive')
+ if r_serv_db.sismember('whitelist_hive', tag):
+ create_the_hive_alert(source, path, full_path, tag)
+
+ else:
+ print('hive, auto alerts creation disable')
+ if flag_misp:
+ if int(r_serv_db.get('misp:auto-events')) == 1:
+ if r_serv_db.sismember('whitelist_misp', tag):
+ misp_wrapper.pushToMISP(uuid_ail, path, tag)
+ else:
+ print('misp, auto events creation disable')
diff --git a/bin/Tags.py b/bin/Tags.py
index f4939ec3..15f8f837 100755
--- a/bin/Tags.py
+++ b/bin/Tags.py
@@ -66,3 +66,5 @@ if __name__ == '__main__':
print("new paste: {}".format(path))
print(" tagged: {}".format(tag))
server_metadata.sadd('tag:'+path, tag)
+
+ p.populate_set_out(message, 'MISP_The_Hive_feeder')
diff --git a/bin/ailleakObject.py b/bin/ailleakObject.py
index bbf88711..111db905 100755
--- a/bin/ailleakObject.py
+++ b/bin/ailleakObject.py
@@ -8,12 +8,11 @@ import datetime
import json
from io import BytesIO
-class AilleakObject(AbstractMISPObjectGenerator):
- def __init__(self, moduleName, p_source, p_date, p_content, p_duplicate, p_duplicate_number):
+class AilLeakObject(AbstractMISPObjectGenerator):
+ def __init__(self, uuid_ail, p_source, p_date, p_content, p_duplicate, p_duplicate_number):
super(AbstractMISPObjectGenerator, self).__init__('ail-leak')
- self._moduleName = moduleName
- self._p_source = p_source.split('/')[-5:]
- self._p_source = '/'.join(self._p_source)[:-3] # -3 removes .gz
+ self._uuid = uuid_ail
+ self._p_source = p_source
self._p_date = p_date
self._p_content = p_content
self._p_duplicate = p_duplicate
@@ -21,14 +20,15 @@ class AilleakObject(AbstractMISPObjectGenerator):
self.generate_attributes()
def generate_attributes(self):
- self.add_attribute('type', value=self._moduleName)
self.add_attribute('origin', value=self._p_source, type='text')
- self.add_attribute('last-seen', value=self._p_date)
+ self.add_attribute('last-seen', value=self._p_date, type='datetime')
if self._p_duplicate_number > 0:
self.add_attribute('duplicate', value=self._p_duplicate, type='text')
self.add_attribute('duplicate_number', value=self._p_duplicate_number, type='counter')
- self._pseudofile = BytesIO(self._p_content)
- self.add_attribute('raw-data', value=self._p_source, data=self._pseudofile, type="attachment")
+ self._pseudofile = BytesIO(self._p_content.encode())
+ res = self.add_attribute('raw-data', value=self._p_source, data=self._pseudofile, type="attachment")# , ShadowAttribute=self.p_tag)
+ #res.add_shadow_attributes(tag)
+ self.add_attribute('sensor', value=self._uuid, type="text")
class ObjectWrapper:
def __init__(self, pymisp):
@@ -38,30 +38,40 @@ class ObjectWrapper:
cfg = configparser.ConfigParser()
cfg.read('./packages/config.cfg')
self.maxDuplicateToPushToMISP = cfg.getint("ailleakObject", "maxDuplicateToPushToMISP")
+ self.attribute_to_tag = None
- def add_new_object(self, moduleName, path):
- self.moduleName = moduleName
+ def add_new_object(self, uuid_ail, path, p_source, tag):
+ self.uuid_ail = uuid_ail
self.path = path
+ self.p_source = p_source
self.paste = Paste.Paste(path)
self.p_date = self.date_to_str(self.paste.p_date)
- self.p_source = self.paste.p_path
self.p_content = self.paste.get_p_content()
+ self.p_tag = tag
temp = self.paste._get_p_duplicate()
#beautifier
- temp = json.loads(temp)
- self.p_duplicate_number = len(temp) if len(temp) >= 0 else 0
- to_ret = ""
- for dup in temp[:self.maxDuplicateToPushToMISP]:
- algo = dup[0]
- path = dup[1].split('/')[-5:]
- path = '/'.join(path)[:-3] # -3 removes .gz
- perc = dup[2]
- to_ret += "{}: {} [{}%]\n".format(path, algo, perc)
- self.p_duplicate = to_ret
+ if not temp:
+ temp = ''
- self.mispObject = AilleakObject(self.moduleName, self.p_source, self.p_date, self.p_content, self.p_duplicate, self.p_duplicate_number)
+ p_duplicate_number = len(temp) if len(temp) >= 0 else 0
+
+ to_ret = ""
+ for dup in temp[:10]:
+ dup = dup.replace('\'','\"').replace('(','[').replace(')',']')
+ dup = json.loads(dup)
+ algo = dup[0]
+ path = dup[1].split('/')[-6:]
+ path = '/'.join(path)[:-3] # -3 removes .gz
+ if algo == 'tlsh':
+ perc = 100 - int(dup[2])
+ else:
+ perc = dup[2]
+ to_ret += "{}: {} [{}%]\n".format(path, algo, perc)
+ p_duplicate = to_ret
+
+ self.mispObject = AilLeakObject(self.uuid_ail, self.p_source, self.p_date, self.p_content, p_duplicate, p_duplicate_number)
def date_to_str(self, date):
return "{0}-{1}-{2}".format(date.year, date.month, date.day)
@@ -108,21 +118,57 @@ class ObjectWrapper:
event = self.pymisp.new_event(distribution, threat,
analysis, info, date,
published, orgc_id, org_id, sharing_group_id)
+ eventUuid = event['Event']['uuid']
+ self.pymisp.tag(eventUuid, 'infoleak:output-format="ail-daily"')
return event
# Publish object to MISP
- def pushToMISP(self):
+ def pushToMISP(self, uuid_ail, path, tag):
+ self._p_source = path.split('/')[-5:]
+ self._p_source = '/'.join(self._p_source)[:-3]
+
if self.currentID_date != datetime.date.today(): #refresh id
self.eventID_to_push = self.get_daily_event_id()
mispTYPE = 'ail-leak'
- try:
- templateID = [x['ObjectTemplate']['id'] for x in self.pymisp.get_object_templates_list() if x['ObjectTemplate']['name'] == mispTYPE][0]
- except IndexError:
- valid_types = ", ".join([x['ObjectTemplate']['name'] for x in self.pymisp.get_object_templates_list()])
- print ("Template for type %s not found! Valid types are: %s" % (mispTYPE, valid_types))
- r = self.pymisp.add_object(self.eventID_to_push, templateID, self.mispObject)
- if 'errors' in r:
- print(r)
+
+ # paste object already exist
+ if self.paste_object_exist(self.eventID_to_push, self._p_source):
+ # add new tag
+ self.tag(self.attribute_to_tag, tag)
+ print(self._p_source + ' tagged: ' + tag)
+ #create object
else:
- print('Pushed:', self.moduleName, '->', self.p_source)
+ self.add_new_object(uuid_ail, path, self._p_source, tag)
+
+
+ try:
+ templateID = [x['ObjectTemplate']['id'] for x in self.pymisp.get_object_templates_list() if x['ObjectTemplate']['name'] == mispTYPE][0]
+ except IndexError:
+ valid_types = ", ".join([x['ObjectTemplate']['name'] for x in self.pymisp.get_object_templates_list()])
+ print ("Template for type %s not found! Valid types are: %s" % (mispTYPE, valid_types))
+ r = self.pymisp.add_object(self.eventID_to_push, templateID, self.mispObject)
+ if 'errors' in r:
+ print(r)
+ else:
+ # tag new object
+ self.set_attribute_to_tag_uuid(self.eventID_to_push, self._p_source)
+ self.tag(self.attribute_to_tag, tag)
+ print('Pushed:', tag, '->', self._p_source)
+
+ def paste_object_exist(self, eventId, source):
+ res = self.pymisp.search(controller='attributes', eventid=eventId, values=source)
+ # object already exist
+ if res['response']:
+ self.attribute_to_tag = res['response']['Attribute'][0]['uuid']
+ return True
+ # new object
+ else:
+ return False
+
+ def set_attribute_to_tag_uuid(self, eventId, source):
+ res = self.pymisp.search(controller='attributes', eventid=eventId, values=source)
+ self.attribute_to_tag = res['response']['Attribute'][0]['uuid']
+
+ def tag(self, uuid, tag):
+ self.pymisp.tag(uuid, tag)
diff --git a/bin/alertHandler.py b/bin/alertHandler.py
index 60787b77..d18aaba0 100755
--- a/bin/alertHandler.py
+++ b/bin/alertHandler.py
@@ -20,16 +20,10 @@ from packages import Paste
from pubsublogger import publisher
from Helper import Process
-from pymisp import PyMISP
-import ailleakObject
import sys
sys.path.append('../')
-try:
- from mispKEYS import misp_url, misp_key, misp_verifycert
- flag_misp = True
-except:
- print('Misp keys not present')
- flag_misp = False
+
+flag_misp = False
if __name__ == "__main__":
publisher.port = 6380
@@ -38,16 +32,6 @@ if __name__ == "__main__":
config_section = 'alertHandler'
p = Process(config_section)
- if flag_misp:
- try:
- pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
- print('Connected to MISP:', misp_url)
- except:
- flag_misp = False
- print('Not connected to MISP')
-
- if flag_misp:
- wrapper = ailleakObject.ObjectWrapper(pymisp)
# port generated automatically depending on the date
curYear = datetime.now().year
@@ -77,12 +61,3 @@ if __name__ == "__main__":
server.sadd(key, p_path)
publisher.info('Saved warning paste {}'.format(p_path))
-
- # Create MISP AIL-leak object and push it
- if flag_misp:
- allowed_modules = ['credential', 'phone', 'creditcards']
- if module_name in allowed_modules:
- wrapper.add_new_object(module_name, p_path)
- wrapper.pushToMISP()
- else:
- print('not pushing to MISP:', module_name, p_path)
diff --git a/bin/packages/Paste.py b/bin/packages/Paste.py
index 317743f4..d1e3f0d3 100755
--- a/bin/packages/Paste.py
+++ b/bin/packages/Paste.py
@@ -76,7 +76,7 @@ class Paste(object):
port=cfg.getint("Redis_Data_Merging", "port"),
db=cfg.getint("Redis_Data_Merging", "db"),
decode_responses=True)
- self.store_duplicate = redis.StrictRedis(
+ self.store_metadata = redis.StrictRedis(
host=cfg.get("ARDB_Metadata", "host"),
port=cfg.getint("ARDB_Metadata", "port"),
db=cfg.getint("ARDB_Metadata", "db"),
@@ -105,6 +105,7 @@ class Paste(object):
self.p_max_length_line = None
self.array_line_above_threshold = None
self.p_duplicate = None
+ self.p_tags = None
def get_p_content(self):
"""
@@ -277,12 +278,19 @@ class Paste(object):
return False, var
def _get_p_duplicate(self):
- self.p_duplicate = self.store_duplicate.smembers('dup:'+self.p_path)
+ self.p_duplicate = self.store_metadata.smembers('dup:'+self.p_path)
if self.p_duplicate is not None:
return list(self.p_duplicate)
else:
return '[]'
+ def _get_p_tags(self):
+        self.p_tags = self.store_metadata.smembers('tag:'+self.p_path)
+        if self.p_tags is not None:
+ return list(self.p_tags)
+ else:
+ return '[]'
+
def save_all_attributes_redis(self, key=None):
"""
Saving all the attributes in a "Redis-like" Database (Redis, LevelDB)
@@ -333,7 +341,7 @@ class Paste(object):
Save an attribute as a field
"""
for tuple in value:
- self.store_duplicate.sadd('dup:'+self.p_path, tuple)
+ self.store_metadata.sadd('dup:'+self.p_path, tuple)
def save_others_pastes_attribute_duplicate(self, list_value):
"""
@@ -341,7 +349,7 @@ class Paste(object):
"""
for hash_type, path, percent, date in list_value:
to_add = (hash_type, self.p_path, percent, date)
- self.store_duplicate.sadd('dup:'+path,to_add)
+ self.store_metadata.sadd('dup:'+path,to_add)
def _get_from_redis(self, r_serv):
ans = {}
diff --git a/bin/packages/config.cfg.sample b/bin/packages/config.cfg.sample
index 1eec715d..ae015d8e 100644
--- a/bin/packages/config.cfg.sample
+++ b/bin/packages/config.cfg.sample
@@ -92,6 +92,11 @@ host = localhost
port = 6380
db = 0
+[Redis_Log_submit]
+host = localhost
+port = 6380
+db = 1
+
[Redis_Queues]
host = localhost
port = 6381
@@ -157,6 +162,11 @@ host = localhost
port = 6382
db = 7
+[ARDB_Statistics]
+host = localhost
+port = 6382
+db = 8
+
[Url]
cc_critical = DE
diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg
index 975b7b2c..71044cfb 100644
--- a/bin/packages/modules.cfg
+++ b/bin/packages/modules.cfg
@@ -82,6 +82,10 @@ subscribe = Redis_alertHandler
[Tags]
subscribe = Redis_Tags
+publish = Redis_Tags_feed
+
+[misp_the_hive_feeder]
+subscribe = Redis_Tags_feed
#[send_to_queue]
#subscribe = Redis_Cve
@@ -120,3 +124,6 @@ publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
[Bitcoin]
subscribe = Redis_Global
publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags
+
+[submit_paste]
+publish = Redis_Mixer
diff --git a/bin/submit_paste.py b/bin/submit_paste.py
new file mode 100755
index 00000000..49c8e1f0
--- /dev/null
+++ b/bin/submit_paste.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+import configparser
+import os
+import sys
+import gzip
+import io
+import redis
+import base64
+import datetime
+import time
+
+from sflock.main import unpack
+import sflock
+
+from Helper import Process
+from pubsublogger import publisher
+
+def create_paste(uuid, paste_content, ltags, ltagsgalaxies, name):
+
+ now = datetime.datetime.now()
+ save_path = 'submitted/' + now.strftime("%Y") + '/' + now.strftime("%m") + '/' + now.strftime("%d") + '/' + name + '.gz'
+
+ full_path = filename = os.path.join(os.environ['AIL_HOME'],
+ p.config.get("Directories", "pastes"), save_path)
+
+ if os.path.isfile(full_path):
+ addError(uuid, 'File: ' + save_path + ' already exist in submitted pastes')
+ return 1
+
+ gzipencoded = gzip.compress(paste_content)
+ gzip64encoded = base64.standard_b64encode(gzipencoded).decode()
+
+ # send paste to Global module
+ relay_message = "{0} {1}".format(save_path, gzip64encoded)
+ p.populate_set_out(relay_message, 'Mixer')
+
+ # add tags
+ add_tags(ltags, ltagsgalaxies, full_path)
+
+ r_serv_log_submit.incr(uuid + ':nb_end')
+ r_serv_log_submit.incr(uuid + ':nb_sucess')
+
+ if r_serv_log_submit.get(uuid + ':nb_end') == r_serv_log_submit.get(uuid + ':nb_total'):
+ r_serv_log_submit.set(uuid + ':end', 1)
+
+ print(' {} send to Global'.format(save_path))
+ r_serv_log_submit.sadd(uuid + ':paste_submit_link', full_path)
+
+ return 0
+
+def addError(uuid, errorMessage):
+ print(errorMessage)
+ error = r_serv_log_submit.get(uuid + ':error')
+ if error != None:
+        r_serv_log_submit.set(uuid + ':error', error + '\n' + errorMessage)
+ r_serv_log_submit.incr(uuid + ':nb_end')
+
+def abord_file_submission(uuid, errorMessage):
+ addError(uuid, errorMessage)
+ r_serv_log_submit.set(uuid + ':end', 1)
+ remove_submit_uuid(uuid)
+
+
+def remove_submit_uuid(uuid):
+ # save temp value on disk
+ r_serv_db.delete(uuid + ':ltags')
+ r_serv_db.delete(uuid + ':ltagsgalaxies')
+ r_serv_db.delete(uuid + ':paste_content')
+ r_serv_db.delete(uuid + ':isfile')
+ r_serv_db.delete(uuid + ':password')
+
+ r_serv_log_submit.expire(uuid + ':end', expire_time)
+ r_serv_log_submit.expire(uuid + ':processing', expire_time)
+ r_serv_log_submit.expire(uuid + ':nb_total', expire_time)
+ r_serv_log_submit.expire(uuid + ':nb_sucess', expire_time)
+ r_serv_log_submit.expire(uuid + ':nb_end', expire_time)
+ r_serv_log_submit.expire(uuid + ':error', expire_time)
+ r_serv_log_submit.srem(uuid + ':paste_submit_link', '')
+ r_serv_log_submit.expire(uuid + ':paste_submit_link', expire_time)
+
+ # delete uuid
+ r_serv_db.srem('submitted:uuid', uuid)
+ print('{} all file submitted'.format(uuid))
+
+def add_tags(tags, tagsgalaxies, path):
+ list_tag = tags.split(',')
+ list_tag_galaxies = tagsgalaxies.split(',')
+
+ if list_tag != ['']:
+ for tag in list_tag:
+ #add tag
+ r_serv_metadata.sadd('tag:'+path, tag)
+ r_serv_tags.sadd(tag, path)
+ #add new tag in list of all used tags
+ r_serv_tags.sadd('list_tags', tag)
+
+ if list_tag_galaxies != ['']:
+ for tag in list_tag_galaxies:
+ #add tag
+ r_serv_metadata.sadd('tag:'+path, tag)
+ r_serv_tags.sadd(tag, path)
+ #add new tag in list of all used tags
+ r_serv_tags.sadd('list_tags', tag)
+
+def verify_extention_filename(filename):
+ if not '.' in filename:
+ return True
+ else:
+ file_type = filename.rsplit('.', 1)[1]
+
+ #txt file
+ if file_type in ALLOWED_EXTENSIONS:
+ return True
+ else:
+ return False
+
+if __name__ == "__main__":
+
+ publisher.port = 6380
+ publisher.channel = "Script"
+
+ configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
+ if not os.path.exists(configfile):
+ raise Exception('Unable to find the configuration file. \
+ Did you set environment variables? \
+ Or activate the virtualenv.')
+
+ cfg = configparser.ConfigParser()
+ cfg.read(configfile)
+
+ r_serv_db = redis.StrictRedis(
+ host=cfg.get("ARDB_DB", "host"),
+ port=cfg.getint("ARDB_DB", "port"),
+ db=cfg.getint("ARDB_DB", "db"),
+ decode_responses=True)
+
+ r_serv_log_submit = redis.StrictRedis(
+ host=cfg.get("Redis_Log_submit", "host"),
+ port=cfg.getint("Redis_Log_submit", "port"),
+ db=cfg.getint("Redis_Log_submit", "db"),
+ decode_responses=True)
+
+ r_serv_tags = redis.StrictRedis(
+ host=cfg.get("ARDB_Tags", "host"),
+ port=cfg.getint("ARDB_Tags", "port"),
+ db=cfg.getint("ARDB_Tags", "db"),
+ decode_responses=True)
+
+ r_serv_metadata = redis.StrictRedis(
+ host=cfg.get("ARDB_Metadata", "host"),
+ port=cfg.getint("ARDB_Metadata", "port"),
+ db=cfg.getint("ARDB_Metadata", "db"),
+ decode_responses=True)
+
+ expire_time = 120
+ MAX_FILE_SIZE = 1000000000
+ ALLOWED_EXTENSIONS = ['txt', 'sh', 'pdf']
+
+ config_section = 'submit_paste'
+ p = Process(config_section)
+
+ while True:
+
+ # paste submitted
+ if r_serv_db.scard('submitted:uuid') > 0:
+ uuid = r_serv_db.srandmember('submitted:uuid')
+
+ # get temp value save on disk
+ ltags = r_serv_db.get(uuid + ':ltags')
+ ltagsgalaxies = r_serv_db.get(uuid + ':ltagsgalaxies')
+ paste_content = r_serv_db.get(uuid + ':paste_content')
+ isfile = r_serv_db.get(uuid + ':isfile')
+ password = r_serv_db.get(uuid + ':password')
+
+ # needed if redis is restarted
+ r_serv_log_submit.set(uuid + ':end', 0)
+ r_serv_log_submit.set(uuid + ':processing', 0)
+ r_serv_log_submit.set(uuid + ':nb_total', -1)
+ r_serv_log_submit.set(uuid + ':nb_end', 0)
+ r_serv_log_submit.set(uuid + ':nb_sucess', 0)
+ r_serv_log_submit.set(uuid + ':error', 'error:')
+ r_serv_log_submit.sadd(uuid + ':paste_submit_link', '')
+
+
+ r_serv_log_submit.set(uuid + ':processing', 1)
+
+ if isfile == 'True':
+ file_full_path = paste_content
+
+ if not os.path.exists(file_full_path):
+ abord_file_submission(uuid, "Server Error, the archive can't be found")
+ continue
+
+ #verify file lengh
+ if os.stat(file_full_path).st_size > MAX_FILE_SIZE:
+ abord_file_submission(uuid, 'File :{} too large'.format(file_full_path))
+
+ else:
+ filename = file_full_path.split('/')[-1]
+ if not '.' in filename:
+ # read file
+ try:
+ with open(file_full_path,'r') as f:
+ content = f.read()
+ except:
+ abord_file_submission(uuid, "file error")
+ continue
+ r_serv_log_submit.set(uuid + ':nb_total', 1)
+ create_paste(uuid, content, ltags, ltagsgalaxies, uuid)
+ remove_submit_uuid(uuid)
+
+ else:
+ file_type = filename.rsplit('.', 1)[1]
+
+ #txt file
+ if file_type in ALLOWED_EXTENSIONS:
+ with open(file_full_path,'r') as f:
+ content = f.read()
+ r_serv_log_submit.set(uuid + ':nb_total', 1)
+ create_paste(uuid, content.encode(), ltags, ltagsgalaxies, uuid)
+ remove_submit_uuid(uuid)
+ #compressed file
+ else:
+ #decompress file
+ try:
+ if password == '':
+ files = unpack(file_full_path.encode())
+ #print(files.children)
+ else:
+ try:
+ files = unpack(file_full_path.encode(), password=password.encode())
+ #print(files.children)
+ except sflock.exception.IncorrectUsageException:
+ abord_file_submission(uuid, "Wrong Password")
+ continue
+ except:
+ abord_file_submission(uuid, "file decompression error")
+ continue
+ print('unpacking {} file'.format(files.unpacker))
+ if(not files.children):
+ abord_file_submission(uuid, "Empty compressed file")
+ continue
+ # set number of files to submit
+ r_serv_log_submit.set(uuid + ':nb_total', len(files.children))
+ n = 1
+ for child in files.children:
+ if verify_extention_filename(child.filename.decode()):
+ create_paste(uuid, child.contents, ltags, ltagsgalaxies, uuid+'_'+ str(n) )
+ n = n + 1
+ else:
+ print('bad extention')
+ addError(uuid, 'Bad file extension: {}'.format(child.filename.decode()) )
+
+ except FileNotFoundError:
+ print('file not found')
+                        addError(uuid, 'File not found: {}'.format(file_full_path))
+
+ remove_submit_uuid(uuid)
+
+
+
+ # textarea input paste
+ else:
+ r_serv_log_submit.set(uuid + ':nb_total', 1)
+ create_paste(uuid, paste_content.encode(), ltags, ltagsgalaxies, uuid)
+ remove_submit_uuid(uuid)
+ time.sleep(0.5)
+
+ # wait for paste
+ else:
+ publisher.debug("Script submit_paste is Idling 10s")
+ time.sleep(3)
diff --git a/configs/keys/mispKEYS.py.sample b/configs/keys/mispKEYS.py.sample
new file mode 100644
index 00000000..55e00922
--- /dev/null
+++ b/configs/keys/mispKEYS.py.sample
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+misp_url = 'http://10.1.0.143'
+misp_key = 'c5VRXJahYVux3rkPULbbILTNmAwlXU8Eas9zvl36' # The MISP auth key can be found on the MISP web interface under the automation section
+misp_verifycert = True
diff --git a/configs/keys/theHiveKEYS.py.sample b/configs/keys/theHiveKEYS.py.sample
new file mode 100644
index 00000000..7ff9925a
--- /dev/null
+++ b/configs/keys/theHiveKEYS.py.sample
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+the_hive_url = 'http://10.1.0.145:9000'
+the_hive_key = 'KfiPGz3Bi8E5styWbC2eU4WiTKDGdf22' # The Hive auth key can be found on the The Hive web interface under the User Management
+the_hive_verifycert = True
diff --git a/doc/screenshots/paste_submit.png b/doc/screenshots/paste_submit.png
new file mode 100644
index 00000000..6ec13bd1
Binary files /dev/null and b/doc/screenshots/paste_submit.png differ
diff --git a/doc/screenshots/tag_auto_export.png b/doc/screenshots/tag_auto_export.png
new file mode 100644
index 00000000..73a88c10
Binary files /dev/null and b/doc/screenshots/tag_auto_export.png differ
diff --git a/installing_deps.sh b/installing_deps.sh
index 93a38a1c..9376dd65 100755
--- a/installing_deps.sh
+++ b/installing_deps.sh
@@ -33,6 +33,9 @@ sudo pip install nose
sudo apt-get install libfuzzy-dev -y
sudo apt-get install build-essential libffi-dev automake autoconf libtool -y
+# sflock, gz requirement
+sudo apt-get install p7zip-full -y
+
# REDIS #
test ! -d redis/ && git clone https://github.com/antirez/redis.git
pushd redis/
diff --git a/mispKEYS.py.default b/mispKEYS.py.default
deleted file mode 100644
index 42c534b8..00000000
--- a/mispKEYS.py.default
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-misp_url = ''
-misp_key = '' # The MISP auth key can be found on the MISP web interface under the automation section
-misp_verifycert = True
diff --git a/pip3_packages_requirement.txt b/pip3_packages_requirement.txt
index 78b19d64..7ab82b6b 100644
--- a/pip3_packages_requirement.txt
+++ b/pip3_packages_requirement.txt
@@ -1,5 +1,7 @@
pymisp
+thehive4py
+
redis
#filemagic conflict with magic
crcmod
@@ -56,6 +58,9 @@ pycountry
# To fetch Onion urls
PySocks
+# decompress files
+sflock
+
#ASN lookup requirements
#https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/adns-python/adns-python-1.2.1.tar.gz
https://github.com/trolldbois/python3-adns/archive/master.zip
diff --git a/var/www/Flask_server.py b/var/www/Flask_server.py
index 9c8ad621..068bee65 100755
--- a/var/www/Flask_server.py
+++ b/var/www/Flask_server.py
@@ -7,7 +7,7 @@ import json
import datetime
import time
import calendar
-from flask import Flask, render_template, jsonify, request
+from flask import Flask, render_template, jsonify, request, Request
import flask
import importlib
import os
@@ -28,6 +28,7 @@ cfg = Flask_config.cfg
Flask_config.app = Flask(__name__, static_url_path='/static/')
app = Flask_config.app
+app.config['MAX_CONTENT_LENGTH'] = 900 * 1024 * 1024
# ========= HEADER GENERATION ========
@@ -134,6 +135,19 @@ for tag in taxonomies.get('gdpr').machinetags():
for tag in taxonomies.get('fpf').machinetags():
r_serv_tags.sadd('active_tag_fpf', tag)
+# ========== INITIAL tags auto export ============
+r_serv_db = redis.StrictRedis(
+ host=cfg.get("ARDB_DB", "host"),
+ port=cfg.getint("ARDB_DB", "port"),
+ db=cfg.getint("ARDB_DB", "db"),
+ decode_responses=True)
+infoleak_tags = taxonomies.get('infoleak').machinetags()
+infoleak_automatic_tags = []
+for tag in taxonomies.get('infoleak').machinetags():
+ if tag.split('=')[0][:] == 'infoleak:automatic-detection':
+ r_serv_db.sadd('list_export_tags', tag)
+
+r_serv_db.sadd('list_export_tags', 'infoleak:submission="manual"')
# ============ MAIN ============
if __name__ == "__main__":
diff --git a/var/www/modules/Flask_config.py b/var/www/modules/Flask_config.py
index 26edccfa..80ef9f18 100644
--- a/var/www/modules/Flask_config.py
+++ b/var/www/modules/Flask_config.py
@@ -7,9 +7,11 @@
import configparser
import redis
import os
+import sys
# FLASK #
app = None
+#secret_key = 'ail-super-secret_key01C'
# CONFIG #
configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
@@ -21,7 +23,6 @@ if not os.path.exists(configfile):
cfg = configparser.ConfigParser()
cfg.read(configfile)
-
# REDIS #
r_serv = redis.StrictRedis(
host=cfg.get("Redis_Queues", "host"),
@@ -35,6 +36,12 @@ r_serv_log = redis.StrictRedis(
db=cfg.getint("Redis_Log", "db"),
decode_responses=True)
+r_serv_log_submit = redis.StrictRedis(
+ host=cfg.get("Redis_Log_submit", "host"),
+ port=cfg.getint("Redis_Log_submit", "port"),
+ db=cfg.getint("Redis_Log_submit", "db"),
+ decode_responses=True)
+
r_serv_charts = redis.StrictRedis(
host=cfg.get("ARDB_Trending", "host"),
port=cfg.getint("ARDB_Trending", "port"),
@@ -77,6 +84,56 @@ r_serv_metadata = redis.StrictRedis(
db=cfg.getint("ARDB_Metadata", "db"),
decode_responses=True)
+r_serv_db = redis.StrictRedis(
+ host=cfg.get("ARDB_DB", "host"),
+ port=cfg.getint("ARDB_DB", "port"),
+ db=cfg.getint("ARDB_DB", "db"),
+ decode_responses=True)
+
+r_serv_statistics = redis.StrictRedis(
+ host=cfg.get("ARDB_Statistics", "host"),
+ port=cfg.getint("ARDB_Statistics", "port"),
+ db=cfg.getint("ARDB_Statistics", "db"),
+ decode_responses=True)
+
+
+sys.path.append('../../configs/keys')
+# MISP #
+try:
+ from pymisp import PyMISP
+ from mispKEYS import misp_url, misp_key, misp_verifycert
+ pymisp = PyMISP(misp_url, misp_key, misp_verifycert)
+ misp_event_url = misp_url + '/events/view/'
+ print('Misp connected')
+except:
+ print('Misp not connected')
+ pymisp = False
+ misp_event_url = '#'
+# The Hive #
+try:
+ from thehive4py.api import TheHiveApi
+ import thehive4py.exceptions
+ from theHiveKEYS import the_hive_url, the_hive_key, the_hive_verifycert
+ if the_hive_url == '':
+ HiveApi = False
+ hive_case_url = '#'
+ print('The HIVE not connected')
+ else:
+ HiveApi = TheHiveApi(the_hive_url, the_hive_key, cert=the_hive_verifycert)
+ hive_case_url = the_hive_url+'/index.html#/case/id_here/details'
+except:
+ print('The HIVE not connected')
+ HiveApi = False
+ hive_case_url = '#'
+
+if HiveApi != False:
+ try:
+ HiveApi.get_alert(0)
+ print('The Hive connected')
+ except thehive4py.exceptions.AlertException:
+ HiveApi = False
+ print('The Hive not connected')
+
# VARIABLES #
max_preview_char = int(cfg.get("Flask", "max_preview_char")) # Maximum number of character to display in the tooltip
max_preview_modal = int(cfg.get("Flask", "max_preview_modal")) # Maximum number of character to display in the modal
@@ -84,3 +141,5 @@ max_preview_modal = int(cfg.get("Flask", "max_preview_modal")) # Maximum number
DiffMaxLineLength = int(cfg.get("Flask", "DiffMaxLineLength"))#Use to display the estimated percentage instead of a raw value
bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info']
+
+UPLOAD_FOLDER = os.path.join(os.environ['AIL_FLASK'], 'submitted')
diff --git a/var/www/modules/PasteSubmit/Flask_PasteSubmit.py b/var/www/modules/PasteSubmit/Flask_PasteSubmit.py
new file mode 100644
index 00000000..34e8c458
--- /dev/null
+++ b/var/www/modules/PasteSubmit/Flask_PasteSubmit.py
@@ -0,0 +1,591 @@
+#!/usr/bin/env python3
+# -*-coding:UTF-8 -*
+
+'''
+ Flask functions and routes for the trending modules page
+'''
+import redis
+from flask import Flask, render_template, jsonify, request, Blueprint, url_for, redirect
+
+import unicodedata
+import string
+import subprocess
+import os
+import sys
+import datetime
+import uuid
+from io import BytesIO
+from Date import Date
+import json
+
+import Paste
+
+from pytaxonomies import Taxonomies
+from pymispgalaxies import Galaxies, Clusters
+
+try:
+ from pymisp.mispevent import MISPObject
+ flag_misp = True
+except:
+ flag_misp = False
+try:
+ from thehive4py.models import Case, CaseTask, CustomFieldHelper, CaseObservable
+ flag_hive = True
+except:
+ flag_hive = False
+
+# ============ VARIABLES ============
+import Flask_config
+
+app = Flask_config.app
+cfg = Flask_config.cfg
+r_serv_tags = Flask_config.r_serv_tags
+r_serv_metadata = Flask_config.r_serv_metadata
+r_serv_db = Flask_config.r_serv_db
+r_serv_log_submit = Flask_config.r_serv_log_submit
+
+pymisp = Flask_config.pymisp
+if pymisp is False:
+ flag_misp = False
+
+HiveApi = Flask_config.HiveApi
+if HiveApi is False:
+ flag_hive = False
+
+PasteSubmit = Blueprint('PasteSubmit', __name__, template_folder='templates')
+
+valid_filename_chars = "-_ %s%s" % (string.ascii_letters, string.digits)
+
+ALLOWED_EXTENSIONS = set(['txt', 'sh', 'pdf', 'zip', 'gz', 'tar.gz'])
+UPLOAD_FOLDER = Flask_config.UPLOAD_FOLDER
+
+misp_event_url = Flask_config.misp_event_url
+hive_case_url = Flask_config.hive_case_url
+
+# ============ FUNCTIONS ============
+def one():
+ return 1
+
+def allowed_file(filename):
+ if not '.' in filename:
+ return True
+ else:
+ return filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
+
+def clean_filename(filename, whitelist=valid_filename_chars, replace=' '):
+ # replace characters
+ for r in replace:
+ filename = filename.replace(r,'_')
+
+ # keep only valid ascii chars
+ cleaned_filename = unicodedata.normalize('NFKD', filename).encode('ASCII', 'ignore').decode()
+
+ # keep only whitelisted chars
+ return ''.join(c for c in cleaned_filename if c in whitelist)
+
+def launch_submit(ltags, ltagsgalaxies, paste_content, UUID, password, isfile = False):
+
+ # save temp value on disk
+ r_serv_db.set(UUID + ':ltags', ltags)
+ r_serv_db.set(UUID + ':ltagsgalaxies', ltagsgalaxies)
+ r_serv_db.set(UUID + ':paste_content', paste_content)
+ r_serv_db.set(UUID + ':password', password)
+ r_serv_db.set(UUID + ':isfile', isfile)
+
+ r_serv_log_submit.set(UUID + ':end', 0)
+ r_serv_log_submit.set(UUID + ':processing', 0)
+ r_serv_log_submit.set(UUID + ':nb_total', -1)
+ r_serv_log_submit.set(UUID + ':nb_end', 0)
+ r_serv_log_submit.set(UUID + ':nb_sucess', 0)
+ r_serv_log_submit.set(UUID + ':error', 'error:')
+ r_serv_log_submit.sadd(UUID + ':paste_submit_link', '')
+
+
+ # save UUID on disk
+ r_serv_db.sadd('submitted:uuid', UUID)
+
+
+def addTagsVerification(tags, tagsgalaxies):
+
+ list_tag = tags.split(',')
+ list_tag_galaxies = tagsgalaxies.split(',')
+
+ taxonomies = Taxonomies()
+ active_taxonomies = r_serv_tags.smembers('active_taxonomies')
+
+ active_galaxies = r_serv_tags.smembers('active_galaxies')
+
+ if list_tag != ['']:
+ for tag in list_tag:
+ # verify input
+ tax = tag.split(':')[0]
+ if tax in active_taxonomies:
+ if tag in r_serv_tags.smembers('active_tag_' + tax):
+ pass
+ else:
+ return False
+ else:
+ return False
+
+ if list_tag_galaxies != ['']:
+ for tag in list_tag_galaxies:
+ # verify input
+ gal = tag.split(':')[1]
+ gal = gal.split('=')[0]
+
+ if gal in active_galaxies:
+ if tag in r_serv_tags.smembers('active_tag_galaxies_' + gal):
+ pass
+ else:
+ return False
+ else:
+ return False
+ return True
+
+def date_to_str(date):
+ return "{0}-{1}-{2}".format(date.year, date.month, date.day)
+
+def misp_create_event(distribution, threat_level_id, analysis, info, l_tags, publish, path):
+
+ paste = Paste.Paste(path)
+ source = path.split('/')[-6:]
+ source = '/'.join(source)[:-3]
+ ail_uuid = r_serv_db.get('ail:uuid')
+ pseudofile = BytesIO(paste.get_p_content().encode())
+
+ temp = paste._get_p_duplicate()
+
+ #beautifier
+ if not temp:
+ temp = ''
+
+    p_duplicate_number = len(temp)  # len() is never negative; the "if len >= 0 else 0" guard was a no-op
+
+ to_ret = ""
+ for dup in temp[:10]:
+ dup = dup.replace('\'','\"').replace('(','[').replace(')',']')
+ dup = json.loads(dup)
+ algo = dup[0]
+ path = dup[1].split('/')[-6:]
+ path = '/'.join(path)[:-3] # -3 removes .gz
+ if algo == 'tlsh':
+ perc = 100 - int(dup[2])
+ else:
+ perc = dup[2]
+ to_ret += "{}: {} [{}%]\n".format(path, algo, perc)
+ p_duplicate = to_ret
+
+ today = datetime.date.today()
+ # [0-3]
+ if publish == 'True':
+ published = True
+ else:
+ published = False
+ org_id = None
+ orgc_id = None
+ sharing_group_id = None
+ date = today
+ event = pymisp.new_event(distribution, threat_level_id,
+ analysis, info, date,
+ published, orgc_id, org_id, sharing_group_id)
+ eventUuid = event['Event']['uuid']
+ eventid = event['Event']['id']
+
+ r_serv_metadata.set('misp_events:'+path, eventid)
+
+ # add tags
+ for tag in l_tags:
+ pymisp.tag(eventUuid, tag)
+
+ # create attributes
+ obj_name = 'ail-leak'
+ leak_obj = MISPObject(obj_name)
+ leak_obj.add_attribute('sensor', value=ail_uuid, type="text")
+ leak_obj.add_attribute('origin', value=source, type='text')
+ leak_obj.add_attribute('last-seen', value=date_to_str(paste.p_date), type='datetime')
+ leak_obj.add_attribute('raw-data', value=source, data=pseudofile, type="attachment")
+
+ if p_duplicate_number > 0:
+ leak_obj.add_attribute('duplicate', value=p_duplicate, type='text')
+ leak_obj.add_attribute('duplicate_number', value=p_duplicate_number, type='counter')
+
+ try:
+ templateID = [x['ObjectTemplate']['id'] for x in pymisp.get_object_templates_list() if x['ObjectTemplate']['name'] == obj_name][0]
+ except IndexError:
+ valid_types = ", ".join([x['ObjectTemplate']['name'] for x in pymisp.get_object_templates_list()])
+        print("Template for type {} not found! Valid types are: {}".format(obj_name, valid_types))
+ r = pymisp.add_object(eventid, templateID, leak_obj)
+ if 'errors' in r:
+ print(r)
+ return False
+ else:
+ event_url = misp_event_url + eventid
+ return event_url
+
+def hive_create_case(hive_tlp, threat_level, hive_description, hive_case_title, l_tags, path):
+
+ ail_uuid = r_serv_db.get('ail:uuid')
+ source = path.split('/')[-6:]
+ source = '/'.join(source)[:-3]
+ # get paste date
+ var = path.split('/')
+ last_seen = "{0}-{1}-{2}".format(var[-4], var[-3], var[-2])
+
+ case = Case(title=hive_case_title,
+ tlp=hive_tlp,
+ severity=threat_level,
+ flag=False,
+ tags=l_tags,
+                description=hive_description)
+
+ # Create the case
+ id = None
+ response = HiveApi.create_case(case)
+ if response.status_code == 201:
+ id = response.json()['id']
+
+ observ_sensor = CaseObservable(dataType="other", data=[ail_uuid], message="sensor")
+ observ_file = CaseObservable(dataType="file", data=[path], tags=l_tags)
+ observ_source = CaseObservable(dataType="other", data=[source], message="source")
+ observ_last_seen = CaseObservable(dataType="other", data=[last_seen], message="last-seen")
+
+ res = HiveApi.create_case_observable(id,observ_sensor)
+ if res.status_code != 201:
+ print('ko: {}/{}'.format(res.status_code, res.text))
+ res = HiveApi.create_case_observable(id, observ_source)
+ if res.status_code != 201:
+ print('ko: {}/{}'.format(res.status_code, res.text))
+ res = HiveApi.create_case_observable(id, observ_file)
+ if res.status_code != 201:
+ print('ko: {}/{}'.format(res.status_code, res.text))
+ res = HiveApi.create_case_observable(id, observ_last_seen)
+ if res.status_code != 201:
+ print('ko: {}/{}'.format(res.status_code, res.text))
+
+ r_serv_metadata.set('hive_cases:'+path, id)
+
+ return hive_case_url.replace('id_here', id)
+ else:
+ print('ko: {}/{}'.format(response.status_code, response.text))
+ return False
+
+# ============= ROUTES ==============
+
+@PasteSubmit.route("/PasteSubmit/", methods=['GET'])
+def PasteSubmit_page():
+ #active taxonomies
+ active_taxonomies = r_serv_tags.smembers('active_taxonomies')
+
+ #active galaxies
+ active_galaxies = r_serv_tags.smembers('active_galaxies')
+
+ return render_template("PasteSubmit.html",
+ active_taxonomies = active_taxonomies,
+ active_galaxies = active_galaxies)
+
+@PasteSubmit.route("/PasteSubmit/submit", methods=['POST'])
+def submit():
+
+ #paste_name = request.form['paste_name']
+
+ password = request.form['password']
+ ltags = request.form['tags_taxonomies']
+ ltagsgalaxies = request.form['tags_galaxies']
+ paste_content = request.form['paste_content']
+
+ submitted_tag = 'infoleak:submission="manual"'
+
+ if ltags or ltagsgalaxies:
+ if not addTagsVerification(ltags, ltagsgalaxies):
+ content = 'INVALID TAGS'
+ return content, 400
+
+ # add submitted tags
+ if(ltags != ''):
+ ltags = ltags + ',' + submitted_tag
+ else:
+ ltags = submitted_tag
+
+ if 'file' in request.files:
+
+ file = request.files['file']
+ if file:
+
+ if file and allowed_file(file.filename):
+
+ # get UUID
+ UUID = str(uuid.uuid4())
+
+ '''if paste_name:
+ # clean file name
+ UUID = clean_filename(paste_name)'''
+
+ # create submitted dir
+ if not os.path.exists(UPLOAD_FOLDER):
+ os.makedirs(UPLOAD_FOLDER)
+
+ if not '.' in file.filename:
+ full_path = os.path.join(UPLOAD_FOLDER, UUID)
+ else:
+ if file.filename[-6:] == 'tar.gz':
+ file_type = 'tar.gz'
+ else:
+ file_type = file.filename.rsplit('.', 1)[1]
+ name = UUID + '.' + file_type
+ full_path = os.path.join(UPLOAD_FOLDER, name)
+
+ #Flask verify the file size
+ file.save(full_path)
+
+ paste_content = full_path
+
+ launch_submit(ltags, ltagsgalaxies, paste_content, UUID, password ,True)
+
+ return render_template("submiting.html",
+ UUID = UUID)
+
+ else:
+                content = 'wrong file type, allowed_extensions: txt, sh, pdf, zip, gz, tar.gz or remove the extension'
+ return content, 400
+
+
+ elif paste_content != '':
+ if sys.getsizeof(paste_content) < 900000:
+
+ # get id
+ UUID = str(uuid.uuid4())
+
+ #if paste_name:
+ # clean file name
+ #id = clean_filename(paste_name)
+
+ launch_submit(ltags, ltagsgalaxies, paste_content, UUID, password)
+
+ return render_template("submiting.html",
+ UUID = UUID)
+
+ else:
+ content = 'size error'
+ return content, 400
+
+ content = 'submit aborded'
+ return content, 400
+
+
+ return PasteSubmit_page()
+
+@PasteSubmit.route("/PasteSubmit/submit_status", methods=['GET'])
+def submit_status():
+ UUID = request.args.get('UUID')
+
+ if UUID:
+ end = r_serv_log_submit.get(UUID + ':end')
+ nb_total = r_serv_log_submit.get(UUID + ':nb_total')
+ nb_end = r_serv_log_submit.get(UUID + ':nb_end')
+ error = r_serv_log_submit.get(UUID + ':error')
+ processing = r_serv_log_submit.get(UUID + ':processing')
+ nb_sucess = r_serv_log_submit.get(UUID + ':nb_sucess')
+ paste_submit_link = list(r_serv_log_submit.smembers(UUID + ':paste_submit_link'))
+
+ if (end != None) and (nb_total != None) and (nb_end != None) and (error != None) and (processing != None) and (paste_submit_link != None):
+
+ link = ''
+ if paste_submit_link:
+ for paste in paste_submit_link:
+ url = url_for('showsavedpastes.showsavedpaste') + '?paste=' + paste
+                    link += '<a target="_blank" href="' + url + '">' + paste + '</a>'
+
+ if nb_total == '-1':
+ in_progress = nb_sucess + ' / '
+ else:
+ in_progress = nb_sucess + ' / ' + nb_total
+
+ if int(nb_total) != 0:
+ prog = int(int(nb_end) * 100 / int(nb_total))
+ else:
+ prog = 0
+
+ if error == 'error:':
+ isError = False
+ else:
+ isError = True
+
+ if end == '0':
+ end = False
+ else:
+ end = True
+
+ if processing == '0':
+ processing = False
+ else:
+ processing = True
+
+ return jsonify(end=end,
+ in_progress=in_progress,
+ prog=prog,
+ link=link,
+ processing=processing,
+ isError=isError,
+ error=error)
+ else:
+ # FIXME TODO
+ print(end)
+ print(nb_total)
+ print(nb_end)
+ print(error)
+ print(processing)
+ print(nb_sucess)
+ return 'to do'
+ else:
+ return 'INVALID UUID'
+
+
+@PasteSubmit.route("/PasteSubmit/create_misp_event", methods=['POST'])
+def create_misp_event():
+
+ distribution = int(request.form['misp_data[Event][distribution]'])
+ threat_level_id = int(request.form['misp_data[Event][threat_level_id]'])
+ analysis = int(request.form['misp_data[Event][analysis]'])
+ info = request.form['misp_data[Event][info]']
+ path = request.form['paste']
+ publish = request.form.get('misp_publish')
+
+ #verify input
+ if (0 <= distribution <= 3) and (1 <= threat_level_id <= 4) and (0 <= analysis <= 2):
+
+ l_tags = list(r_serv_metadata.smembers('tag:'+path))
+ event = misp_create_event(distribution, threat_level_id, analysis, info, l_tags, publish, path)
+
+ if event != False:
+ return redirect(event)
+ else:
+ return 'error, event creation'
+ return 'error0'
+
+@PasteSubmit.route("/PasteSubmit/create_hive_case", methods=['POST'])
+def create_hive_case():
+
+ hive_tlp = int(request.form['hive_tlp'])
+ threat_level = int(request.form['threat_level_hive'])
+ hive_description = request.form['hive_description']
+ hive_case_title = request.form['hive_case_title']
+ path = request.form['paste']
+
+ #verify input
+ if (0 <= hive_tlp <= 3) and (1 <= threat_level <= 4):
+
+ l_tags = list(r_serv_metadata.smembers('tag:'+path))
+ case = hive_create_case(hive_tlp, threat_level, hive_description, hive_case_title, l_tags, path)
+
+ if case != False:
+ return redirect(case)
+ else:
+ return 'error'
+
+ return 'error'
+
+@PasteSubmit.route("/PasteSubmit/edit_tag_export")
+def edit_tag_export():
+ misp_auto_events = r_serv_db.get('misp:auto-events')
+ hive_auto_alerts = r_serv_db.get('hive:auto-alerts')
+
+ whitelist_misp = r_serv_db.scard('whitelist_misp')
+ whitelist_hive = r_serv_db.scard('whitelist_hive')
+
+ list_export_tags = list(r_serv_db.smembers('list_export_tags'))
+ status_misp = []
+ status_hive = []
+
+
+ for tag in list_export_tags:
+ if r_serv_db.sismember('whitelist_misp', tag):
+ status_misp.append(True)
+ else:
+ status_misp.append(False)
+
+ # empty whitelist
+ if whitelist_hive == 0:
+ for tag in list_export_tags:
+ status_hive.append(True)
+ else:
+ for tag in list_export_tags:
+ if r_serv_db.sismember('whitelist_hive', tag):
+ status_hive.append(True)
+ else:
+ status_hive.append(False)
+
+ if (misp_auto_events is not None) and (hive_auto_alerts is not None):
+
+ if int(misp_auto_events) == 1:
+ misp_active = True
+ else:
+ misp_active = False
+ if int(hive_auto_alerts) == 1:
+ hive_active = True
+ else:
+ hive_active = False
+ else:
+ misp_active = False
+ hive_active = False
+
+ nb_tags = str(r_serv_db.scard('list_export_tags'))
+ nb_tags_whitelist_misp = str(r_serv_db.scard('whitelist_misp')) + ' / ' + nb_tags
+ nb_tags_whitelist_hive = str(r_serv_db.scard('whitelist_hive')) + ' / ' + nb_tags
+
+ return render_template("edit_tag_export.html",
+ misp_active=misp_active,
+ hive_active=hive_active,
+ list_export_tags=list_export_tags,
+ status_misp=status_misp,
+ status_hive=status_hive,
+ nb_tags_whitelist_misp=nb_tags_whitelist_misp,
+ nb_tags_whitelist_hive=nb_tags_whitelist_hive,
+ flag_misp=flag_misp,
+ flag_hive=flag_hive)
+
+@PasteSubmit.route("/PasteSubmit/tag_export_edited", methods=['POST'])
+def tag_export_edited():
+ tag_enabled_misp = request.form.getlist('tag_enabled_misp')
+ tag_enabled_hive = request.form.getlist('tag_enabled_hive')
+
+ list_export_tags = list(r_serv_db.smembers('list_export_tags'))
+
+ r_serv_db.delete('whitelist_misp')
+ r_serv_db.delete('whitelist_hive')
+
+ for tag in tag_enabled_misp:
+ if r_serv_db.sismember('list_export_tags', tag):
+ r_serv_db.sadd('whitelist_misp', tag)
+ else:
+ return 'invalid input'
+
+ for tag in tag_enabled_hive:
+ if r_serv_db.sismember('list_export_tags', tag):
+ r_serv_db.sadd('whitelist_hive', tag)
+ else:
+ return 'invalid input'
+
+ return redirect(url_for('PasteSubmit.edit_tag_export'))
+
+@PasteSubmit.route("/PasteSubmit/enable_misp_auto_event")
+def enable_misp_auto_event():
+ r_serv_db.set('misp:auto-events', 1)
+ return edit_tag_export()
+
+@PasteSubmit.route("/PasteSubmit/disable_misp_auto_event")
+def disable_misp_auto_event():
+ r_serv_db.set('misp:auto-events', 0)
+ return edit_tag_export()
+
+@PasteSubmit.route("/PasteSubmit/enable_hive_auto_alert")
+def enable_hive_auto_alert():
+ r_serv_db.set('hive:auto-alerts', 1)
+ return edit_tag_export()
+
+@PasteSubmit.route("/PasteSubmit/disable_hive_auto_alert")
+def disable_hive_auto_alert():
+ r_serv_db.set('hive:auto-alerts', 0)
+ return edit_tag_export()
+
+# ========= REGISTRATION =========
+app.register_blueprint(PasteSubmit)
diff --git a/var/www/modules/PasteSubmit/templates/PasteSubmit.html b/var/www/modules/PasteSubmit/templates/PasteSubmit.html
new file mode 100644
index 00000000..ce1fb29f
--- /dev/null
+++ b/var/www/modules/PasteSubmit/templates/PasteSubmit.html
@@ -0,0 +1,200 @@
+
+
+
+