diff --git a/.gitignore b/.gitignore index d62fbd8b..4a266743 100644 --- a/.gitignore +++ b/.gitignore @@ -7,10 +7,15 @@ AILENV redis-leveldb redis +ardb +faup +tlsh Blooms LEVEL_DB_DATA PASTES -bin/indexdir/ +BASE64 +DATA_ARDB +indexdir/ logs/ # Webstuff @@ -20,7 +25,7 @@ var/www/static/ !var/www/static/js/moduleTrending.js !var/www/static/js/plot-graph.js !var/www/static/js/trendingchart.js -var/www/templates/header.html # auto-generated +var/www/templates/header.html # Local config bin/packages/config.cfg @@ -28,4 +33,6 @@ bin/packages/config.cfg # installed files nltk_data/ doc/all_modules.txt -doc/module-data-flow.png # auto-generated +# auto generated +doc/module-data-flow.png +doc/data-flow.png diff --git a/README.md b/README.md index b977c185..74426e4d 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,7 @@ Features * Detect Amazon AWS and Google API keys * Detect Bitcoin address and Bitcoin private keys * Detect private keys and certificate +* Tagging system with [MISP Galaxy](https://github.com/MISP/misp-galaxy) and [MISP Taxonomies](https://github.com/MISP/misp-taxonomies) tags Installation ------------ @@ -144,6 +145,11 @@ Browsing ![Browse-Pastes](./doc/screenshots/browse-important.png?raw=true "AIL framework browseImportantPastes") +Tagging system +-------- + +![Tags](./doc/screenshots/tags.png?raw=true "AIL framework tags") + Sentiment analysis ------------------ diff --git a/bin/ApiKey.py b/bin/ApiKey.py index 8ce7e2b4..e7ded9b2 100755 --- a/bin/ApiKey.py +++ b/bin/ApiKey.py @@ -41,6 +41,8 @@ def search_api_key(message): print(to_print) publisher.warning('{}Checked {} found Google API Key;{}'.format( to_print, len(google_api_key), paste.p_path)) + msg = 'infoleak:automatic-detection="google-api-key";{}'.format(filename) + p.populate_set_out(msg, 'Tags') if(len(aws_access_key) > 0 or len(aws_secret_key) > 0): print('found AWS key') @@ -48,8 +50,13 @@ def search_api_key(message): total = len(aws_access_key) + len(aws_secret_key) publisher.warning('{}Checked {} found AWS Key;{}'.format( to_print, total, paste.p_path)) + msg = 'infoleak:automatic-detection="aws-key";{}'.format(filename) + p.populate_set_out(msg, 'Tags') + msg = 'infoleak:automatic-detection="api-key";{}'.format(filename) + p.populate_set_out(msg, 'Tags') + msg = 'apikey;{}'.format(filename) p.populate_set_out(msg, 'alertHandler') #Send to duplicate diff --git a/bin/Base64.py b/bin/Base64.py index c7700994..960ca6de 100755 --- a/bin/Base64.py +++ b/bin/Base64.py @@ -65,6 +65,9 @@ def search_base64(content, message): msg = ('base64;{}'.format(message)) p.populate_set_out( msg, 'alertHandler') + msg = 'infoleak:automatic-detection="base64";{}'.format(message) + p.populate_set_out(msg, 'Tags') + def save_base64_as_file(decode, type, hash, json_data): filename_b64 = os.path.join(os.environ['AIL_HOME'], diff --git a/bin/Bitcoin.py b/bin/Bitcoin.py index 42468759..5ec2199f 100755 --- a/bin/Bitcoin.py +++ b/bin/Bitcoin.py @@ -63,7 +63,14 @@ def search_key(content, message, paste): publisher.warning(to_print) msg = ('bitcoin;{}'.format(message)) p.populate_set_out( msg, 'alertHandler') + + msg = 'infoleak:automatic-detection="bitcoin-address";{}'.format(message) + p.populate_set_out(msg, 'Tags') + if(key): + msg = 'infoleak:automatic-detection="bitcoin-private-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') + to_print = 'Bitcoin;{};{};{};'.format(paste.p_source, paste.p_date, paste.p_name) publisher.warning('{}Detected {} Bitcoin private key;{}'.format( diff --git a/bin/Credential.py b/bin/Credential.py 
index fde80d12..5112f534 100755 --- a/bin/Credential.py +++ b/bin/Credential.py @@ -105,6 +105,9 @@ if __name__ == "__main__": msg = 'credential;{}'.format(filepath) p.populate_set_out(msg, 'alertHandler') + msg = 'infoleak:automatic-detection="credential";{}'.format(filepath) + p.populate_set_out(msg, 'Tags') + #Put in form, count occurences, then send to moduleStats creds_sites = {} site_occurence = re.findall(regex_site_for_stats, content) diff --git a/bin/CreditCards.py b/bin/CreditCards.py index a7441807..260d1345 100755 --- a/bin/CreditCards.py +++ b/bin/CreditCards.py @@ -85,6 +85,9 @@ if __name__ == "__main__": #send to Browse_warning_paste msg = 'creditcard;{}'.format(filename) p.populate_set_out(msg, 'alertHandler') + + msg = 'infoleak:automatic-detection="credit-card";{}'.format(filename) + p.populate_set_out(msg, 'Tags') else: publisher.info('{}CreditCard related;{}'.format(to_print, paste.p_path)) else: diff --git a/bin/Cve.py b/bin/Cve.py index 9ac4efc8..bd240260 100755 --- a/bin/Cve.py +++ b/bin/Cve.py @@ -34,6 +34,9 @@ def search_cve(message): #send to Browse_warning_paste msg = 'cve;{}'.format(filepath) p.populate_set_out(msg, 'alertHandler') + + msg = 'infoleak:automatic-detection="cve";{}'.format(filepath) + p.populate_set_out(msg, 'Tags') #Send to duplicate p.populate_set_out(filepath, 'Duplicate') diff --git a/bin/Keys.py b/bin/Keys.py index 9f39cf50..7b1ec7dc 100755 --- a/bin/Keys.py +++ b/bin/Keys.py @@ -28,47 +28,76 @@ def search_key(paste): if '-----BEGIN PGP MESSAGE-----' in content: publisher.warning('{} has a PGP enc message'.format(paste.p_name)) + msg = 'infoleak:automatic-detection="pgp-message";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN CERTIFICATE-----' in content: publisher.warning('{} has a certificate message'.format(paste.p_name)) + + msg = 'infoleak:automatic-detection="certificate";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN RSA PRIVATE KEY-----' in content: publisher.warning('{} has a RSA private key message'.format(paste.p_name)) print('rsa private key message found') + + msg = 'infoleak:automatic-detection="rsa-private-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN PRIVATE KEY-----' in content: publisher.warning('{} has a private key message'.format(paste.p_name)) print('private key message found') + + msg = 'infoleak:automatic-detection="private-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN ENCRYPTED PRIVATE KEY-----' in content: publisher.warning('{} has an encrypted private key message'.format(paste.p_name)) print('encrypted private key message found') + + msg = 'infoleak:automatic-detection="encrypted-private-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN OPENSSH PRIVATE KEY-----' in content: publisher.warning('{} has an openssh private key message'.format(paste.p_name)) print('openssh private key message found') + + msg = 'infoleak:automatic-detection="private-ssh-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN OpenVPN Static key V1-----' in content: publisher.warning('{} has an openssh private key message'.format(paste.p_name)) print('OpenVPN Static key message found') + + msg = 'infoleak:automatic-detection="vpn-static-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN DSA PRIVATE KEY-----' in content: publisher.warning('{} has a dsa private key message'.format(paste.p_name)) + + 
msg = 'infoleak:automatic-detection="dsa-private-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN EC PRIVATE KEY-----' in content: publisher.warning('{} has an ec private key message'.format(paste.p_name)) + + msg = 'infoleak:automatic-detection="ec-private-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if '-----BEGIN PGP PRIVATE KEY BLOCK-----' in content: publisher.warning('{} has a pgp private key block message'.format(paste.p_name)) + + msg = 'infoleak:automatic-detection="pgp-private-key";{}'.format(message) + p.populate_set_out(msg, 'Tags') find = True if find : diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh index aca72e8e..ed05f676 100755 --- a/bin/LAUNCH.sh +++ b/bin/LAUNCH.sh @@ -160,6 +160,8 @@ function launching_scripts { sleep 0.1 screen -S "Script_AIL" -X screen -t "alertHandler" bash -c './alertHandler.py; read x' sleep 0.1 + screen -S "Script_AIL" -X screen -t "Tags" bash -c './Tags.py; read x' + sleep 0.1 screen -S "Script_AIL" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x' } @@ -228,7 +230,7 @@ islogged=`screen -ls | egrep '[0-9]+.Logging_AIL' | cut -d. -f1` isqueued=`screen -ls | egrep '[0-9]+.Queue_AIL' | cut -d. -f1` isscripted=`screen -ls | egrep '[0-9]+.Script_AIL' | cut -d. -f1` -options=("Redis" "Ardb" "Logs" "Queues" "Scripts" "Killall" "Shutdown" "Update-config") +options=("Redis" "Ardb" "Logs" "Queues" "Scripts" "Killall" "Shutdown" "Update-config" "Update-thirdparty") menu() { echo "What do you want to Launch?:" @@ -328,6 +330,17 @@ for i in ${!options[@]}; do echo -e $GREEN"\t* Configuration up-to-date"$DEFAULT fi ;; + Update-thirdparty) + echo -e "\t* Updating thirdparty..." + bash -c "(cd ../var/www && ./update_thirdparty.sh)" + exitStatus=$? 
+ if [ $exitStatus -ge 1 ]; then + echo -e $RED"\t* Configuration not up-to-date"$DEFAULT + exit + else + echo -e $GREEN"\t* Configuration up-to-date"$DEFAULT + fi + ;; esac fi done diff --git a/bin/Mail.py b/bin/Mail.py index abc112a6..c1d8cf70 100755 --- a/bin/Mail.py +++ b/bin/Mail.py @@ -76,6 +76,9 @@ if __name__ == "__main__": p.populate_set_out(filename, 'Duplicate') p.populate_set_out('mail;{}'.format(filename), 'alertHandler') + msg = 'infoleak:automatic-detection="mail";{}'.format(filename) + p.populate_set_out(msg, 'Tags') + else: publisher.info(to_print) #Send to ModuleStats diff --git a/bin/Onion.py b/bin/Onion.py index 77ed75fe..277f1c71 100755 --- a/bin/Onion.py +++ b/bin/Onion.py @@ -152,6 +152,9 @@ if __name__ == "__main__": for url in fetch(p, r_cache, urls, domains_list, path): publisher.info('{}Checked {};{}'.format(to_print, url, PST.p_path)) p.populate_set_out('onion;{}'.format(PST.p_path), 'alertHandler') + + msg = 'infoleak:automatic-detection="onion";{}'.format(PST.p_path) + p.populate_set_out(msg, 'Tags') else: publisher.info('{}Onion related;{}'.format(to_print, PST.p_path)) diff --git a/bin/Phone.py b/bin/Phone.py index e3f0f908..213db2b3 100755 --- a/bin/Phone.py +++ b/bin/Phone.py @@ -36,6 +36,10 @@ def search_phone(message): msg = 'phone;{}'.format(message) p.populate_set_out(msg, 'alertHandler') #Send to duplicate + + msg = 'infoleak:automatic-detection="phone-number";{}'.format(message) + p.populate_set_out(msg, 'Tags') + p.populate_set_out(message, 'Duplicate') stats = {} for phone_number in results: diff --git a/bin/SQLInjectionDetection.py b/bin/SQLInjectionDetection.py index 9e28de72..117f3dc0 100755 --- a/bin/SQLInjectionDetection.py +++ b/bin/SQLInjectionDetection.py @@ -82,6 +82,9 @@ def analyse(url, path): p.populate_set_out(path, 'Duplicate') #send to Browse_warning_paste p.populate_set_out('sqlinjection;{}'.format(path), 'alertHandler') + + msg = 'infoleak:automatic-detection="sql-injection";{}'.format(path) + p.populate_set_out(msg, 'Tags') else: print("Potential SQL injection:") print(urllib.request.unquote(url)) diff --git a/bin/Tags.py b/bin/Tags.py new file mode 100755 index 00000000..f4939ec3 --- /dev/null +++ b/bin/Tags.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +""" +The Tags Module +================================ + +This module create tags. + +""" +import redis + +import time + +from pubsublogger import publisher +from Helper import Process +from packages import Paste + +if __name__ == '__main__': + + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. 
+ publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'Tags' + + # Setup the I/O queues + p = Process(config_section) + + server = redis.StrictRedis( + host=p.config.get("ARDB_Tags", "host"), + port=p.config.get("ARDB_Tags", "port"), + db=p.config.get("ARDB_Tags", "db"), + decode_responses=True) + + server_metadata = redis.StrictRedis( + host=p.config.get("ARDB_Metadata", "host"), + port=p.config.get("ARDB_Metadata", "port"), + db=p.config.get("ARDB_Metadata", "db"), + decode_responses=True) + + # Sent to the logging a description of the module + publisher.info("Tags module started") + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + + if message is None: + publisher.debug("{} queue is empty, waiting 10s".format(config_section)) + time.sleep(10) + continue + + else: + tag, path = message.split(';') + # add the tag to the tags word_list + res = server.sadd('list_tags', tag) + if res == 1: + print("new tags added : {}".format(tag)) + # add the path to the tag set + res = server.sadd(tag, path) + if res == 1: + print("new paste: {}".format(path)) + print(" tagged: {}".format(tag)) + server_metadata.sadd('tag:'+path, tag) diff --git a/bin/packages/Paste.py b/bin/packages/Paste.py index ccaf3400..317743f4 100755 --- a/bin/packages/Paste.py +++ b/bin/packages/Paste.py @@ -340,7 +340,7 @@ class Paste(object): Save a new duplicate on others pastes """ for hash_type, path, percent, date in list_value: - to_add = [hash_type, self.p_path, percent, date] + to_add = (hash_type, self.p_path, percent, date) self.store_duplicate.sadd('dup:'+path,to_add) def _get_from_redis(self, r_serv): diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg index b9e29506..975b7b2c 100644 --- a/bin/packages/modules.cfg +++ b/bin/packages/modules.cfg @@ -49,15 +49,15 @@ publish = Redis_CreditCards,Redis_Mail,Redis_Onion,Redis_Web,Redis_Credential,Re [CreditCards] subscribe = Redis_CreditCards -publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler +publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler,Redis_Tags [Mail] subscribe = Redis_Mail -publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler +publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler,Redis_Tags [Onion] subscribe = Redis_Onion -publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_alertHandler +publish = Redis_ValidOnion,ZMQ_FetchedOnion,Redis_alertHandler,Redis_Tags #publish = Redis_Global,Redis_ValidOnion,ZMQ_FetchedOnion,Redis_alertHandler [DumpValidOnion] @@ -72,7 +72,7 @@ subscribe = Redis_Url [SQLInjectionDetection] subscribe = Redis_Url -publish = Redis_alertHandler,Redis_Duplicate +publish = Redis_alertHandler,Redis_Duplicate,Redis_Tags [ModuleStats] subscribe = Redis_ModuleStats @@ -80,9 +80,12 @@ subscribe = Redis_ModuleStats [alertHandler] subscribe = Redis_alertHandler +[Tags] +subscribe = Redis_Tags + #[send_to_queue] #subscribe = Redis_Cve -#publish = Redis_alertHandler +#publish = Redis_alertHandler,Redis_Tags [SentimentAnalysis] subscribe = Redis_Global @@ -92,28 +95,28 @@ subscribe = Redis_Global [Credential] subscribe = Redis_Credential -publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler +publish = Redis_Duplicate,Redis_ModuleStats,Redis_alertHandler,Redis_Tags [Cve] subscribe = Redis_Cve -publish = Redis_alertHandler,Redis_Duplicate +publish = Redis_alertHandler,Redis_Duplicate,Redis_Tags [Phone] subscribe = Redis_Global -publish = 
Redis_Duplicate,Redis_alertHandler +publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags [Keys] subscribe = Redis_Global -publish = Redis_Duplicate,Redis_alertHandler +publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags [ApiKey] subscribe = Redis_ApiKey -publish = Redis_Duplicate,Redis_alertHandler +publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags [Base64] subscribe = Redis_Global -publish = Redis_Duplicate,Redis_alertHandler +publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags [Bitcoin] subscribe = Redis_Global -publish = Redis_Duplicate,Redis_alertHandler +publish = Redis_Duplicate,Redis_alertHandler,Redis_Tags diff --git a/doc/screenshots/galaxies_list.png b/doc/screenshots/galaxies_list.png new file mode 100644 index 00000000..31ae76d8 Binary files /dev/null and b/doc/screenshots/galaxies_list.png differ diff --git a/doc/screenshots/galaxy_tag_edit.png b/doc/screenshots/galaxy_tag_edit.png new file mode 100644 index 00000000..93806873 Binary files /dev/null and b/doc/screenshots/galaxy_tag_edit.png differ diff --git a/doc/screenshots/paste_tags_edit.png b/doc/screenshots/paste_tags_edit.png new file mode 100644 index 00000000..7127e43a Binary files /dev/null and b/doc/screenshots/paste_tags_edit.png differ diff --git a/doc/screenshots/tag_delete_confirm.png b/doc/screenshots/tag_delete_confirm.png new file mode 100644 index 00000000..88ad5197 Binary files /dev/null and b/doc/screenshots/tag_delete_confirm.png differ diff --git a/doc/screenshots/tags.png b/doc/screenshots/tags.png new file mode 100644 index 00000000..11ce1812 Binary files /dev/null and b/doc/screenshots/tags.png differ diff --git a/doc/screenshots/tags2.png b/doc/screenshots/tags2.png new file mode 100644 index 00000000..81ce4853 Binary files /dev/null and b/doc/screenshots/tags2.png differ diff --git a/doc/screenshots/tags_search.png b/doc/screenshots/tags_search.png new file mode 100644 index 00000000..892e15da Binary files /dev/null and b/doc/screenshots/tags_search.png differ diff --git a/installing_deps.sh b/installing_deps.sh index 0cc9b2f2..6bb73f6b 100755 --- a/installing_deps.sh +++ b/installing_deps.sh @@ -26,10 +26,11 @@ sudo apt-get install libev-dev libgmp-dev -y #Need for generate-data-flow graph sudo apt-get install graphviz -y -#needed for mathplotlib -sudo easy_install -U distribute +# install nosetests +sudo pip install nose -y + # ssdeep -sudo apt-get install libfuzzy-dev +sudo apt-get install libfuzzy-dev -y sudo apt-get install build-essential libffi-dev automake autoconf libtool -y # REDIS # @@ -70,10 +71,6 @@ if [ ! 
-f bin/packages/config.cfg ]; then cp bin/packages/config.cfg.sample bin/packages/config.cfg fi -pushd var/www/ -sudo ./update_thirdparty.sh -popd - if [ -z "$VIRTUAL_ENV" ]; then virtualenv -p python3 AILENV @@ -88,6 +85,10 @@ if [ -z "$VIRTUAL_ENV" ]; then fi +pushd var/www/ +./update_thirdparty.sh +popd + year1=20`date +%y` year2=20`date --date='-1 year' +%y` mkdir -p $AIL_HOME/{PASTES,Blooms,dumps} @@ -102,8 +103,6 @@ popd # Py tlsh pushd tlsh/py_ext -#python setup.py build -#python setup.py install python3 setup.py build python3 setup.py install @@ -112,8 +111,5 @@ HOME=$(pwd) python3 -m textblob.download_corpora python3 -m nltk.downloader vader_lexicon python3 -m nltk.downloader punkt -# install nosetests -sudo pip install nose - #Create the file all_module and update the graph in doc $AIL_HOME/doc/generate_modules_data_flow_graph.sh diff --git a/var/www/Flask_server.py b/var/www/Flask_server.py index 0be6854a..a03999ab 100755 --- a/var/www/Flask_server.py +++ b/var/www/Flask_server.py @@ -18,6 +18,8 @@ sys.path.append('./modules/') import Paste from Date import Date +from pytaxonomies import Taxonomies + # Import config import Flask_config @@ -113,6 +115,25 @@ def searchbox(): return render_template("searchbox.html") +# ========== INITIAL taxonomies ============ +r_serv_tags = redis.StrictRedis( + host=cfg.get("ARDB_Tags", "host"), + port=cfg.getint("ARDB_Tags", "port"), + db=cfg.getint("ARDB_Tags", "db"), + decode_responses=True) +# add default ail taxonomies +r_serv_tags.sadd('active_taxonomies', 'infoleak') +r_serv_tags.sadd('active_taxonomies', 'gdpr') +r_serv_tags.sadd('active_taxonomies', 'fpf') +# add default tags +taxonomies = Taxonomies() +for tag in taxonomies.get('infoleak').machinetags(): + r_serv_tags.sadd('active_tag_infoleak', tag) +for tag in taxonomies.get('gdpr').machinetags(): + r_serv_tags.sadd('active_tag_infoleak', tag) +for tag in taxonomies.get('fpf').machinetags(): + r_serv_tags.sadd('active_tag_infoleak', tag) + # ============ MAIN ============ if __name__ == "__main__": diff --git a/var/www/modules/Flask_config.py b/var/www/modules/Flask_config.py index 41745f21..26edccfa 100644 --- a/var/www/modules/Flask_config.py +++ b/var/www/modules/Flask_config.py @@ -82,3 +82,5 @@ max_preview_char = int(cfg.get("Flask", "max_preview_char")) # Maximum number of max_preview_modal = int(cfg.get("Flask", "max_preview_modal")) # Maximum number of character to display in the modal DiffMaxLineLength = int(cfg.get("Flask", "DiffMaxLineLength"))#Use to display the estimated percentage instead of a raw value + +bootstrap_label = ['primary', 'success', 'danger', 'warning', 'info'] diff --git a/var/www/modules/Tags/Flask_Tags.py b/var/www/modules/Tags/Flask_Tags.py new file mode 100644 index 00000000..a8dd4c8b --- /dev/null +++ b/var/www/modules/Tags/Flask_Tags.py @@ -0,0 +1,845 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +''' + Flask functions and routes for the trending modules page +''' +import redis +from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for + +import json +from datetime import datetime + +import Paste + +from pytaxonomies import Taxonomies +from pymispgalaxies import Galaxies, Clusters + +# ============ VARIABLES ============ +import Flask_config + +app = Flask_config.app +cfg = Flask_config.cfg +r_serv_tags = Flask_config.r_serv_tags +r_serv_metadata = Flask_config.r_serv_metadata +max_preview_char = Flask_config.max_preview_char +max_preview_modal = Flask_config.max_preview_modal +bootstrap_label = 
Flask_config.bootstrap_label + +Tags = Blueprint('Tags', __name__, template_folder='templates') + +galaxies = Galaxies() +clusters = Clusters(skip_duplicates=True) + +list_all_tags = {} +for name, c in clusters.items(): #galaxy name + tags + list_all_tags[name] = c + +list_galaxies = [] +for g in galaxies.values(): + list_galaxies.append(g.to_json()) + +list_clusters = [] +for c in clusters.values(): + list_clusters.append(c.to_json()) + +# tags numbers in galaxies +total_tags = {} +for name, tags in clusters.items(): #galaxie name + tags + total_tags[name] = len(tags) + +# ============ FUNCTIONS ============ +def one(): + return 1 + +def get_tags_with_synonyms(tag): + str_synonyms = ' - synonyms: ' + synonyms = r_serv_tags.smembers('synonym_tag_' + tag) + # synonyms to display + for synonym in synonyms: + str_synonyms = str_synonyms + synonym + ', ' + # add real tag + if str_synonyms != ' - synonyms: ': + return {'name':tag + str_synonyms,'id':tag} + else: + return {'name':tag,'id':tag} + +# ============= ROUTES ============== + +@Tags.route("/Tags/", methods=['GET']) +def Tags_page(): + return render_template("Tags.html") + +@Tags.route("/Tags/get_all_tags") +def get_all_tags(): + + all_tags = r_serv_tags.smembers('list_tags') + + list_tags = [] + for tag in all_tags: + t = tag.split(':')[0] + # add synonym + str_synonyms = ' - synonyms: ' + if t == 'misp-galaxy': + synonyms = r_serv_tags.smembers('synonym_tag_' + tag) + for synonym in synonyms: + str_synonyms = str_synonyms + synonym + ', ' + # add real tag + if str_synonyms != ' - synonyms: ': + list_tags.append({'name':tag + str_synonyms,'id':tag}) + else: + list_tags.append({'name':tag,'id':tag}) + + return jsonify(list_tags) + +@Tags.route("/Tags/get_all_tags_taxonomies") +def get_all_tags_taxonomies(): + + taxonomies = Taxonomies() + list_taxonomies = list(taxonomies.keys()) + + active_taxonomie = r_serv_tags.smembers('active_taxonomies') + + list_tags = [] + for taxonomie in active_taxonomie: + #l_tags = taxonomies.get(taxonomie).machinetags() + l_tags = r_serv_tags.smembers('active_tag_' + taxonomie) + for tag in l_tags: + list_tags.append( tag ) + + return jsonify(list_tags) + +@Tags.route("/Tags/get_all_tags_galaxies") +def get_all_tags_galaxy(): + + active_galaxies = r_serv_tags.smembers('active_galaxies') + + list_tags = [] + for galaxy in active_galaxies: + l_tags = r_serv_tags.smembers('active_tag_galaxies_' + galaxy) + for tag in l_tags: + list_tags.append(get_tags_with_synonyms(tag)) + + return jsonify(list_tags) + +@Tags.route("/Tags/get_tags_taxonomie") +def get_tags_taxonomie(): + + taxonomie = request.args.get('taxonomie') + + taxonomies = Taxonomies() + list_taxonomies = list(taxonomies.keys()) + + active_taxonomie = r_serv_tags.smembers('active_taxonomies') + + #verify input + if taxonomie in list_taxonomies: + if taxonomie in active_taxonomie: + + list_tags = [] + l_tags = r_serv_tags.smembers('active_tag_' + taxonomie) + for tag in l_tags: + list_tags.append( tag ) + + return jsonify(list_tags) + + else: + return 'this taxonomie is disable' + else: + return 'INCORRECT INPUT' + +@Tags.route("/Tags/get_tags_galaxy") +def get_tags_galaxy(): + + galaxy = request.args.get('galaxy') + + active_galaxies = r_serv_tags.smembers('active_galaxies') + + #verify input + if galaxy in active_galaxies: + + list_tags = [] + l_tags = r_serv_tags.smembers('active_tag_galaxies_' + galaxy) + for tag in l_tags: + list_tags.append(get_tags_with_synonyms(tag)) + + return jsonify(list_tags) + + else: + return 'this galaxy is disable' + + 
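The tag-selection endpoints above only read a handful of Redis/ARDB sets that the activation routes further down in this file maintain. Below is a minimal sketch of that key layout, not part of the patch itself: the connection parameters and the botnet/Mirai/Satori galaxy values are placeholders of mine, and only the set names and tag formats are taken from the code in this diff.

```python
import redis
from pytaxonomies import Taxonomies

# Placeholder connection standing in for the [ARDB_Tags] section of bin/packages/config.cfg.
r = redis.StrictRedis(host='localhost', port=6382, db=2, decode_responses=True)

# Enabling a taxonomy (see active_taxonomie() below) stores its name and its machinetags.
taxonomies = Taxonomies()
r.sadd('active_taxonomies', 'infoleak')
for tag in taxonomies.get('infoleak').machinetags():
    r.sadd('active_tag_infoleak', tag)

# Enabling a galaxy (see active_galaxy() below) stores namespaced misp-galaxy machinetags
# plus per-tag synonym sets used by get_tags_with_synonyms().
galaxy, value, synonym = 'botnet', 'Mirai', 'Satori'   # illustrative values only
tag_name = 'misp-galaxy:{}="{}"'.format(galaxy, value)
r.sadd('active_galaxies', galaxy)
r.sadd('active_tag_galaxies_' + galaxy, tag_name)
r.sadd('synonym_tag_' + tag_name, synonym)

# /Tags/get_tags_taxonomie and /Tags/get_tags_galaxy then simply return these sets:
print(r.smembers('active_tag_infoleak'))
print(r.smembers('active_tag_galaxies_' + galaxy))
```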
+@Tags.route("/Tags/get_tagged_paste") +def get_tagged_paste(): + + tags = request.args.get('ltags') + + list_tags = tags.split(',') + list_tag = [] + for tag in list_tags: + list_tag.append(tag.replace('"','\"')) + + # TODO verify input + + if(type(list_tags) is list): + # no tag + if list_tags is False: + print('empty') + # 1 tag + elif len(list_tags) < 2: + tagged_pastes = r_serv_tags.smembers(list_tags[0]) + + # 2 tags or more + else: + tagged_pastes = r_serv_tags.sinter(list_tags[0], *list_tags[1:]) + + else : + return 'INCORRECT INPUT' + + #TODO FIXME + currentSelectYear = int(datetime.now().year) + + all_content = [] + paste_date = [] + paste_linenum = [] + all_path = [] + allPastes = list(tagged_pastes) + paste_tags = [] + + for path in allPastes[0:50]: ######################moduleName + all_path.append(path) + paste = Paste.Paste(path) + content = paste.get_p_content() + content_range = max_preview_char if len(content)>max_preview_char else len(content)-1 + all_content.append(content[0:content_range].replace("\"", "\'").replace("\r", " ").replace("\n", " ")) + curr_date = str(paste._get_p_date()) + curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:] + paste_date.append(curr_date) + paste_linenum.append(paste.get_lines_info()[0]) + p_tags = r_serv_metadata.smembers('tag:'+path) + complete_tags = [] + l_tags = [] + for tag in p_tags: + complete_tag = tag + + tag = tag.split('=') + if len(tag) > 1: + if tag[1] != '': + tag = tag[1][1:-1] + # no value + else: + tag = tag[0][1:-1] + # use for custom tags + else: + tag = tag[0] + + l_tags.append( (tag,complete_tag) ) + + paste_tags.append(l_tags) + + if len(allPastes) > 10: + finished = False + else: + finished = True + + return render_template("tagged.html", + year=currentSelectYear, + all_path=all_path, + tags=tags, + list_tag = list_tag, + paste_tags=paste_tags, + bootstrap_label=bootstrap_label, + content=all_content, + paste_date=paste_date, + paste_linenum=paste_linenum, + char_to_display=max_preview_modal, + finished=finished) + + +@Tags.route("/Tags/remove_tag") +def remove_tag(): + + #TODO verify input + path = request.args.get('paste') + tag = request.args.get('tag') + + #remove tag + r_serv_metadata.srem('tag:'+path, tag) + r_serv_tags.srem(tag, path) + + if r_serv_tags.scard(tag) == 0: + r_serv_tags.srem('list_tags', tag) + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=path)) + +@Tags.route("/Tags/confirm_tag") +def confirm_tag(): + + #TODO verify input + path = request.args.get('paste') + tag = request.args.get('tag') + + if(tag[9:28] == 'automatic-detection'): + + #remove automatic tag + r_serv_metadata.srem('tag:'+path, tag) + r_serv_tags.srem(tag, path) + + tag = tag.replace('automatic-detection','analyst-detection', 1) + #add analyst tag + r_serv_metadata.sadd('tag:'+path, tag) + r_serv_tags.sadd(tag, path) + #add new tag in list of all used tags + r_serv_tags.sadd('list_tags', tag) + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=path)) + + return 'incompatible tag' + +@Tags.route("/Tags/addTags") +def addTags(): + + tags = request.args.get('tags') + tagsgalaxies = request.args.get('tagsgalaxies') + path = request.args.get('path') + + list_tag = tags.split(',') + list_tag_galaxies = tagsgalaxies.split(',') + + taxonomies = Taxonomies() + active_taxonomies = r_serv_tags.smembers('active_taxonomies') + + active_galaxies = r_serv_tags.smembers('active_galaxies') + + if not path: + return 'INCORRECT INPUT0' + + if list_tag != ['']: + for tag in list_tag: + # verify input + 
tax = tag.split(':')[0] + if tax in active_taxonomies: + if tag in r_serv_tags.smembers('active_tag_' + tax): + + #add tag + r_serv_metadata.sadd('tag:'+path, tag) + r_serv_tags.sadd(tag, path) + #add new tag in list of all used tags + r_serv_tags.sadd('list_tags', tag) + + else: + return 'INCORRECT INPUT1' + else: + return 'INCORRECT INPUT2' + + if list_tag_galaxies != ['']: + for tag in list_tag_galaxies: + # verify input + gal = tag.split(':')[1] + gal = gal.split('=')[0] + + if gal in active_galaxies: + if tag in r_serv_tags.smembers('active_tag_galaxies_' + gal): + + #add tag + r_serv_metadata.sadd('tag:'+path, tag) + r_serv_tags.sadd(tag, path) + #add new tag in list of all used tags + r_serv_tags.sadd('list_tags', tag) + + else: + return 'INCORRECT INPUT3' + else: + return 'INCORRECT INPUT4' + + return redirect(url_for('showsavedpastes.showsavedpaste', paste=path)) + + +@Tags.route("/Tags/taxonomies") +def taxonomies(): + + active_taxonomies = r_serv_tags.smembers('active_taxonomies') + + taxonomies = Taxonomies() + list_taxonomies = list(taxonomies.keys()) + + id = [] + name = [] + description = [] + version = [] + enabled = [] + n_tags = [] + + for taxonomie in list_taxonomies: + id.append(taxonomie) + name.append(taxonomies.get(taxonomie).name) + description.append(taxonomies.get(taxonomie).description) + version.append(taxonomies.get(taxonomie).version) + if taxonomie in active_taxonomies: + enabled.append(True) + else: + enabled.append(False) + + n = str(r_serv_tags.scard('active_tag_' + taxonomie)) + n_tags.append(n + '/' + str(len(taxonomies.get(taxonomie).machinetags())) ) + + return render_template("taxonomies.html", + id=id, + all_name = name, + description = description, + version = version, + enabled = enabled, + n_tags=n_tags) + +@Tags.route("/Tags/edit_taxonomie") +def edit_taxonomie(): + + taxonomies = Taxonomies() + list_taxonomies = list(taxonomies.keys()) + + id = request.args.get('taxonomie') + + #verify input + if id in list(taxonomies.keys()): + active_tag = r_serv_tags.smembers('active_tag_' + id) + list_tag = taxonomies.get(id).machinetags() + list_tag_desc = taxonomies.get(id).machinetags_expanded() + + active_taxonomies = r_serv_tags.smembers('active_taxonomies') + if id in active_taxonomies: + active = True + else: + active = False + + n = str(r_serv_tags.scard('active_tag_' + id)) + badge = n + '/' + str(len(taxonomies.get(id).machinetags())) + + name = taxonomies.get(id).name + description = taxonomies.get(id).description + version = taxonomies.get(id).version + + status = [] + for tag in list_tag: + if tag in active_tag: + status.append(True) + else: + status.append(False) + + return render_template("edit_taxonomie.html", + id=id, + name=name, + badge = badge, + description = description, + version = version, + active=active, + all_tags = list_tag, + list_tag_desc=list_tag_desc, + status = status) + + else: + return 'INVALID TAXONOMIE' + +@Tags.route("/Tags/disable_taxonomie") +def disable_taxonomie(): + + taxonomies = Taxonomies() + list_taxonomies = list(taxonomies.keys()) + + id = request.args.get('taxonomie') + + if id in list_taxonomies: + r_serv_tags.srem('active_taxonomies', id) + for tag in taxonomies.get(id).machinetags(): + r_serv_tags.srem('active_tag_' + id, tag) + + return redirect(url_for('Tags.taxonomies')) + + else: + return "INCORRECT INPUT" + + + +@Tags.route("/Tags/active_taxonomie") +def active_taxonomie(): + + taxonomies = Taxonomies() + list_taxonomies = list(taxonomies.keys()) + + id = request.args.get('taxonomie') + + # verify 
input + if id in list_taxonomies: + r_serv_tags.sadd('active_taxonomies', id) + for tag in taxonomies.get(id).machinetags(): + r_serv_tags.sadd('active_tag_' + id, tag) + + return redirect(url_for('Tags.taxonomies')) + + else: + return "INCORRECT INPUT" + +@Tags.route("/Tags/edit_taxonomie_tag") +def edit_taxonomie_tag(): + + taxonomies = Taxonomies() + list_taxonomies = list(taxonomies.keys()) + + arg1 = request.args.getlist('tag_enabled') + arg2 = request.args.getlist('tag_disabled') + + id = request.args.get('taxonomie') + + #verify input + if id in list_taxonomies: + list_tag = taxonomies.get(id).machinetags() + + #check tags validity + if ( all(elem in list_tag for elem in arg1) or (len(arg1) == 0) ) and ( all(elem in list_tag for elem in arg2) or (len(arg2) == 0) ): + + active_tag = r_serv_tags.smembers('active_tag_' + id) + + diff = list(set(arg1) ^ set(list_tag)) + + #remove tags + for tag in diff: + r_serv_tags.srem('active_tag_' + id, tag) + + #all tags unchecked + if len(arg1) == 0 and len(arg2) == 0: + r_serv_tags.srem('active_taxonomies', id) + + #add new tags + for tag in arg2: + r_serv_tags.sadd('active_taxonomies', id) + r_serv_tags.sadd('active_tag_' + id, tag) + + return redirect(url_for('Tags.taxonomies')) + else: + return "INCORRECT INPUT" + + else: + return "INCORRECT INPUT" + +@Tags.route("/Tags/galaxies") +def galaxies(): + + active_galaxies = r_serv_tags.smembers('active_galaxies') + + name = [] + icon = [] + version = [] + all_type = [] + namespace = [] + description = [] + enabled = [] + n_tags = [] + + for galaxie_json in list_galaxies: + + galaxie = json.loads(galaxie_json) + + name.append(galaxie['name']) + icon.append(galaxie['icon']) + version.append(galaxie['version']) + type = galaxie['type'] + all_type.append(type) + namespace.append(galaxie['namespace']) + description.append(galaxie['description']) + + + if type in active_galaxies: + enabled.append(True) + else: + enabled.append(False) + + n = str(r_serv_tags.scard('active_tag_galaxies_' + type)) + n_tags.append(n + '/' + str(total_tags[type]) ) + + return render_template("galaxies.html", + name=name, + icon = icon, + version = version, + description = description, + namespace = namespace, + all_type = all_type, + enabled = enabled, + n_tags=n_tags) + + +@Tags.route("/Tags/edit_galaxy") +def edit_galaxy(): + + id = request.args.get('galaxy') + + for clusters_json in list_clusters: + + #get clusters + cluster = json.loads(clusters_json) + + if cluster['type'] == id: + + type = id + active_tag = r_serv_tags.smembers('active_tag_galaxies_' + type) + + n = str(r_serv_tags.scard('active_tag_galaxies_' + type)) + badge = n + '/' + str(total_tags[type]) + + name = cluster['name'] + description = cluster['description'] + version = cluster['version'] + source = cluster['source'] + + val = cluster['values'] + + tags = [] + for data in val: + try: + meta = data['meta'] + except KeyError: + meta = [] + tag_name = data['value'] + tag_name = 'misp-galaxy:{}="{}"'.format(type, tag_name) + try: + tag_description = data['description'] + except KeyError: + tag_description = '' + + tags.append( (tag_name, tag_description, meta) ) + + status = [] + for tag in tags: + if tag[0] in active_tag: + status.append(True) + else: + status.append(False) + + active_galaxies = r_serv_tags.smembers('active_galaxies') + if id in active_galaxies: + active = True + else: + active = False + + return render_template("edit_galaxy.html", + id = type, + name = name, + badge = badge, + description = description, + version = version, + active = 
active, + tags = tags, + status = status) + + + return 'INVALID GALAXY' + + +@Tags.route("/Tags/active_galaxy") +def active_galaxy(): + + id = request.args.get('galaxy') + + # verify input + try: + l_tags = list_all_tags[id] + except KeyError: + return "INCORRECT INPUT" + + r_serv_tags.sadd('active_galaxies', id) + for tag in l_tags: + r_serv_tags.sadd('active_tag_galaxies_' + id, 'misp-galaxy:{}="{}"'.format(id, tag)) + + #save synonyms + for clusters_json in list_clusters: + + #get clusters + cluster = json.loads(clusters_json) + + if cluster['type'] == id: + + val = cluster['values'] + + tags = [] + for data in val: + try: + meta = data['meta'] + synonyms = meta['synonyms'] + tag_name = data['value'] + tag_name = 'misp-galaxy:{}="{}"'.format(id, tag_name) + #save synonyms + for synonym in synonyms: + r_serv_tags.sadd('synonym_tag_' + tag_name, synonym) + + except KeyError: + pass + + break + + return redirect(url_for('Tags.galaxies')) + + +@Tags.route("/Tags/disable_galaxy") +def disable_galaxy(): + + id = request.args.get('galaxy') + + # verify input + try: + l_tags = list_all_tags[id] + except KeyError: + return "INCORRECT INPUT" + + r_serv_tags.srem('active_galaxies', id) + for tag in l_tags: + tag_name = 'misp-galaxy:{}="{}"'.format(id, tag) + r_serv_tags.srem('active_tag_galaxies_' + id, tag_name) + r_serv_tags.delete('synonym_tag_' + tag_name) + + return redirect(url_for('Tags.galaxies')) + + +@Tags.route("/Tags/edit_galaxy_tag") +def edit_galaxy_tag(): + + arg1 = request.args.getlist('tag_enabled') + arg2 = request.args.getlist('tag_disabled') + + id = request.args.get('galaxy') + + #verify input + try: + l_tags = list_all_tags[id] + except KeyError: + return "INCORRECT INPUT" + + #get full tags + list_tag = [] + for tag in l_tags: + list_tag.append('misp-galaxy:{}="{}"'.format(id, tag)) + + + #check tags validity + if ( all(elem in list_tag for elem in arg1) or (len(arg1) == 0) ) and ( all(elem in list_tag for elem in arg2) or (len(arg2) == 0) ): + + active_tag = r_serv_tags.smembers('active_tag_galaxies_' + id) + + diff = list(set(arg1) ^ set(list_tag)) + + #remove tags + for tag in diff: + r_serv_tags.srem('active_tag_galaxies_' + id, tag) + r_serv_tags.delete('synonym_tag_' + tag) + + #all tags unchecked + if len(arg1) == 0 and len(arg2) == 0: + r_serv_tags.srem('active_galaxies', id) + + #add new tags + for tag in arg2: + r_serv_tags.sadd('active_galaxies', id) + r_serv_tags.sadd('active_tag_galaxies_' + id, tag) + + #get tags synonyms + for clusters_json in list_clusters: + + #get clusters + cluster = json.loads(clusters_json) + + if cluster['type'] == id: + + val = cluster['values'] + + tags = [] + for data in val: + try: + meta = data['meta'] + synonyms = meta['synonyms'] + tag_name = data['value'] + tag_name = 'misp-galaxy:{}="{}"'.format(id, tag_name) + if tag_name in arg2: + #save synonyms + for synonym in synonyms: + r_serv_tags.sadd('synonym_tag_' + tag_name, synonym) + + except KeyError: + pass + break + + return redirect(url_for('Tags.galaxies')) + + else: + return "INCORRECT INPUT" + +@Tags.route("/Tags/tag_galaxy_info") +def tag_galaxy_info(): + + galaxy = request.args.get('galaxy') + tag = request.args.get('tag') + + full_tag = tag + title = tag.split(':')[1] + tag = tag.split('=')[1] + tag = tag[1:-1] + + #get clusters + for clusters_json in list_clusters: + cluster = json.loads(clusters_json) + + if cluster['type'] == galaxy: + val = cluster['values'] + source = cluster['source'] + + for data in val: + if tag == data['value']: + try: + description = 
data['description'] + except KeyError: + description = '' + if r_serv_tags.sismember('active_tag_galaxies_' + galaxy, full_tag): + active = True + else: + active = False + + synonyms = [] + metadata = [] + list_metadata = [] + try: + meta = data['meta'] + for key in meta: + if key != 'synonyms': + if type(meta[key]) is list: + for item in meta[key]: + list_metadata.append(key + ' : ' + item) + else: + list_metadata.append(key + ' : ' + meta[key]) + try: + synonyms = meta['synonyms'] + bool_synonyms = True + except KeyError: + synonyms = [] + bool_synonyms = False + except KeyError: + pass + + if synonyms: + bool_synonyms = True + else: + bool_synonyms = False + if list_metadata: + metadata = True + else: + metadata = False + + return render_template("tag_galaxy_info.html", + title = title, + description = description, + source = source, + active = active, + synonyms = synonyms, + bool_synonyms = bool_synonyms, + metadata = metadata, + list_metadata = list_metadata) + + return 'INVALID INPUT' + +# ========= REGISTRATION ========= +app.register_blueprint(Tags) diff --git a/var/www/modules/Tags/templates/Tags.html b/var/www/modules/Tags/templates/Tags.html new file mode 100644 index 00000000..f3cf1a44 --- /dev/null +++ b/var/www/modules/Tags/templates/Tags.html @@ -0,0 +1,103 @@ + + + +
[New Jinja2 templates under var/www/modules/Tags/templates/ — Tags.html and, per the render_template() calls above, presumably taxonomies.html, edit_taxonomie.html, galaxies.html, edit_galaxy.html, tagged.html and tag_galaxy_info.html — whose markup did not survive extraction. The recoverable fragments show: a galaxy/taxonomy listing table (columns Name, Description, Namespace, Version, Enabled, plus an "Active Tags" counter and per-row enable/edit buttons); a tagged-paste listing table (columns #, Path, Date, # of lines, Action) that renders each paste's tags as coloured labels; and updated "load more" JavaScript that appends DataTable rows carrying the same tag labels.]
" + ] ).draw( false ); + $("#myTable_"+moduleName).attr('data-numElem', curr_numElem+1); } } $("#load_more_json_button1").removeAttr('disabled'); @@ -122,7 +137,6 @@ $("#myTable_"+moduleName).attr('data-numElem', "{{ all_path|length }}"); $(document).ready(function(){ $('[data-toggle="tooltip"]').tooltip(); $("[data-toggle='modal']").off('click.openmodal').on("click.openmodal", function (event) { - //get_html_and_update_modal(event); get_html_and_update_modal(event, $(this)); }); diff --git a/var/www/modules/search/Flask_search.py b/var/www/modules/search/Flask_search.py index afce2452..818e3e61 100644 --- a/var/www/modules/search/Flask_search.py +++ b/var/www/modules/search/Flask_search.py @@ -22,8 +22,10 @@ import Flask_config app = Flask_config.app cfg = Flask_config.cfg r_serv_pasteName = Flask_config.r_serv_pasteName +r_serv_metadata = Flask_config.r_serv_metadata max_preview_char = Flask_config.max_preview_char max_preview_modal = Flask_config.max_preview_modal +bootstrap_label = Flask_config.bootstrap_label baseindexpath = os.path.join(os.environ['AIL_HOME'], cfg.get("Indexer", "path")) @@ -95,6 +97,7 @@ def search(): c = [] #preview of the paste content paste_date = [] paste_size = [] + paste_tags = [] index_name = request.form['index_name'] num_elem_to_get = 50 @@ -119,13 +122,15 @@ def search(): # Search full line schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT) + print(selected_index) ix = index.open_dir(selected_index) with ix.searcher() as searcher: query = QueryParser("content", ix.schema).parse(" ".join(q)) results = searcher.search_page(query, 1, pagelen=num_elem_to_get) for x in results: r.append(x.items()[0][1]) - paste = Paste.Paste(x.items()[0][1]) + path = x.items()[0][1] + paste = Paste.Paste(path) content = paste.get_p_content() content_range = max_preview_char if len(content)>max_preview_char else len(content)-1 c.append(content[0:content_range]) @@ -133,6 +138,24 @@ def search(): curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:] paste_date.append(curr_date) paste_size.append(paste._get_p_size()) + p_tags = r_serv_metadata.smembers('tag:'+path) + l_tags = [] + for tag in p_tags: + complete_tag = tag + tag = tag.split('=') + if len(tag) > 1: + if tag[1] != '': + tag = tag[1][1:-1] + # no value + else: + tag = tag[0][1:-1] + # use for custom tags + else: + tag = tag[0] + + l_tags.append( (tag, complete_tag) ) + + paste_tags.append(l_tags) results = searcher.search(query) num_res = len(results) @@ -142,6 +165,8 @@ def search(): query=request.form['query'], paste_date=paste_date, paste_size=paste_size, char_to_display=max_preview_modal, num_res=num_res, index_min=index_min, index_max=index_max, + bootstrap_label=bootstrap_label, + paste_tags=paste_tags, index_list=get_index_list(selected_index) ) @@ -165,6 +190,7 @@ def get_more_search_result(): preview_array = [] date_array = [] size_array = [] + list_tags = [] schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT) @@ -173,8 +199,9 @@ def get_more_search_result(): query = QueryParser("content", ix.schema).parse(" ".join(q)) results = searcher.search_page(query, page_offset, num_elem_to_get) for x in results: - path_array.append(x.items()[0][1]) - paste = Paste.Paste(x.items()[0][1]) + path = x.items()[0][1] + path_array.append(path) + paste = Paste.Paste(path) content = paste.get_p_content() content_range = max_preview_char if len(content)>max_preview_char else len(content)-1 preview_array.append(content[0:content_range]) @@ -182,11 +209,30 @@ def 
get_more_search_result(): curr_date = curr_date[0:4]+'/'+curr_date[4:6]+'/'+curr_date[6:] date_array.append(curr_date) size_array.append(paste._get_p_size()) + p_tags = r_serv_metadata.smembers('tag:'+path) + l_tags = [] + for tag in p_tags: + tag = tag.split('=') + if len(tag) > 1: + if tag[1] != '': + tag = tag[1][1:-1] + # no value + else: + tag = tag[0][1:-1] + # use for custom tags + else: + tag = tag[0] + + l_tags.append(tag) + list_tags.append(l_tags) + to_return = {} to_return["path_array"] = path_array to_return["preview_array"] = preview_array to_return["date_array"] = date_array to_return["size_array"] = size_array + to_return["list_tags"] = list_tags + to_return["bootstrap_label"] = bootstrap_label if len(path_array) < num_elem_to_get: #pagelength to_return["moreData"] = False else: diff --git a/var/www/modules/search/templates/search.html b/var/www/modules/search/templates/search.html index 43895a9f..bed826dd 100644 --- a/var/www/modules/search/templates/search.html +++ b/var/www/modules/search/templates/search.html @@ -75,7 +75,7 @@
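End to end, this change wires each detection module into a Redis_Tags queue consumed by bin/Tags.py, which maintains three kinds of sets that the Flask views then query. The following self-contained sketch illustrates that flow under stated assumptions: it bypasses the Process/queue plumbing, and the connection settings and paste path are invented placeholders, while the message format, set names and confirm_tag() promotion mirror the code in this diff.

```python
import redis

# Placeholder connections standing in for the [ARDB_Tags] and [ARDB_Metadata]
# sections of bin/packages/config.cfg.
r_tags = redis.StrictRedis(host='localhost', port=6382, db=2, decode_responses=True)
r_meta = redis.StrictRedis(host='localhost', port=6382, db=3, decode_responses=True)

def tag_paste(message):
    """What bin/Tags.py does with one 'tag;path' message taken off the Redis_Tags queue."""
    tag, path = message.split(';')
    r_tags.sadd('list_tags', tag)       # every tag ever applied
    r_tags.sadd(tag, path)              # per-tag set of paste paths
    r_meta.sadd('tag:' + path, tag)     # per-paste set of tags

# A producer such as CreditCards.py publishes messages of this shape to its 'Tags' queue
# (the paste path below is an invented example):
paste = 'example/2018/01/01/demo.gz'
tag_paste('infoleak:automatic-detection="credit-card";{}'.format(paste))
tag_paste('infoleak:automatic-detection="mail";{}'.format(paste))

# /Tags/get_tagged_paste answers "pastes carrying all selected tags" with a set intersection:
selected = ['infoleak:automatic-detection="credit-card"',
            'infoleak:automatic-detection="mail"']
print(r_tags.sinter(selected[0], *selected[1:]))

# /Tags/confirm_tag promotes an automatic tag once an analyst validates it:
old = 'infoleak:automatic-detection="credit-card"'
new = old.replace('automatic-detection', 'analyst-detection', 1)
r_meta.srem('tag:' + paste, old)
r_tags.srem(old, paste)
r_meta.sadd('tag:' + paste, new)
r_tags.sadd(new, paste)
r_tags.sadd('list_tags', new)
```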