diff --git a/README.md b/README.md index 6325283e..0c500efd 100644 --- a/README.md +++ b/README.md @@ -61,6 +61,11 @@ linux based distributions, you can replace it with [installing_deps_archlinux.sh There is also a [Travis file](.travis.yml) used for automating the installation that can be used to build and install AIL on other systems. +Installation Notes +------------ + +In order to use AIL on **ZFS** or inside an **unprivileged LXC** container, you need to disable Direct I/O in `$AIL_HOME/configs/6382.conf` by setting the directive `use_direct_io_for_flush_and_compaction` to `false`. + Python 3 Upgrade ------------ diff --git a/ansible/README.md b/ansible/README.md index 36fc3fa7..79ac2b3f 100644 --- a/ansible/README.md +++ b/ansible/README.md @@ -1,3 +1,11 @@ +:warning: +Not maintained at the moment. +If you are interested in getting this running, please: + +Fork -> Branch -> PR + +In case of questions, please join our [GITTER](https://gitter.im/SteveClement/AIL-framework) chat. + # AIL-framework-ansible This Ansible role can be used to deploy the AIL-Framework on a host. diff --git a/bin/Curve.py b/bin/Curve.py index 07f690de..8e228039 100755 --- a/bin/Curve.py +++ b/bin/Curve.py @@ -48,6 +48,9 @@ top_termFreq_setName_week = ["TopTermFreq_set_week", 7] top_termFreq_setName_month = ["TopTermFreq_set_month", 31] top_termFreq_set_array = [top_termFreq_setName_day,top_termFreq_setName_week, top_termFreq_setName_month] +# create direct link in mail +full_paste_url = "/showsavedpaste/?paste=" + def check_if_tracked_term(term, path): if term in server_term.smembers(TrackedTermsSet_Name): #add_paste to tracked_word_set @@ -59,9 +62,14 @@ def check_if_tracked_term(term, path): # Send a notification only when the member is in the set if term in server_term.smembers(TrackedTermsNotificationEnabled_Name): + # create mail body + mail_body = ("AIL Framework,\n" + "New occurrence for term: " + term + "\n" + ''+full_paste_url + path) + # Send to every associated email adress for email in server_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + term): - sendEmailNotification(email, term) + sendEmailNotification(email, 'Term', mail_body) def getValueOverRange(word, startDate, num_day): @@ -96,6 +104,9 @@ if __name__ == "__main__": # FUNCTIONS # publisher.info("Script Curve started") + # create direct link in mail + full_paste_url = p.config.get("Notifications", "ail_domain") + full_paste_url + # FILE CURVE SECTION # csv_path = os.path.join(os.environ['AIL_HOME'], p.config.get("Directories", "wordtrending_csv")) diff --git a/bin/LAUNCH.sh b/bin/LAUNCH.sh index 5621287a..3acd7bb2 100755 --- a/bin/LAUNCH.sh +++ b/bin/LAUNCH.sh @@ -12,10 +12,21 @@ CYAN="\\033[1;36m" [ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1; [ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1; [ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1; +[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_BIN. Run the script from the virtual environment." && exit 1; +[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1; export PATH=$AIL_HOME:$PATH export PATH=$AIL_REDIS:$PATH export PATH=$AIL_ARDB:$PATH +export PATH=$AIL_BIN:$PATH +export PATH=$AIL_FLASK:$PATH + +isredis=`screen -ls | egrep '[0-9]+.Redis_AIL' | cut -d. -f1` +isardb=`screen -ls | egrep '[0-9]+.ARDB_AIL' | cut -d. 
-f1` +islogged=`screen -ls | egrep '[0-9]+.Logging_AIL' | cut -d. -f1` +isqueued=`screen -ls | egrep '[0-9]+.Queue_AIL' | cut -d. -f1` +isscripted=`screen -ls | egrep '[0-9]+.Script_AIL' | cut -d. -f1` +isflasked=`screen -ls | egrep '[0-9]+.Flask_AIL' | cut -d. -f1` function helptext { echo -e $YELLOW" @@ -35,15 +46,18 @@ function helptext { - All the ZMQ queuing modules. - All the ZMQ processing modules. - All Redis in memory servers. - - All Level-DB on disk servers. + - All ARDB on disk servers. "$DEFAULT" (Inside screen Daemons) - "$RED" - But first of all you'll need to edit few path where you installed - your redis & ardb servers. "$DEFAULT" Usage: ----- + LAUNCH.sh + [-l | --launchAuto] + [-k | --killAll] + [-c | --configUpdate] + [-t | --thirdpartyUpdate] + [-h | --help] " } @@ -68,16 +82,16 @@ function launching_ardb { echo -e $GREEN"\t* Launching ARDB servers"$DEFAULT sleep 0.1 - screen -S "ARDB_AIL" -X screen -t "6382" bash -c 'ardb-server '$conf_dir'6382.conf ; read x' + screen -S "ARDB_AIL" -X screen -t "6382" bash -c 'cd '${AIL_HOME}'; ardb-server '$conf_dir'6382.conf ; read x' } function launching_logs { screen -dmS "Logging_AIL" sleep 0.1 echo -e $GREEN"\t* Launching logging process"$DEFAULT - screen -S "Logging_AIL" -X screen -t "LogQueue" bash -c 'log_subscriber -p 6380 -c Queuing -l ../logs/; read x' + screen -S "Logging_AIL" -X screen -t "LogQueue" bash -c 'cd '${AIL_BIN}'; log_subscriber -p 6380 -c Queuing -l ../logs/; read x' sleep 0.1 - screen -S "Logging_AIL" -X screen -t "LogScript" bash -c 'log_subscriber -p 6380 -c Script -l ../logs/; read x' + screen -S "Logging_AIL" -X screen -t "LogScript" bash -c 'cd '${AIL_BIN}'; log_subscriber -p 6380 -c Script -l ../logs/; read x' } function launching_queues { @@ -85,88 +99,100 @@ function launching_queues { sleep 0.1 echo -e $GREEN"\t* Launching all the queues"$DEFAULT - screen -S "Queue_AIL" -X screen -t "Queues" bash -c 'python3 launch_queues.py; read x' + screen -S "Queue_AIL" -X screen -t "Queues" bash -c 'cd '${AIL_BIN}'; python3 launch_queues.py; read x' } -function launching_scripts { +function checking_configuration { + bin_dir=${AIL_HOME}/bin echo -e "\t* Checking configuration" - bash -c "python3 Update-conf.py" + if [ "$1" == "automatic" ]; then + bash -c "python3 $bin_dir/Update-conf.py True" + else + bash -c "python3 $bin_dir/Update-conf.py False" + fi + exitStatus=$? 
if [ $exitStatus -ge 1 ]; then echo -e $RED"\t* Configuration not up-to-date"$DEFAULT exit fi echo -e $GREEN"\t* Configuration up-to-date"$DEFAULT +} + +function launching_scripts { + checking_configuration $1; screen -dmS "Script_AIL" sleep 0.1 echo -e $GREEN"\t* Launching ZMQ scripts"$DEFAULT - screen -S "Script_AIL" -X screen -t "ModuleInformation" bash -c './ModulesInformationV2.py -k 0 -c 1; read x' + screen -S "Script_AIL" -X screen -t "ModuleInformation" bash -c 'cd '${AIL_BIN}'; ./ModulesInformationV2.py -k 0 -c 1; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Mixer" bash -c './Mixer.py; read x' + screen -S "Script_AIL" -X screen -t "Mixer" bash -c 'cd '${AIL_BIN}'; ./Mixer.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Global" bash -c './Global.py; read x' + screen -S "Script_AIL" -X screen -t "Global" bash -c 'cd '${AIL_BIN}'; ./Global.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Duplicates" bash -c './Duplicates.py; read x' + screen -S "Script_AIL" -X screen -t "Duplicates" bash -c 'cd '${AIL_BIN}'; ./Duplicates.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Lines" bash -c './Lines.py; read x' + screen -S "Script_AIL" -X screen -t "Lines" bash -c 'cd '${AIL_BIN}'; ./Lines.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c './DomClassifier.py; read x' + screen -S "Script_AIL" -X screen -t "DomClassifier" bash -c 'cd '${AIL_BIN}'; ./DomClassifier.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Categ" bash -c './Categ.py; read x' + screen -S "Script_AIL" -X screen -t "Categ" bash -c 'cd '${AIL_BIN}'; ./Categ.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Tokenize" bash -c './Tokenize.py; read x' + screen -S "Script_AIL" -X screen -t "Tokenize" bash -c 'cd '${AIL_BIN}'; ./Tokenize.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "CreditCards" bash -c './CreditCards.py; read x' + screen -S "Script_AIL" -X screen -t "CreditCards" bash -c 'cd '${AIL_BIN}'; ./CreditCards.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Onion" bash -c './Onion.py; read x' + screen -S "Script_AIL" -X screen -t "Onion" bash -c 'cd '${AIL_BIN}'; ./Onion.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Mail" bash -c './Mail.py; read x' + screen -S "Script_AIL" -X screen -t "Mail" bash -c 'cd '${AIL_BIN}'; ./Mail.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "ApiKey" bash -c './ApiKey.py; read x' + screen -S "Script_AIL" -X screen -t "ApiKey" bash -c 'cd '${AIL_BIN}'; ./ApiKey.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Web" bash -c './Web.py; read x' + screen -S "Script_AIL" -X screen -t "Web" bash -c 'cd '${AIL_BIN}'; ./Web.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Credential" bash -c './Credential.py; read x' + screen -S "Script_AIL" -X screen -t "Credential" bash -c 'cd '${AIL_BIN}'; ./Credential.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Curve" bash -c './Curve.py; read x' + screen -S "Script_AIL" -X screen -t "Curve" bash -c 'cd '${AIL_BIN}'; ./Curve.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "CurveManageTopSets" bash -c './CurveManageTopSets.py; read x' + screen -S "Script_AIL" -X screen -t "CurveManageTopSets" bash -c 'cd '${AIL_BIN}'; ./CurveManageTopSets.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "RegexForTermsFrequency" bash -c './RegexForTermsFrequency.py; read x' + screen -S "Script_AIL" -X screen -t "RegexForTermsFrequency" bash -c 'cd 
'${AIL_BIN}'; ./RegexForTermsFrequency.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "SetForTermsFrequency" bash -c './SetForTermsFrequency.py; read x' + screen -S "Script_AIL" -X screen -t "SetForTermsFrequency" bash -c 'cd '${AIL_BIN}'; ./SetForTermsFrequency.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Indexer" bash -c './Indexer.py; read x' + screen -S "Script_AIL" -X screen -t "Indexer" bash -c 'cd '${AIL_BIN}'; ./Indexer.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Keys" bash -c './Keys.py; read x' + screen -S "Script_AIL" -X screen -t "Keys" bash -c 'cd '${AIL_BIN}'; ./Keys.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Decoder" bash -c './Decoder.py; read x' + screen -S "Script_AIL" -X screen -t "Decoder" bash -c 'cd '${AIL_BIN}'; ./Decoder.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c './Bitcoin.py; read x' + screen -S "Script_AIL" -X screen -t "Bitcoin" bash -c 'cd '${AIL_BIN}'; ./Bitcoin.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Phone" bash -c './Phone.py; read x' + screen -S "Script_AIL" -X screen -t "Phone" bash -c 'cd '${AIL_BIN}'; ./Phone.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Release" bash -c './Release.py; read x' + screen -S "Script_AIL" -X screen -t "Release" bash -c 'cd '${AIL_BIN}'; ./Release.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Cve" bash -c './Cve.py; read x' + screen -S "Script_AIL" -X screen -t "Cve" bash -c 'cd '${AIL_BIN}'; ./Cve.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "WebStats" bash -c './WebStats.py; read x' + screen -S "Script_AIL" -X screen -t "WebStats" bash -c 'cd '${AIL_BIN}'; ./WebStats.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "ModuleStats" bash -c './ModuleStats.py; read x' + screen -S "Script_AIL" -X screen -t "ModuleStats" bash -c 'cd '${AIL_BIN}'; ./ModuleStats.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "SQLInjectionDetection" bash -c './SQLInjectionDetection.py; read x' + screen -S "Script_AIL" -X screen -t "SQLInjectionDetection" bash -c 'cd '${AIL_BIN}'; ./SQLInjectionDetection.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "alertHandler" bash -c './alertHandler.py; read x' + screen -S "Script_AIL" -X screen -t "LibInjection" bash -c 'cd '${AIL_BIN}'; ./LibInjection.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c './MISP_The_Hive_feeder.py; read x' + screen -S "Script_AIL" -X screen -t "alertHandler" bash -c 'cd '${AIL_BIN}'; ./alertHandler.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "Tags" bash -c './Tags.py; read x' + screen -S "Script_AIL" -X screen -t "MISPtheHIVEfeeder" bash -c 'cd '${AIL_BIN}'; ./MISP_The_Hive_feeder.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "SentimentAnalysis" bash -c './SentimentAnalysis.py; read x' + screen -S "Script_AIL" -X screen -t "Tags" bash -c 'cd '${AIL_BIN}'; ./Tags.py; read x' sleep 0.1 - screen -S "Script_AIL" -X screen -t "SubmitPaste" bash -c './submit_paste.py; read x' + screen -S "Script_AIL" -X screen -t "SentimentAnalysis" bash -c 'cd '${AIL_BIN}'; ./SentimentAnalysis.py; read x' + sleep 0.1 + screen -S "Script_AIL" -X screen -t "SubmitPaste" bash -c 'cd '${AIL_BIN}'; ./submit_paste.py; read x' } @@ -215,136 +241,203 @@ function checking_ardb { sleep 0.2 bash -c $redis_dir'redis-cli -p 6382 PING | grep "PONG" &> /dev/null' if [ ! $? 
== 0 ]; then - echo -e $RED"\t6382 not ready"$DEFAULT + echo -e $RED"\t6382 ARDB not ready"$DEFAULT flag_ardb=1 fi return $flag_ardb; } -#If no params, display the help -#[[ $@ ]] || { helptext; exit 1;} - -helptext; - -############### TESTS ################### -isredis=`screen -ls | egrep '[0-9]+.Redis_AIL' | cut -d. -f1` -isardb=`screen -ls | egrep '[0-9]+.ARDB_AIL' | cut -d. -f1` -islogged=`screen -ls | egrep '[0-9]+.Logging_AIL' | cut -d. -f1` -isqueued=`screen -ls | egrep '[0-9]+.Queue_AIL' | cut -d. -f1` -isscripted=`screen -ls | egrep '[0-9]+.Script_AIL' | cut -d. -f1` - -options=("Redis" "Ardb" "Logs" "Queues" "Scripts" "Killall" "Shutdown" "Update-config" "Update-thirdparty") - -menu() { - echo "What do you want to Launch?:" - for i in ${!options[@]}; do - printf "%3d%s) %s\n" $((i+1)) "${choices[i]:- }" "${options[i]}" - done - [[ "$msg" ]] && echo "$msg"; : +function launch_redis { + if [[ ! $isredis ]]; then + launching_redis; + else + echo -e $RED"\t* A screen is already launched"$DEFAULT + fi } -prompt="Check an option (again to uncheck, ENTER when done): " -while menu && read -rp "$prompt" numinput && [[ "$numinput" ]]; do - for num in $numinput; do - [[ "$num" != *[![:digit:]]* ]] && (( num > 0 && num <= ${#options[@]} )) || { - msg="Invalid option: $num"; break - } - ((num--)); msg="${options[num]} was ${choices[num]:+un}checked" - [[ "${choices[num]}" ]] && choices[num]="" || choices[num]="+" - done -done - -for i in ${!options[@]}; do - if [[ "${choices[i]}" ]]; then - case ${options[i]} in - Redis) - if [[ ! $isredis ]]; then - launching_redis; - else - echo -e $RED"\t* A screen is already launched"$DEFAULT - fi - ;; - Ardb) - if [[ ! $isardb ]]; then - launching_ardb; - else - echo -e $RED"\t* A screen is already launched"$DEFAULT - fi - ;; - Logs) - if [[ ! $islogged ]]; then - launching_logs; - else - echo -e $RED"\t* A screen is already launched"$DEFAULT - fi - ;; - Queues) - if [[ ! $isqueued ]]; then - launching_queues; - else - echo -e $RED"\t* A screen is already launched"$DEFAULT - fi - ;; - Scripts) - if [[ ! $isscripted ]]; then - sleep 1 - if checking_redis && checking_ardb; then - launching_scripts; - else - echo -e $YELLOW"\tScript not started, waiting 5 secondes"$DEFAULT - sleep 5 - if checking_redis && checking_ardb; then - launching_scripts; - else - echo -e $RED"\tScript not started"$DEFAULT - fi; - fi; - else - echo -e $RED"\t* A screen is already launched"$DEFAULT - fi - ;; - Killall) - if [[ $isredis || $isardb || $islogged || $isqueued || $isscripted ]]; then - echo -e $GREEN"Gracefully closing redis servers"$DEFAULT - shutting_down_redis; - sleep 0.2 - echo -e $GREEN"Gracefully closing ardb servers"$DEFAULT - shutting_down_ardb; - echo -e $GREEN"Killing all"$DEFAULT - kill $isredis $isardb $islogged $isqueued $isscripted - sleep 0.2 - echo -e $ROSE`screen -ls`$DEFAULT - echo -e $GREEN"\t* $isredis $isardb $islogged $isqueued $isscripted killed."$DEFAULT - else - echo -e $RED"\t* No screen to kill"$DEFAULT - fi - ;; - Shutdown) - bash -c "./Shutdown.py" - ;; - Update-config) - echo -e "\t* Checking configuration" - bash -c "./Update-conf.py" - exitStatus=$? - if [ $exitStatus -ge 1 ]; then - echo -e $RED"\t* Configuration not up-to-date"$DEFAULT - exit - else - echo -e $GREEN"\t* Configuration up-to-date"$DEFAULT - fi - ;; - Update-thirdparty) - echo -e "\t* Updating thirdparty..." - bash -c "(cd ../var/www && ./update_thirdparty.sh)" - exitStatus=$? 
- if [ $exitStatus -ge 1 ]; then - echo -e $RED"\t* Configuration not up-to-date"$DEFAULT - exit - else - echo -e $GREEN"\t* Configuration up-to-date"$DEFAULT - fi - ;; - esac +function launch_ardb { + if [[ ! $isardb ]]; then + launching_ardb; + else + echo -e $RED"\t* A screen is already launched"$DEFAULT fi +} + +function launch_logs { + if [[ ! $islogged ]]; then + launching_logs; + else + echo -e $RED"\t* A screen is already launched"$DEFAULT + fi +} + +function launch_queues { + if [[ ! $isqueued ]]; then + launching_queues; + else + echo -e $RED"\t* A screen is already launched"$DEFAULT + fi +} + +function launch_scripts { + if [[ ! $isscripted ]]; then + sleep 1 + if checking_ardb && checking_redis; then + launching_scripts $1; + else + no_script_launched=true + while $no_script_launched; do + echo -e $YELLOW"\tScript not started, waiting 5 more seconds"$DEFAULT + sleep 5 + if checking_redis && checking_ardb; then + launching_scripts $1; + no_script_launched=false + else + echo -e $RED"\tScript not started"$DEFAULT + fi; + done + fi; + else + echo -e $RED"\t* A screen is already launched"$DEFAULT + fi +} + +function launch_flask { + if [[ ! $isflasked ]]; then + flask_dir=${AIL_FLASK} + screen -dmS "Flask_AIL" + sleep 0.1 + echo -e $GREEN"\t* Launching Flask server"$DEFAULT + screen -S "Flask_AIL" -X screen -t "Flask_server" bash -c "cd $flask_dir; ls; ./Flask_server.py; read x" + else + echo -e $RED"\t* A Flask screen is already launched"$DEFAULT + fi +} + +function killall { + if [[ $isredis || $isardb || $islogged || $isqueued || $isscripted || $isflasked ]]; then + echo -e $GREEN"Gracefully closing redis servers"$DEFAULT + shutting_down_redis; + sleep 0.2 + echo -e $GREEN"Gracefully closing ardb servers"$DEFAULT + shutting_down_ardb; + echo -e $GREEN"Killing all"$DEFAULT + kill $isredis $isardb $islogged $isqueued $isscripted $isflasked + sleep 0.2 + echo -e $ROSE`screen -ls`$DEFAULT + echo -e $GREEN"\t* $isredis $isardb $islogged $isqueued $isscripted $isflasked killed."$DEFAULT + else + echo -e $RED"\t* No screen to kill"$DEFAULT + fi +} + +function shutdown { + bash -c "./Shutdown.py" +} + +function update_thirdparty { + echo -e "\t* Updating thirdparty..." + bash -c "(cd ${AIL_FLASK}; ./update_thirdparty.sh)" + exitStatus=$? 
+ if [ $exitStatus -ge 1 ]; then + echo -e $RED"\t* Thirdparty not up-to-date"$DEFAULT + exit + else + echo -e $GREEN"\t* Thirdparty updated"$DEFAULT + fi +} + +function launch_all { + launch_redis; + launch_ardb; + launch_logs; + launch_queues; + launch_scripts $1; + launch_flask; +} + +#If no params, display the menu +[[ $@ ]] || { + + helptext; + + options=("Redis" "Ardb" "Logs" "Queues" "Scripts" "Flask" "Killall" "Shutdown" "Update-config" "Update-thirdparty") + + menu() { + echo "What do you want to Launch?:" + for i in ${!options[@]}; do + printf "%3d%s) %s\n" $((i+1)) "${choices[i]:- }" "${options[i]}" + done + [[ "$msg" ]] && echo "$msg"; : + } + + prompt="Check an option (again to uncheck, ENTER when done): " + while menu && read -rp "$prompt" numinput && [[ "$numinput" ]]; do + for num in $numinput; do + [[ "$num" != *[![:digit:]]* ]] && (( num > 0 && num <= ${#options[@]} )) || { + msg="Invalid option: $num"; break + } + ((num--)); msg="${options[num]} was ${choices[num]:+un}checked" + [[ "${choices[num]}" ]] && choices[num]="" || choices[num]="+" + done + done + + for i in ${!options[@]}; do + if [[ "${choices[i]}" ]]; then + case ${options[i]} in + Redis) + launch_redis + ;; + Ardb) + launch_ardb; + ;; + Logs) + launch_logs; + ;; + Queues) + launch_queues; + ;; + Scripts) + launch_scripts; + ;; + Flask) + launch_flask; + ;; + Killall) + killall; + ;; + Shutdown) + shutdown; + ;; + Update-config) + checking_configuration "manual"; + ;; + Update-thirdparty) + update_thirdparty; + ;; + esac + fi + done + + exit +} + +while [ "$1" != "" ]; do + case $1 in + -l | --launchAuto ) launch_all "automatic"; + ;; + -k | --killAll ) killall; + ;; + -c | --configUpdate ) checking_configuration "manual"; + ;; + -t | --thirdpartyUpdate ) update_thirdparty; + ;; + -h | --help ) helptext; + exit + ;; + * ) helptext + exit 1 + esac + shift done diff --git a/bin/LibInjection.py b/bin/LibInjection.py new file mode 100755 index 00000000..4ad388d5 --- /dev/null +++ b/bin/LibInjection.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 +# -*-coding:UTF-8 -* + +""" +The LibInjection Module +================================ + +This module is consuming the Redis-list created by the Web module. + +It tries to identify SQL Injections with libinjection. 
+ +""" + +import time +import string +import urllib.request +import re +import pylibinjection +import pprint + +from pubsublogger import publisher +from Helper import Process +from packages import Paste +from pyfaup.faup import Faup + +def analyse(url, path): + faup.decode(url) + url_parsed = faup.get() + pprint.pprint(url_parsed) + resource_path = url_parsed['resource_path'] + query_string = url_parsed['query_string'] + + result_path = {'sqli' : False} + result_query = {'sqli' : False} + + if resource_path is not None: + result_path = pylibinjection.detect_sqli(resource_path) + print("path is sqli : {0}".format(result_path)) + + if query_string is not None: + result_query = pylibinjection.detect_sqli(query_string) + print("query is sqli : {0}".format(result_query)) + + if result_path['sqli'] is True or result_query['sqli'] is True: + paste = Paste.Paste(path) + print("Detected (libinjection) SQL in URL: ") + print(urllib.request.unquote(url)) + to_print = 'LibInjection;{};{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, "Detected SQL in URL", paste.p_path) + publisher.warning(to_print) + #Send to duplicate + p.populate_set_out(path, 'Duplicate') + #send to Browse_warning_paste + p.populate_set_out('sqlinjection;{}'.format(path), 'alertHandler') + msg = 'infoleak:automatic-detection="sql-injection";{}'.format(path) + p.populate_set_out(msg, 'Tags') + +if __name__ == '__main__': + # If you wish to use an other port of channel, do not forget to run a subscriber accordingly (see launch_logs.sh) + # Port of the redis instance used by pubsublogger + publisher.port = 6380 + # Script is the default channel used for the modules. + publisher.channel = 'Script' + + # Section name in bin/packages/modules.cfg + config_section = 'LibInjection' + + # Setup the I/O queues + p = Process(config_section) + + # Sent to the logging a description of the module + publisher.info("Try to detect SQL injection with LibInjection") + + faup = Faup() + + # Endless loop getting messages from the input queue + while True: + # Get one message from the input queue + message = p.get_from_set() + + if message is None: + publisher.debug("{} queue is empty, waiting".format(config_section)) + time.sleep(10) + continue + + else: + # Do something with the message from the queue + url, date, path = message.split() + analyse(url, path) diff --git a/bin/MISP_The_Hive_feeder.py b/bin/MISP_The_Hive_feeder.py index a850bc2e..7fa6b223 100755 --- a/bin/MISP_The_Hive_feeder.py +++ b/bin/MISP_The_Hive_feeder.py @@ -45,6 +45,7 @@ try: except: print('The HIVE keys not present') flag_the_hive = False + HiveApi = False from thehive4py.api import TheHiveApi import thehive4py.exceptions diff --git a/bin/NotificationHelper.py b/bin/NotificationHelper.py index 8c65bb3d..6dad63c7 100755 --- a/bin/NotificationHelper.py +++ b/bin/NotificationHelper.py @@ -21,7 +21,7 @@ TrackedTermsNotificationEnabled_Name = "TrackedNotifications" # Keys will be e.g. TrackedNotificationEmails TrackedTermsNotificationEmailsPrefix_Name = "TrackedNotificationEmails_" -def sendEmailNotification(recipient, term): +def sendEmailNotification(recipient, alert_name, content): if not os.path.exists(configfile): raise Exception('Unable to find the configuration file. 
\ @@ -57,7 +57,13 @@ def sendEmailNotification(recipient, term): try: if sender_pw is not None: - smtp_server = smtplib.SMTP_SSL(sender_host, sender_port) + try: + smtp_server = smtplib.SMTP(sender_host, sender_port) + smtp_server.starttls() + except smtplib.SMTPNotSupportedError: + print("The server does not support the STARTTLS extension.") + smtp_server = smtplib.SMTP_SSL(sender_host, sender_port) + smtp_server.ehlo() smtp_server.login(sender, sender_pw) else: @@ -67,13 +73,14 @@ def sendEmailNotification(recipient, term): mime_msg = MIMEMultipart() mime_msg['From'] = sender mime_msg['To'] = recipient - mime_msg['Subject'] = "AIL Term Alert" + mime_msg['Subject'] = "AIL Framework "+ alert_name + " Alert" - body = "New occurrence for term: " + term + body = content mime_msg.attach(MIMEText(body, 'plain')) smtp_server.sendmail(sender, recipient, mime_msg.as_string()) smtp_server.quit() + print('Send notification '+ alert_name + ' to '+recipient) except Exception as e: print(str(e)) diff --git a/bin/RegexForTermsFrequency.py b/bin/RegexForTermsFrequency.py index 7aea03f0..ecca8e4d 100755 --- a/bin/RegexForTermsFrequency.py +++ b/bin/RegexForTermsFrequency.py @@ -35,6 +35,8 @@ top_termFreq_setName_week = ["TopTermFreq_set_week", 7] top_termFreq_setName_month = ["TopTermFreq_set_month", 31] top_termFreq_set_array = [top_termFreq_setName_day,top_termFreq_setName_week, top_termFreq_setName_month] +# create direct link in mail +full_paste_url = "/showsavedpaste/?paste=" def refresh_dicos(): dico_regex = {} @@ -62,6 +64,9 @@ if __name__ == "__main__": # FUNCTIONS # publisher.info("RegexForTermsFrequency script started") + # create direct link in mail + full_paste_url = p.config.get("Notifications", "ail_domain") + full_paste_url + #compile the regex dico_refresh_cooldown = time.time() dico_regex, dico_regexname_to_redis = refresh_dicos() @@ -96,9 +101,15 @@ if __name__ == "__main__": if regex_str_complete not in server_term.smembers(BlackListTermsSet_Name): # Send a notification only when the member is in the set if regex_str_complete in server_term.smembers(TrackedTermsNotificationEnabled_Name): + + # create mail body + mail_body = ("AIL Framework,\n" + "New occurrence for regex: " + regex_str + "\n" + ''+full_paste_url + filename) + # Send to every associated email adress for email in server_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + regex_str_complete): - sendEmailNotification(email, regex_str) + sendEmailNotification(email, 'Term', mail_body) set_name = 'regex_' + dico_regexname_to_redis[regex_str] new_to_the_set = server_term.sadd(set_name, filename) diff --git a/bin/SetForTermsFrequency.py b/bin/SetForTermsFrequency.py index b42f07e6..78de9b08 100755 --- a/bin/SetForTermsFrequency.py +++ b/bin/SetForTermsFrequency.py @@ -34,6 +34,9 @@ top_termFreq_setName_week = ["TopTermFreq_set_week", 7] top_termFreq_setName_month = ["TopTermFreq_set_month", 31] top_termFreq_set_array = [top_termFreq_setName_day,top_termFreq_setName_week, top_termFreq_setName_month] +# create direct link in mail +full_paste_url = "/showsavedpaste/?paste=" + def add_quote_inside_tab(tab): quoted_tab = "[" for elem in tab[1:-1].split(','): @@ -60,6 +63,9 @@ if __name__ == "__main__": # FUNCTIONS # publisher.info("RegexForTermsFrequency script started") + # create direct link in mail + full_paste_url = p.config.get("Notifications", "ail_domain") + full_paste_url + #get the dico and matching percent dico_percent = {} dico_set_tab = {} @@ -105,9 +111,15 @@ if __name__ == "__main__": if eff_percent >= 
dico_percent[the_set]: # Send a notification only when the member is in the set if dico_setname_to_redis[str(the_set)] in server_term.smembers(TrackedTermsNotificationEnabled_Name): + + # create mail body + mail_body = ("AIL Framework,\n" + "New occurrence for term: " + dico_setname_to_redis[str(the_set)] + "\n" + ''+full_paste_url + filename) + # Send to every associated email adress for email in server_term.smembers(TrackedTermsNotificationEmailsPrefix_Name + dico_setname_to_redis[str(the_set)]): - sendEmailNotification(email, dico_setname_to_redis[str(the_set)]) + sendEmailNotification(email, 'Term', mail_body) print(the_set, "matched in", filename) set_name = 'set_' + dico_setname_to_redis[the_set] diff --git a/bin/Update-conf.py b/bin/Update-conf.py index 901cb935..0d04fb88 100755 --- a/bin/Update-conf.py +++ b/bin/Update-conf.py @@ -11,6 +11,15 @@ import shutil #return true if the configuration is up-to-date def main(): + if len(sys.argv) != 2: + print('usage:', 'Update-conf.py', 'Automatic (boolean)') + exit(1) + else: + automatic = sys.argv[1] + if automatic == 'True': + automatic = True + else: + automatic = False configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg') configfileBackup = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg') + '.backup' @@ -63,12 +72,19 @@ def main(): print(" - "+item[0]) print("+--------------------------------------------------------------------+") - resp = input("Do you want to auto fix it? [y/n] ") + if automatic: + resp = 'y' + else: + resp = input("Do you want to auto fix it? [y/n] ") if resp != 'y': return False else: - resp2 = input("Do you want to keep a backup of the old configuration file? [y/n] ") + if automatic: + resp2 = 'y' + else: + resp2 = input("Do you want to keep a backup of the old configuration file? [y/n] ") + if resp2 == 'y': shutil.move(configfile, configfileBackup) diff --git a/bin/packages/config.cfg.sample b/bin/packages/config.cfg.sample index 08b75324..b10d4af0 100644 --- a/bin/packages/config.cfg.sample +++ b/bin/packages/config.cfg.sample @@ -22,6 +22,7 @@ sentiment_lexicon_file = sentiment/vader_lexicon.zip/vader_lexicon/vader_lexicon ##### Notifications ###### [Notifications] +ail_domain = http://localhost:7000 sender = sender@example.com sender_host = smtp.example.com sender_port = 1337 diff --git a/bin/packages/modules.cfg b/bin/packages/modules.cfg index 06217452..712f5bab 100644 --- a/bin/packages/modules.cfg +++ b/bin/packages/modules.cfg @@ -70,6 +70,10 @@ publish = Redis_Url,ZMQ_Url [WebStats] subscribe = Redis_Url +[LibInjection] +subscribe = Redis_Url +publish = Redis_alertHandler,Redis_Duplicate,Redis_Tags + [SQLInjectionDetection] subscribe = Redis_Url publish = Redis_alertHandler,Redis_Duplicate,Redis_Tags diff --git a/configs/6382.conf b/configs/6382.conf index fecfa7dd..27f03a04 100755 --- a/configs/6382.conf +++ b/configs/6382.conf @@ -1,7 +1,7 @@ # Ardb configuration file example, modified from redis's conf file. 
# Home dir for ardb instance, it can be referenced by ${ARDB_HOME} in this config file -home ../DATA_ARDB/ +home DATA_ARDB/ # Note on units: when memory size is needed, it is possible to specify # it in the usual form of 1k 5GB 4M and so forth: diff --git a/doc/presentation/ail-pass-the-salt.pdf b/doc/presentation/ail-pass-the-salt.pdf new file mode 100644 index 00000000..3cba7ce8 Binary files /dev/null and b/doc/presentation/ail-pass-the-salt.pdf differ diff --git a/docker_start.sh b/docker_start.sh index 52f40338..1f80ecae 100755 --- a/docker_start.sh +++ b/docker_start.sh @@ -1,3 +1,6 @@ +echo "Currently unmaintained, continue at your own risk of not having a working AIL at the end :(" +exit 1 + source ./AILENV/bin/activate cd bin @@ -5,7 +8,11 @@ export PATH=$AIL_HOME:$PATH export PATH=$AIL_REDIS:$PATH export PATH=$AIL_LEVELDB:$PATH export PATH=$AIL_ARDB:$PATH -export AILENV=/opt/AIL +if [ -z $1 ]; then + export AILENV=/opt/AIL + else + export AILENV=$1 +fi conf_dir="${AIL_HOME}/configs/" diff --git a/installing_deps.sh b/installing_deps.sh index b6c489df..c29ba4b9 100755 --- a/installing_deps.sh +++ b/installing_deps.sh @@ -55,7 +55,7 @@ sudo ldconfig popd # tlsh -test ! -d tlsh && git clone git://github.com/trendmicro/tlsh.git +test ! -d tlsh && git clone https://github.com/trendmicro/tlsh.git pushd tlsh/ ./make.sh pushd build/release/ diff --git a/installing_deps_archlinux.sh b/installing_deps_archlinux.sh index d19561c6..38ac4ca7 100644 --- a/installing_deps_archlinux.sh +++ b/installing_deps_archlinux.sh @@ -1,5 +1,9 @@ #!/bin/bash + +echo "Currently unmaintained, continue at your own risk of not having a working AIL at the end :( Will be merged into main install deps later on." +exit 1 + set -e set -x diff --git a/pip3_packages_requirement.txt b/pip3_packages_requirement.txt index 7ab82b6b..53ec97e7 100644 --- a/pip3_packages_requirement.txt +++ b/pip3_packages_requirement.txt @@ -67,3 +67,6 @@ https://github.com/trolldbois/python3-adns/archive/master.zip https://github.com/trolldbois/python-cymru-services/archive/master.zip https://github.com/saffsd/langid.py/archive/master.zip + +#LibInjection bindings +pylibinjection diff --git a/var/www/modules/PasteSubmit/Flask_PasteSubmit.py b/var/www/modules/PasteSubmit/Flask_PasteSubmit.py index 34e8c458..cb404d2f 100644 --- a/var/www/modules/PasteSubmit/Flask_PasteSubmit.py +++ b/var/www/modules/PasteSubmit/Flask_PasteSubmit.py @@ -503,29 +503,26 @@ def edit_tag_export(): else: status_misp.append(False) - # empty whitelist - if whitelist_hive == 0: - for tag in list_export_tags: + for tag in list_export_tags: + if r_serv_db.sismember('whitelist_hive', tag): status_hive.append(True) - else: - for tag in list_export_tags: - if r_serv_db.sismember('whitelist_hive', tag): - status_hive.append(True) - else: - status_hive.append(False) - - if (misp_auto_events is not None) and (hive_auto_alerts is not None): + else: + status_hive.append(False) + if misp_auto_events is not None: if int(misp_auto_events) == 1: misp_active = True else: misp_active = False + else: + misp_active = False + + if hive_auto_alerts is not None: if int(hive_auto_alerts) == 1: hive_active = True else: hive_active = False else: - misp_active = False hive_active = False nb_tags = str(r_serv_db.scard('list_export_tags')) diff --git a/var/www/modules/dashboard/templates/index.html b/var/www/modules/dashboard/templates/index.html index f20a0964..e5d61014 100644 --- a/var/www/modules/dashboard/templates/index.html +++ b/var/www/modules/dashboard/templates/index.html @@ -102,7 
+102,7 @@ Processed pastes

- Filtered duplicated + Filtered duplicates
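
The Installation Notes added to the README above and the configs/6382.conf hunk both touch the ARDB configuration. Below is a minimal sketch, not part of this patch, of how the Direct I/O directive could be flipped programmatically; it assumes the stock 6382.conf format of plain "key value" lines and that AIL_HOME is set. Editing the file by hand as the README note describes works just as well.

#!/usr/bin/env python3
# Sketch (not part of this patch): disable ARDB Direct I/O as described in the
# README Installation Notes, for ZFS or unprivileged LXC setups. Assumes the
# stock configs/6382.conf uses plain "key value" lines and AIL_HOME is set.
import os
import re

conf_path = os.path.join(os.environ['AIL_HOME'], 'configs', '6382.conf')

with open(conf_path) as f:
    lines = f.readlines()

with open(conf_path, 'w') as f:
    for line in lines:
        # Rewrite only the Direct I/O directive, leave every other line untouched
        if re.match(r'\s*use_direct_io_for_flush_and_compaction\b', line):
            line = 'use_direct_io_for_flush_and_compaction false\n'
        f.write(line)
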
diff --git a/var/www/modules/terms/Flask_terms.py b/var/www/modules/terms/Flask_terms.py index d550c2c0..cca6b713 100644 --- a/var/www/modules/terms/Flask_terms.py +++ b/var/www/modules/terms/Flask_terms.py @@ -265,7 +265,7 @@ def terms_management_query_paste(): for path in track_list_path: paste = Paste.Paste(path) p_date = str(paste._get_p_date()) - p_date = p_date[6:]+'/'+p_date[4:6]+'/'+p_date[0:4] + p_date = p_date[0:4]+'/'+p_date[4:6]+'/'+p_date[6:8] p_source = paste.p_source p_encoding = paste._get_p_encoding() p_size = paste.p_size @@ -520,7 +520,7 @@ def credentials_management_query_paste(): path = r_serv_cred.hget(REDIS_KEY_ALL_PATH_SET_REV, pathNum) paste = Paste.Paste(path) p_date = str(paste._get_p_date()) - p_date = p_date[6:]+'/'+p_date[4:6]+'/'+p_date[0:4] + p_date = p_date[0:4]+'/'+p_date[4:6]+'/'+p_date[6:8] p_source = paste.p_source p_encoding = paste._get_p_encoding() p_size = paste.p_size diff --git a/var/www/modules/terms/templates/terms_management.html b/var/www/modules/terms/templates/terms_management.html index 41ac586f..98ac9df7 100644 --- a/var/www/modules/terms/templates/terms_management.html +++ b/var/www/modules/terms/templates/terms_management.html @@ -369,12 +369,7 @@ function perform_operation(){ if(json.action == "add") { // query data $.get("{{ url_for('terms.terms_management_query') }}", { term: json.term, section: json.section }, function(data2, status){ - var delete_button = ""; - var info_button = ""; - var enabled_checkbox = ""; - var action_buttons = "

" + info_button + delete_button + "   " + enabled_checkbox + "

"; - table_track.row.add( [ json.term, data2[3], data2[0], data2[1], data2[2], 0, action_buttons, $('#followTermEMailNotificationReceiversInput').val().replace(",", "\n") ] ).draw( false ); - perform_binding(); + reload_per_paste(); }); } else if (json.action == "delete") { // Find indexes of row which have the term in the first column @@ -408,6 +403,3 @@ function perform_operation(){ } } - - -