mirror of https://github.com/ail-project/ail-framework.git
synced 2024-11-10 08:38:28 +00:00

commit 976b97bcd1
10 changed files with 682 additions and 98 deletions

.gitignore (vendored): 1 change

@@ -34,6 +34,7 @@ var/www/submitted
 bin/packages/config.cfg
 bin/packages/config.cfg.backup
 configs/keys
+update/current_version
 files

 # installed files

bin/LAUNCH.sh

@@ -16,7 +16,6 @@ export AIL_HOME="${DIR}"
 cd ${AIL_HOME}

 if [ -e "${DIR}/AILENV/bin/python" ]; then
-    echo "AIL-framework virtualenv seems to exist, good"
     ENV_PY="${DIR}/AILENV/bin/python"
 else
     echo "Please make sure you have a AIL-framework environment, au revoir"

@@ -75,6 +74,7 @@ function helptext
 LAUNCH.sh
     [-l | --launchAuto]
     [-k | --killAll]
+    [-u | --update]
     [-c | --configUpdate]
     [-t | --thirdpartyUpdate]
     [-h | --help]

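The new -u entry in the help text is wired to the update() function added further down in this file; a minimal manual run, assuming the framework is installed with its AILENV virtualenv already built, would be:

cd /opt/AIL-framework          # assumed install location
./bin/LAUNCH.sh -u             # runs update(), which in turn calls python3 bin/Update.py
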
@@ -264,20 +264,20 @@ function checking_redis
     redis_dir=${AIL_HOME}/redis/src/
     bash -c $redis_dir'redis-cli -p 6379 PING | grep "PONG" &> /dev/null'
     if [ ! $? == 0 ]; then
         echo -e $RED"\t6379 not ready"$DEFAULT
         flag_redis=1
     fi
     sleep 0.1
     bash -c $redis_dir'redis-cli -p 6380 PING | grep "PONG" &> /dev/null'
     if [ ! $? == 0 ]; then
         echo -e $RED"\t6380 not ready"$DEFAULT
         flag_redis=1
     fi
     sleep 0.1
     bash -c $redis_dir'redis-cli -p 6381 PING | grep "PONG" &> /dev/null'
     if [ ! $? == 0 ]; then
         echo -e $RED"\t6381 not ready"$DEFAULT
         flag_redis=1
     fi
     sleep 0.1

@@ -290,8 +290,8 @@ function checking_ardb
     sleep 0.2
     bash -c $redis_dir'redis-cli -p 6382 PING | grep "PONG" &> /dev/null'
     if [ ! $? == 0 ]; then
         echo -e $RED"\t6382 ARDB not ready"$DEFAULT
         flag_ardb=1
     fi

     return $flag_ardb;

@@ -379,11 +379,15 @@ function launch_feeder

 function killall {
     if [[ $isredis || $isardb || $islogged || $isqueued || $isscripted || $isflasked || $isfeeded ]]; then
-        echo -e $GREEN"Gracefully closing redis servers"$DEFAULT
-        shutting_down_redis;
-        sleep 0.2
-        echo -e $GREEN"Gracefully closing ardb servers"$DEFAULT
-        shutting_down_ardb;
+        if [[ $isredis ]]; then
+            echo -e $GREEN"Gracefully closing redis servers"$DEFAULT
+            shutting_down_redis;
+            sleep 0.2
+        fi
+        if [[ $isardb ]]; then
+            echo -e $GREEN"Gracefully closing ardb servers"$DEFAULT
+            shutting_down_ardb;
+        fi
         echo -e $GREEN"Killing all"$DEFAULT
         kill $isredis $isardb $islogged $isqueued $isscripted $isflasked $isfeeded
         sleep 0.2

@@ -398,6 +402,17 @@ function shutdown
     bash -c "./Shutdown.py"
 }

+function update() {
+    bin_dir=${AIL_HOME}/bin
+
+    bash -c "python3 $bin_dir/Update.py"
+    exitStatus=$?
+    if [ $exitStatus -ge 1 ]; then
+        echo -e $RED"\t* Update Error"$DEFAULT
+        exit
+    fi
+}
+
 function update_thirdparty {
     echo -e "\t* Updating thirdparty..."
     bash -c "(cd ${AIL_FLASK}; ./update_thirdparty.sh)"

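The helper only wraps the new Python updater and stops the launcher when it returns a non-zero exit status; an equivalent manual invocation, assuming AIL_HOME is exported the way LAUNCH.sh exports it, is roughly:

python3 ${AIL_HOME}/bin/Update.py || echo -e "\t* Update Error"
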
@@ -411,6 +426,7 @@ function update_thirdparty
 }

 function launch_all {
+    update;
     launch_redis;
     launch_ardb;
     launch_logs;

@@ -424,7 +440,7 @@ function launch_all

     helptext;

-    options=("Redis" "Ardb" "Logs" "Queues" "Scripts" "Flask" "Killall" "Shutdown" "Update-config" "Update-thirdparty")
+    options=("Redis" "Ardb" "Logs" "Queues" "Scripts" "Flask" "Killall" "Shutdown" "Update" "Update-config" "Update-thirdparty")

     menu() {
         echo "What do you want to Launch?:"

@@ -475,6 +491,9 @@ function launch_all
             Shutdown)
                 shutdown;
                 ;;
+            Update)
+                update;
+                ;;
             Update-config)
                 checking_configuration "manual";
                 ;;

@@ -488,12 +507,16 @@ function launch_all
     exit
 }

+echo "$@"
+
 while [ "$1" != "" ]; do
     case $1 in
         -l | --launchAuto ) launch_all "automatic";
             ;;
         -k | --killAll ) killall;
             ;;
+        -u | --update ) update;
+            ;;
         -t | --thirdpartyUpdate ) update_thirdparty;
             ;;
         -c | --crawler ) launching_crawler;

@@ -502,6 +525,9 @@ while [ "$1" != "" ]; do
             ;;
         -h | --help ) helptext;
             exit
+            ;;
+        -kh | --khelp ) helptext;
+
             ;;
         * ) helptext
             exit 1

bin/Update.py (new executable file, 366 lines)

#!/usr/bin/env python3
# -*-coding:UTF-8 -*

"""
Update AIL
============================

Update AIL clone and fork

"""

import configparser
import os
import sys

import subprocess

def auto_update_enabled(cfg):
    auto_update = cfg.get('Update', 'auto_update')
    if auto_update == 'True' or auto_update == 'true':
        return True
    else:
        return False

# check if files are modify locally
def check_if_files_modified():
    process = subprocess.run(['git', 'ls-files' ,'-m'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if process.returncode == 0:
        modified_files = process.stdout
        if modified_files:
            print('Modified Files:')
            print('{}{}{}'.format(TERMINAL_BLUE, modified_files.decode(), TERMINAL_DEFAULT))
            return False
        else:
            return True
    else:
        print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
        sys.exit(1)

def repo_is_fork():
    print('Check if this repository is a fork:')
    process = subprocess.run(['git', 'ls-remote', '--tags'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    if process.returncode == 0:
        # remove url origin
        local_remote = process.stdout
        process = subprocess.run(['git', 'ls-remote' ,'--tags', AIL_REPO], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        if process.returncode == 0:
            ail_remote = process.stdout
            if local_remote == ail_remote:
                print(' This repository is a {}clone of {}{}'.format(TERMINAL_BLUE, AIL_REPO, TERMINAL_DEFAULT))
                return False
            else:
                print(' This repository is a {}fork{}'.format(TERMINAL_BLUE, TERMINAL_DEFAULT))
                print()
                return True
        else:
            print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
            aborting_update()
            sys.exit(0)
    else:
        print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
        aborting_update()
        sys.exit(0)

def is_upstream_created(upstream):
    process = subprocess.run(['git', 'remote', '-v'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if process.returncode == 0:
        output = process.stdout.decode()
        if upstream in output:
            return True
        else:
            return False
    else:
        print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
        aborting_update()
        sys.exit(0)

def create_fork_upstream(upstream):
    print('{}... Creating upstream ...{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
    print('git remote add {} {}'.format(upstream, AIL_REPO))
    process = subprocess.run(['git', 'remote', 'add', upstream, AIL_REPO], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if process.returncode == 0:
        print(process.stdout.decode())
        if is_upstream_created(upstream):
            print('Fork upstream created')
            print('{}... ...{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
        else:
            print('Fork not created')
            aborting_update()
            sys.exit(0)
    else:
        print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
        aborting_update()
        sys.exit(0)

def update_fork():
    print('{}... Updating fork ...{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
    if cfg.get('Update', 'update-fork') == 'True' or cfg.get('Update', 'update-fork') == 'true':
        upstream = cfg.get('Update', 'upstream')
        if not is_upstream_created(upstream):
            create_fork_upstream(upstream)
        print('{}git fetch {}:{}'.format(TERMINAL_YELLOW, upstream, TERMINAL_DEFAULT))
        process = subprocess.run(['git', 'fetch', upstream], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if process.returncode == 0:
            print(process.stdout.decode())
            print('{}git checkout master:{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
            process = subprocess.run(['git', 'checkout', 'master'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if process.returncode == 0:
                print(process.stdout.decode())
                print('{}git merge {}/master:{}'.format(TERMINAL_YELLOW, upstream, TERMINAL_DEFAULT))
                process = subprocess.run(['git', 'merge', '{}/master'.format(upstream)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                if process.returncode == 0:
                    print(process.stdout.decode())
                    print('{}... ...{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
                else:
                    print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
                    aborting_update()
                    sys.exit(1)
            else:
                print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
                aborting_update()
                sys.exit(0)
        else:
            print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
            aborting_update()
            sys.exit(0)

    else:
        print('{}Fork Auto-Update disabled in config file{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
        aborting_update()
        sys.exit(0)


def get_git_current_tag(current_version_path):
    try:
        with open(current_version_path, 'r') as version_content:
            version = version_content.read()
    except FileNotFoundError:
        version = 'v1.4'
        with open(current_version_path, 'w') as version_content:
            version_content.write(version)

    version = version.replace(" ", "").splitlines()
    return version[0]

def get_git_upper_tags_remote(current_tag, is_fork):
    if is_fork:
        process = subprocess.run(['git', 'tag'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if process.returncode == 0:
            list_all_tags = process.stdout.decode().splitlines()

            list_upper_tags = []
            if list_all_tags[-1][1:] == current_tag:
                list_upper_tags.append( (list_all_tags[-1], None) )
                return list_upper_tags
            for tag in list_all_tags:
                if float(tag[1:]) >= float(current_tag):
                    list_upper_tags.append( (tag, None) )
            return list_upper_tags
        else:
            print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
            aborting_update()
            sys.exit(0)
    else:
        process = subprocess.run(['git', 'ls-remote' ,'--tags'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        if process.returncode == 0:
            list_all_tags = process.stdout.decode().splitlines()
            last_tag = list_all_tags[-1].split('\trefs/tags/')
            last_commit = last_tag[0]
            last_tag = last_tag[1].split('^{}')[0]
            list_upper_tags = []
            if last_tag[1:] == current_tag:
                list_upper_tags.append( (last_tag, last_commit) )
                return list_upper_tags
            else:
                for mess_tag in list_all_tags:
                    commit, tag = mess_tag.split('\trefs/tags/')

                    # add tag with last commit
                    if float(tag.split('^{}')[0][1:]) >= float(current_tag):
                        if '^{}' in tag:
                            list_upper_tags.append( (tag.split('^{}')[0], commit) )
                # add last commit
                if last_tag not in list_upper_tags[-1][0]:
                    list_upper_tags.append( (last_tag, last_commit) )
                return list_upper_tags

        else:
            print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
            aborting_update()
            sys.exit(0)

def update_ail(current_tag, list_upper_tags_remote, current_version_path, is_fork):
    print('{}git checkout master:{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
    process = subprocess.run(['git', 'checkout', 'master'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    #process = subprocess.run(['ls'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if process.returncode == 0:
        print(process.stdout.decode())
        print()
        print('{}git pull:{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
        process = subprocess.run(['git', 'pull'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        if process.returncode == 0:
            output = process.stdout.decode()
            print(output)

            if len(list_upper_tags_remote) == 1:
                # additional update (between 2 commits on the same version)
                additional_update_path = os.path.join(os.environ['AIL_HOME'], 'update', current_tag, 'additional_update.sh')
                if os.path.isfile(additional_update_path):
                    print()
                    print('{}------------------------------------------------------------------'.format(TERMINAL_YELLOW))
                    print('- Launching Additional Update: -')
                    print('-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --{}'.format(TERMINAL_DEFAULT))
                    process = subprocess.run(['bash', additional_update_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    if process.returncode == 0:
                        output = process.stdout.decode()
                        print(output)
                    else:
                        print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
                        aborting_update()
                        sys.exit(1)

                print()
                print('{}**************** AIL Sucessfully Updated *****************{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
                print()
                exit(0)

            else:
                # map version with roll back commit
                list_update = []
                previous_commit = list_upper_tags_remote[0][1]
                for tuple in list_upper_tags_remote[1:]:
                    tag = tuple[0]
                    list_update.append( (tag, previous_commit) )
                    previous_commit = tuple[1]

                for update in list_update:
                    launch_update_version(update[0], update[1], current_version_path, is_fork)
                # Sucess
                print('{}**************** AIL Sucessfully Updated *****************{}'.format(TERMINAL_YELLOW, TERMINAL_DEFAULT))
                print()
                sys.exit(0)
        else:
            print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
            aborting_update()
            sys.exit(1)
    else:
        print('{}{}{}'.format(TERMINAL_RED, process.stderr.decode(), TERMINAL_DEFAULT))
        aborting_update()
        sys.exit(0)

def launch_update_version(version, roll_back_commit, current_version_path, is_fork):
    update_path = os.path.join(os.environ['AIL_HOME'], 'update', version, 'Update.sh')
    print()
    print('{}------------------------------------------------------------------'.format(TERMINAL_YELLOW))
    print('- Launching Update: {}{}{} -'.format(TERMINAL_BLUE, version, TERMINAL_YELLOW))
    print('-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --{}'.format(TERMINAL_DEFAULT))
    process = subprocess.Popen(['bash', update_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    while True:
        output = process.stdout.readline().decode()
        if output == '' and process.poll() is not None:
            break
        if output:
            print(output.strip())
    if process.returncode == 0:
        #output = process.stdout.decode()
        #print(output)

        with open(current_version_path, 'w') as version_content:
            version_content.write(version)

        print('{}-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --'.format(TERMINAL_YELLOW))
        print('- Sucessfully Updated: {}{}{} -'.format(TERMINAL_BLUE, version, TERMINAL_YELLOW))
        print('------------------------------------------------------------------{}'.format(TERMINAL_DEFAULT))
        print()
    else:
        #print(process.stdout.read().decode())
        print('{}{}{}'.format(TERMINAL_RED, process.stderr.read().decode(), TERMINAL_DEFAULT))
        print('------------------------------------------------------------------')
        print(' {}Update Error: {}{}{}'.format(TERMINAL_RED, TERMINAL_BLUE, version, TERMINAL_DEFAULT))
        print('------------------------------------------------------------------')
        if not is_fork:
            roll_back_update(roll_back_commit)
        else:
            aborting_update()
            sys.exit(1)

def roll_back_update(roll_back_commit):
    print('Rolling back to safe commit: {}{}{}'.format(TERMINAL_BLUE ,roll_back_commit, TERMINAL_DEFAULT))
    process = subprocess.run(['git', 'checkout', roll_back_commit], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if process.returncode == 0:
        output = process.stdout
        print(output)
        sys.exit(0)
    else:
        print(TERMINAL_RED+process.stderr.decode()+TERMINAL_DEFAULT)
        aborting_update()
        sys.exit(1)

def aborting_update():
    print()
    print('{}Aborting ...{}'.format(TERMINAL_RED, TERMINAL_DEFAULT))
    print('{}******************************************************************'.format(TERMINAL_RED))
    print('* AIL Not Updated *')
    print('******************************************************************{}'.format(TERMINAL_DEFAULT))
    print()

if __name__ == "__main__":

    TERMINAL_RED = '\033[91m'
    TERMINAL_YELLOW = '\33[93m'
    TERMINAL_BLUE = '\33[94m'
    TERMINAL_BLINK = '\33[6m'
    TERMINAL_DEFAULT = '\033[0m'

    AIL_REPO = 'https://github.com/CIRCL/AIL-framework.git'

    configfile = os.path.join(os.environ['AIL_HOME'], 'configs/update.cfg')
    if not os.path.exists(configfile):
        raise Exception('Unable to find the configuration file. \
                        Did you set environment variables? \
                        Or activate the virtualenv.')
    cfg = configparser.ConfigParser()
    cfg.read(configfile)

    current_version_path = os.path.join(os.environ['AIL_HOME'], 'update/current_version')

    print('{}******************************************************************'.format(TERMINAL_YELLOW))
    print('* Updating AIL ... *')
    print('******************************************************************{}'.format(TERMINAL_DEFAULT))

    if auto_update_enabled(cfg):
        if check_if_files_modified():
            is_fork = repo_is_fork()
            if is_fork:
                update_fork()

            current_tag = get_git_current_tag(current_version_path)
            print()
            print('Current Version: {}{}{}'.format( TERMINAL_YELLOW, current_tag, TERMINAL_DEFAULT))
            print()
            list_upper_tags_remote = get_git_upper_tags_remote(current_tag[1:], is_fork)
            # new realease
            if len(list_upper_tags_remote) > 1:
                print('New Releases:')
                if is_fork:
                    for upper_tag in list_upper_tags_remote:
                        print(' {}{}{}'.format(TERMINAL_BLUE, upper_tag[0], TERMINAL_DEFAULT))
                else:
                    for upper_tag in list_upper_tags_remote:
                        print(' {}{}{}: {}'.format(TERMINAL_BLUE, upper_tag[0], TERMINAL_DEFAULT, upper_tag[1]))
            print()
            update_ail(current_tag, list_upper_tags_remote, current_version_path, is_fork)

        else:
            print('Please, commit your changes or stash them before you can update AIL')
            aborting_update()
            sys.exit(0)
    else:
        print(' {}AIL Auto update is disabled{}'.format(TERMINAL_RED, TERMINAL_DEFAULT))
        aborting_update()
        sys.exit(0)

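Update.py decides whether an update is needed by comparing the tag stored in update/current_version with the tags reachable from the official repository; a rough manual equivalent of that check, assuming a standard clone, is:

cat ${AIL_HOME}/update/current_version            # e.g. v1.4, written by Update.py if the file is missing
git ls-remote --tags https://github.com/CIRCL/AIL-framework.git    # remote tags the updater compares against
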
Deleted file (16 lines): launcher for the logging screen

#!/bin/bash

set -e
set -x

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_LEVELDB" ] && echo "Needs the env var AIL_LEVELDB. Run the script from the virtual environment." && exit 1;

screen -dmS "Logging"
sleep 0.1
echo -e $GREEN"\t* Launching logging process"$DEFAULT
screen -S "Logging" -X screen -t "LogQueue" bash -c 'log_subscriber -p 6380 -c Queuing -l ../logs/; read x'
sleep 0.1
screen -S "Logging" -X screen -t "LogScript" bash -c 'log_subscriber -p 6380 -c Script -l ../logs/; read x'

Deleted file (29 lines): launcher for the LevelDB servers

#!/bin/bash

set -e
set -x

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_LEVELDB" ] && echo "Needs the env var AIL_LEVELDB. Run the script from the virtual environment." && exit 1;

lvdbhost='127.0.0.1'
lvdbdir="${AIL_HOME}/LEVEL_DB_DATA/"
nb_db=13

db_y=`date +%Y`
#Verify that a dir with the correct year exists, create it otherwise
if [ ! -d "$lvdbdir$db_y" ]; then
    mkdir -p "$db_y"
fi

screen -dmS "LevelDB"
sleep 0.1
echo -e $GREEN"\t* Launching Levels DB servers"$DEFAULT

#Launch a DB for each dir
for pathDir in $lvdbdir*/ ; do
    yDir=$(basename "$pathDir")
    sleep 0.1
    screen -S "LevelDB" -X screen -t "$yDir" bash -c 'redis-leveldb -H '$lvdbhost' -D '$pathDir'/ -P '$yDir' -M '$nb_db'; read x'
done

Deleted file (15 lines): launcher for the queues screen

#!/bin/bash

set -e
set -x

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_LEVELDB" ] && echo "Needs the env var AIL_LEVELDB. Run the script from the virtual environment." && exit 1;

screen -dmS "Queue"
sleep 0.1

echo -e $GREEN"\t* Launching all the queues"$DEFAULT
screen -S "Queue" -X screen -t "Queues" bash -c './launch_queues.py; read x'

Deleted file (23 lines): launcher for the Redis servers

#!/bin/bash

set -e
set -x

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_LEVELDB" ] && echo "Needs the env var AIL_LEVELDB. Run the script from the virtual environment." && exit 1;

conf_dir="${AIL_HOME}/configs/"

screen -dmS "Redis"
sleep 0.1
echo -e $GREEN"\t* Launching Redis servers"$DEFAULT
screen -S "Redis" -X screen -t "6379" bash -c '../redis/src/redis-server '$conf_dir'6379.conf ; read x'
sleep 0.1
screen -S "Redis" -X screen -t "6380" bash -c '../redis/src/redis-server '$conf_dir'6380.conf ; read x'
sleep 0.1
screen -S "Redis" -X screen -t "6381" bash -c '../redis/src/redis-server '$conf_dir'6381.conf ; read x'

# For Words and curves
sleep 0.1
screen -S "Redis" -X screen -t "6382" bash -c '../redis/src/redis-server '$conf_dir'6382.conf ; read x'

configs/update.cfg (new file, 4 lines)

[Update]
auto_update = True
upstream = upstream
update-fork = False

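These keys are read by bin/Update.py: auto_update gates the whole run, update-fork enables the fetch/merge from the official repository when the clone is a fork, and upstream names the git remote used for that. A hedged sketch of turning automatic updates off before launching (the sed call is only an illustration, not part of the commit):

sed -i 's/^auto_update = True/auto_update = False/' ${AIL_HOME}/configs/update.cfg
./bin/LAUNCH.sh -l    # Update.py then prints "AIL Auto update is disabled" and exits 0, so launching continues
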
update/v1.5/Update.py (new executable file, 227 lines)

#!/usr/bin/env python3
# -*-coding:UTF-8 -*

import os
import sys
import time
import redis
import configparser

if __name__ == '__main__':

    start_deb = time.time()

    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
    if not os.path.exists(configfile):
        raise Exception('Unable to find the configuration file. \
                        Did you set environment variables? \
                        Or activate the virtualenv.')
    cfg = configparser.ConfigParser()
    cfg.read(configfile)

    PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes")) + '/'

    r_serv_metadata = redis.StrictRedis(
        host=cfg.get("ARDB_Metadata", "host"),
        port=cfg.getint("ARDB_Metadata", "port"),
        db=cfg.getint("ARDB_Metadata", "db"),
        decode_responses=True)

    r_serv_tag = redis.StrictRedis(
        host=cfg.get("ARDB_Tags", "host"),
        port=cfg.getint("ARDB_Tags", "port"),
        db=cfg.getint("ARDB_Tags", "db"),
        decode_responses=True)

    r_serv_onion = redis.StrictRedis(
        host=cfg.get("ARDB_Onion", "host"),
        port=cfg.getint("ARDB_Onion", "port"),
        db=cfg.getint("ARDB_Onion", "db"),
        decode_responses=True)

    ## Update metadata ##
    print('Updating ARDB_Metadata ...')
    index = 0
    start = time.time()

    string_keys_to_rename = ['misp_events:{}*'.format(PASTES_FOLDER), 'hive_cases:{}*'.format(PASTES_FOLDER)]
    for key_to_rename in string_keys_to_rename:

        keys_to_rename = []
        for key in r_serv_metadata.scan_iter(key_to_rename):
            new_key = key.replace(PASTES_FOLDER, '', 1)
            keys_to_rename.append( (key, new_key) )
            index = index + 1
        for key, new_key in keys_to_rename:
            r_serv_metadata.rename(key, new_key)

        keys_to_rename = None

    set_keys_to_rename = ['tag:{}*'.format(PASTES_FOLDER), 'hash_paste:{}*'.format(PASTES_FOLDER), 'base64_paste:{}*'.format(PASTES_FOLDER), 'binary_paste:{}*'.format(PASTES_FOLDER), 'hexadecimal_paste:{}*'.format(PASTES_FOLDER), 'paste_regular_external_links:{}*'.format(PASTES_FOLDER), 'paste_onion_external_links:{}*'.format(PASTES_FOLDER), 'paste_children:{}*'.format(PASTES_FOLDER)]
    for key_to_rename in set_keys_to_rename:

        keys_to_remove = []
        keys_to_rename = []
        for key in r_serv_metadata.scan_iter(key_to_rename):
            new_key = key.replace(PASTES_FOLDER, '', 1)
            # a set with this key already exist
            if r_serv_metadata.exists(new_key):
                # save data
                for new_key_value in r_serv_metadata.smembers(key):
                    r_serv_metadata.sadd(new_key, new_key_value)
                keys_to_remove.append(key)
            else:
                keys_to_rename.append( (key, new_key) )
            index = index + 1
        for key in keys_to_remove:
            r_serv_metadata.delete(key)
        for key, new_key in keys_to_rename:
            r_serv_metadata.rename(key, new_key)

        keys_to_remove = None
        keys_to_rename = None


    zset_keys_to_rename = ['nb_seen_hash:*', 'base64_hash:*', 'binary_hash:*']
    for key_to_rename in zset_keys_to_rename:

        keys_to_remove = []
        zkeys_to_remove = []
        keys_to_add = []
        for key in r_serv_metadata.scan_iter(key_to_rename):
            temp = []
            for zset_key, value in r_serv_metadata.zscan_iter(key, '*{}*'.format(PASTES_FOLDER)):
                new_key = zset_key.replace(PASTES_FOLDER, '', 1)
                index = index +1
                temp.append((key, zset_key))
                keys_to_add.append((key, new_key, value))
            if 0 < len(temp) < r_serv_metadata.zcard(key):
                zkeys_to_remove.extend(temp)
            else:
                keys_to_remove.append(key)
        for key in keys_to_remove:
            r_serv_metadata.delete(key)
        for key, zset_key in zkeys_to_remove:
            r_serv_metadata.zrem(key, zset_key)
        for key, new_key, value in keys_to_add:
            r_serv_metadata.zincrby(key, new_key, int(value))
        keys_to_remove = None
        zkeys_to_remove = None
        keys_to_add = None

    set_keys_to_rename = ['paste_children:*']
    for key_to_rename in set_keys_to_rename:
        keys_to_remove = []
        skeys_to_remove = []
        keys_to_add = []
        for key in r_serv_metadata.scan_iter(key_to_rename):
            temp = []
            for set_key in r_serv_metadata.sscan_iter(key, '*{}*'.format(PASTES_FOLDER)):
                new_key = set_key.replace(PASTES_FOLDER, '', 1)
                index = index +1
                temp.append((key, set_key))
                keys_to_add.append((key, new_key))
            if 0 < len(temp) < r_serv_metadata.scard(key):
                skeys_to_remove.extend(temp)
            else:
                keys_to_remove.append(key)
        for key in keys_to_remove:
            r_serv_metadata.delete(key)
        for key, set_key in skeys_to_remove:
            r_serv_metadata.srem(key, set_key)
        for key, new_key in keys_to_add:
            r_serv_metadata.sadd(key, new_key)
        keys_to_remove = None
        skeys_to_remove = None
        keys_to_add = None

    hset_keys_to_rename = ['paste_metadata:{}*'.format(PASTES_FOLDER)]
    for key_to_rename in hset_keys_to_rename:

        keys_to_rename = []
        for key in r_serv_metadata.scan_iter(key_to_rename):
            new_key = key.replace(PASTES_FOLDER, '', 1)
            # a hset with this key already exist
            keys_to_rename.append((key, new_key))
            index = index + 1
        for key, new_key in keys_to_rename:
            r_serv_metadata.rename(key, new_key)
        keys_to_rename = None

    # to verify 120/100 try with scan
    hset_keys_to_rename = ['paste_metadata:*']
    for key_to_rename in hset_keys_to_rename:
        for key in r_serv_metadata.scan_iter(key_to_rename):
            father = r_serv_metadata.hget(key, 'father')
            super_father = r_serv_metadata.hget(key, 'super_father')

            if father:
                if PASTES_FOLDER in father:
                    index = index + 1
                    r_serv_metadata.hdel(key, 'father')
                    r_serv_metadata.hset(key, 'father', father.replace(PASTES_FOLDER, '', 1))

            if super_father:
                if PASTES_FOLDER in super_father:
                    index = index + 1
                    r_serv_metadata.hdel(key, 'super_father')
                    r_serv_metadata.hset(key, 'super_father', super_father.replace(PASTES_FOLDER, '', 1))

    keys_to_rename = None


    end = time.time()

    print('Updating ARDB_Metadata Done => {} paths: {} s'.format(index, end - start))

    print()
    print('Updating ARDB_Tags ...')
    index = 0
    start = time.time()

    tags_list = r_serv_tag.smembers('list_tags')
    for tag in tags_list:
        res = False

        list_pastes = r_serv_tag.sscan(tag, 0, '*{}*'.format(PASTES_FOLDER), 1000)
        while list_pastes[1]:
            for paste in list_pastes[1]:
                r_serv_tag.srem(tag, paste)
                r_serv_tag.sadd(tag, paste.replace(PASTES_FOLDER, '', 1))
                index = index + 1

            list_pastes = r_serv_tag.sscan(tag, 0, '*{}*'.format(PASTES_FOLDER), 1000)

    end = time.time()
    print('Updating ARDB_Tags Done => {} paths: {} s'.format(index, end - start))

    print()
    print('Updating ARDB_Onion ...')
    index = 0
    start = time.time()

    hset_keys_to_rename = ['onion_metadata:*']
    for key_to_rename in hset_keys_to_rename:
        for key in r_serv_onion.scan_iter(key_to_rename):
            list_data = r_serv_onion.hscan(key, 0, '*{}*'.format(PASTES_FOLDER), 1000)
            while list_data[1]:
                for hash_key, value in list_data[1].items():
                    r_serv_onion.hdel(key, hash_key)
                    new_hash = hash_key.replace(PASTES_FOLDER, '', 1)
                    new_value = value.replace(PASTES_FOLDER, '', 1)
                    index = index +1
                    r_serv_onion.hset(key, new_hash, new_value)

                list_data = r_serv_onion.hscan(key, 0, '*{}*'.format(PASTES_FOLDER), 1000)

    for elem in r_serv_onion.smembers('onion_crawler_queue'):
        if PASTES_FOLDER in elem:
            r_serv_onion.srem('onion_crawler_queue', elem)
            r_serv_onion.sadd('onion_crawler_queue', elem.replace(PASTES_FOLDER, '', 1))
            index = index +1


    end = time.time()
    print('Updating ARDB_Onion Done => {} paths: {} s'.format(index, end - start))
    print()
    print('Done in {} s'.format(end - start_deb))

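The whole script applies one pattern to several key families: every ARDB key or member that embeds the absolute PASTES folder is rewritten to a path relative to it. For a directly renamed family such as paste_metadata the effect on a single key is equivalent to the following redis-cli call, where the paste path and the 6382 ARDB port are illustrative assumptions, not values taken from the commit:

# before: the key name embeds the absolute pastes folder
# after:  the folder prefix is stripped, leaving the relative paste path
redis-cli -p 6382 RENAME "paste_metadata:/opt/AIL-framework/PASTES/archive/2019/01/01/example.gz" "paste_metadata:archive/2019/01/01/example.gz"
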
update/v1.5/Update.sh (new executable file, 43 lines)

#!/bin/bash

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;

export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH

echo "Killing all screens ..."
bash -c "bash ${AIL_BIN}/LAUNCH.sh -k"
echo ""
echo "Starting ARDB ..."
bash -c "bash ${AIL_BIN}/launch_ardb.sh"

flag_ardb=true
while $flag_ardb; do
    sleep 1
    bash -c "bash ${AIL_BIN}/check_ardb.sh"
    if [ $? == 0 ]; then
        flag_ardb=false
    else
        echo "ARDB not available, waiting 5s before retry"
        sleep 5
    fi
done

echo ""
echo "Fixing ARDB ..."
echo ""
bash -c "python ${AIL_HOME}/update/v1.5/Update.py"

echo "Shutting down ARDB ..."
bash -c "bash ${AIL_BIN}/LAUNCH.sh -k"

echo ""

exit 0

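In normal operation this script is not run by hand: bin/Update.py launches update/<version>/Update.sh for every release tag above the current one and records the new version in update/current_version on success. A manual run, assuming the standard AILENV virtualenv location and the AIL environment variables already loaded, would look like:

. ${AIL_HOME}/AILENV/bin/activate      # assumption: virtualenv created at the default AILENV path
bash ${AIL_HOME}/update/v1.5/Update.sh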