mirror of https://github.com/CIRCL/AIL-framework
chg: [update] add Redis/ARDB update scripts + add background update
parent 1ed9468881
commit 5c4db5f2f1
@@ -465,7 +465,7 @@ function launch_all {
        if [[ "${choices[i]}" ]]; then
            case ${options[i]} in
                Redis)
-                   launch_redis
+                   launch_redis;
                    ;;
                Ardb)
                    launch_ardb;
@@ -507,12 +507,16 @@ function launch_all {
    exit
}

-echo "$@"
+#echo "$@"

while [ "$1" != "" ]; do
    case $1 in
        -l | --launchAuto ) launch_all "automatic";
            ;;
        -lr | --launchRedis ) launch_redis;
            ;;
        -la | --launchARDB ) launch_ardb;
            ;;
        -k | --killAll ) killall;
            ;;
+       -u | --update ) update;
@@ -259,7 +259,7 @@ def launch_update_version(version, roll_back_commit, current_version_path, is_fo
    print('{}------------------------------------------------------------------'.format(TERMINAL_YELLOW))
    print('- Launching Update: {}{}{} -'.format(TERMINAL_BLUE, version, TERMINAL_YELLOW))
    print('-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --{}'.format(TERMINAL_DEFAULT))
-   process = subprocess.Popen(['bash', update_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+   process = subprocess.Popen(['unbuffer', 'bash', update_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    while True:
        output = process.stdout.readline().decode()
        if output == '' and process.poll() is not None:
@@ -0,0 +1,58 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*

"""
Update AIL
============================

Update AIL clone and fork

"""

import os
import sys
import redis
import subprocess
import configparser

if __name__ == "__main__":

    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
    if not os.path.exists(configfile):
        raise Exception('Unable to find the configuration file. \
                        Did you set environment variables? \
                        Or activate the virtualenv.')
    cfg = configparser.ConfigParser()
    cfg.read(configfile)

    r_serv = redis.StrictRedis(
        host=cfg.get("ARDB_DB", "host"),
        port=cfg.getint("ARDB_DB", "port"),
        db=cfg.getint("ARDB_DB", "db"),
        decode_responses=True)

    ail_version = r_serv.get('ail:version')
    if ail_version is None:
        sys.exit(0)
    else:
        if ail_version == 'v1.5':
            # launch each background update if its completion flag is missing or not set to 1
            onions_update_status = r_serv.get('v1.5:onions')
            if onions_update_status is None or int(onions_update_status) != 1:
                update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v1.4', 'Update-ARDB_Onions.py')
                process = subprocess.run(['unbuffer', 'python', update_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            metadata_update_status = r_serv.get('v1.5:metadata')
            if metadata_update_status is None or int(metadata_update_status) != 1:
                pass
                #launch update
            tags_update_status = r_serv.get('v1.5:tags')
            if tags_update_status is None or int(tags_update_status) != 1:
                pass
                #launch update
            tags_background_update_status = r_serv.get('v1.5:tags_background')
            if tags_background_update_status is None or int(tags_background_update_status) != 1:
                pass
                #launch update
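The metadata, tags and tags_background branches in the script above are still stubs (pass  #launch update). A minimal sketch of how they could reuse the onions branch's pattern; the helper name, target directory and script filenames below are assumptions, not part of this commit:

# hypothetical helper mirroring the onions branch above (names and paths assumed)
import os
import subprocess

def launch_background_update(script_name):
    # directory assumed to match the onions branch
    update_file = os.path.join(os.environ['AIL_HOME'], 'update', 'v1.4', script_name)
    # 'unbuffer' keeps the child's output line-buffered, as in the onions branch
    return subprocess.run(['unbuffer', 'python', update_file],
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)

# e.g. launch_background_update('Update-ARDB_Metadata.py')  # assumed filename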
@@ -39,7 +39,7 @@ sudo apt-get install p7zip-full -y
# REDIS #
test ! -d redis/ && git clone https://github.com/antirez/redis.git
pushd redis/
-git checkout 3.2
+git checkout 5.0
make
popd
@@ -0,0 +1,18 @@
#!/bin/bash

echo "Killing all screens ..."
bash -c "bash ../../bin/LAUNCH.sh -k"
echo ""
echo "Updating ARDB ..."
pushd ../../
rm -r ardb
git clone https://github.com/yinqiwen/ardb.git
pushd ardb/
git checkout 0.10 || exit 1
make || exit 1
popd
popd
echo "ARDB Updated"
echo ""

exit 0
@@ -0,0 +1,27 @@
#!/bin/bash

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_BIN. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;

export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH

echo "Killing all screens ..."
bash -c "bash ${AIL_BIN}/LAUNCH.sh -k"
echo ""
echo "Updating Redis ..."
pushd $AIL_HOME/redis
git pull || exit 1
git checkout 5.0 || exit 1
make || exit 1
popd
echo "Redis Updated"
echo ""

exit 0
@@ -10,27 +10,35 @@ import configparser

def update_hash_item(has_type):
    #get all hash items:
    #all_base64 = r_serv_tag.smembers('infoleak:automatic-detection=\"{}\"'.format(has_type))
-    all_hash_items = r_serv_tag.smembers('infoleak:automatic-detection=\"{}\":20180925'.format(has_type))
+    all_hash_items = r_serv_tag.smembers('infoleak:automatic-detection=\"{}\"'.format(has_type))
    for item_path in all_hash_items:
        item_path = '/home/aurelien/git/python3/AIL-framework/PASTES/archive/pastebin.com_pro/2018/09/25/Fu9akJaz.gz'
        if PASTES_FOLDER in item_path:
            base64_key = '{}_paste:{}'.format(has_type, item_path)
            hash_key = 'hash_paste:{}'.format(item_path)

            ## TODO: catch error
            if r_serv_metadata.exists(base64_key):
                res = r_serv_metadata.renamenx(base64_key, base64_key.replace(PASTES_FOLDER, '', 1))
                ## TODO: key merge
                if not res:
                    new_base64_key = base64_key.replace(PASTES_FOLDER, '', 1)
                    res = r_serv_metadata.renamenx(base64_key, new_base64_key)
                    print(res)
                    if res == 0:
                        print('same key, double name: {}'.format(item_path))
                        # fusion
                        all_key = r_serv_metadata.smembers(base64_key)
                        for elem in all_key:
                            r_serv_metadata.sadd(new_base64_key, elem)
                            r_serv_metadata.srem(base64_key, elem)

            if r_serv_metadata.exists(hash_key):
                ## TODO: catch error
                res = r_serv_metadata.renamenx(hash_key, hash_key.replace(PASTES_FOLDER, '', 1))
                ## TODO: key merge
                if not res:
                    new_hash_key = hash_key.replace(PASTES_FOLDER, '', 1)
                    res = r_serv_metadata.renamenx(hash_key, new_hash_key)
                    print(res)
                    if res == 0:
                        print('same key, double name: {}'.format(item_path))
                        # fusion
                        all_key = r_serv_metadata.smembers(hash_key)
                        for elem in all_key:
                            r_serv_metadata.sadd(new_hash_key, elem)
                            r_serv_metadata.srem(hash_key, elem)

if __name__ == '__main__':
@@ -46,6 +54,12 @@ if __name__ == '__main__':

    PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes")) + '/'

+    r_serv = redis.StrictRedis(
+        host=cfg.get("ARDB_DB", "host"),
+        port=cfg.getint("ARDB_DB", "port"),
+        db=cfg.getint("ARDB_DB", "db"),
+        decode_responses=True)
+
    r_serv_metadata = redis.StrictRedis(
        host=cfg.get("ARDB_Metadata", "host"),
        port=cfg.getint("ARDB_Metadata", "port"),
@@ -84,8 +98,11 @@ if __name__ == '__main__':
        old_item_metadata = 'paste_metadata:{}'.format(item_path)
        item_path = item_path.replace(PASTES_FOLDER, '', 1)
        new_item_metadata = 'paste_metadata:{}'.format(item_path)
        ## TODO: catch error
        res = r_serv_metadata.renamenx(old_item_metadata, new_item_metadata)
+        #key already exist
+        if res == 0:
+            r_serv_metadata.delete(old_item_metadata)

        # update domain port
        domain = r_serv_metadata.hget(new_item_metadata, 'domain')
        if domain:
@@ -244,3 +261,5 @@ if __name__ == '__main__':

    print('Updating ARDB_Metadata Done => {} paths: {} s'.format(index, end - start))
    print()
+
+    r_serv.set('v1.5:metadata', 1)
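The two near-identical blocks in update_hash_item above repeat the same rename-or-merge dance for base64_key and hash_key. A sketch of how that pattern could be factored into one helper (hypothetical, not part of the commit; assumes the same redis-py client and ARDB set keys):

# hypothetical refactor of the rename-or-merge pattern used in update_hash_item above
def rename_or_merge_set(r_serv_metadata, old_key, pastes_folder):
    # caller is expected to check r_serv_metadata.exists(old_key) first, as the original does
    new_key = old_key.replace(pastes_folder, '', 1)
    # RENAMENX only succeeds when new_key does not exist yet
    if not r_serv_metadata.renamenx(old_key, new_key):
        # same item indexed under both names: merge members, then drop the old set
        for elem in r_serv_metadata.smembers(old_key):
            r_serv_metadata.sadd(new_key, elem)
            r_serv_metadata.srem(old_key, elem)
    return new_key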
@@ -27,9 +27,9 @@ def get_domain_root_from_paste_childrens(item_father, domain):
    for item_path in item_children:
        # remove absolute_path
        if PASTES_FOLDER in item_path:
-            #r_serv_metadata.srem('paste_children:{}'.format(item_father), item_path)
+            r_serv_metadata.srem('paste_children:{}'.format(item_father), item_path)
            item_path = item_path.replace(PASTES_FOLDER, '', 1)
-            #r_serv_metadata.sadd('paste_children:{}'.format(item_father), item_path)
+            r_serv_metadata.sadd('paste_children:{}'.format(item_father), item_path)
        if domain in item_path:
            domain_root = item_path
    return domain_root
@@ -49,6 +49,12 @@ if __name__ == '__main__':

    PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes")) + '/'

+    r_serv = redis.StrictRedis(
+        host=cfg.get("ARDB_DB", "host"),
+        port=cfg.getint("ARDB_DB", "port"),
+        db=cfg.getint("ARDB_DB", "db"),
+        decode_responses=True)
+
    r_serv_metadata = redis.StrictRedis(
        host=cfg.get("ARDB_Metadata", "host"),
        port=cfg.getint("ARDB_Metadata", "port"),
@@ -99,7 +105,6 @@ if __name__ == '__main__':
        for date_history in all_onion_history:
            print('--------')
            print('onion_history:{}:{}'.format(onion_domain, date_history))
-            #item_father = r_serv_onion.lpop('onion_history:{}:{}'.format(onion_domain, date_history))
            item_father = r_serv_onion.lrange('onion_history:{}:{}'.format(onion_domain, date_history), 0, 0)
            print('item_father: {}'.format(item_father))
            item_father = item_father[0]
@@ -120,23 +125,9 @@ if __name__ == '__main__':
            r_serv_onion.hdel('onion_metadata:{}'.format(onion_domain), 'last_seen')

-
-    '''
-    # update crawler queue
-    for elem in r_serv_onion.smembers('onion_crawler_queue'):
-        if PASTES_FOLDER in elem:
-            r_serv_onion.srem('onion_crawler_queue', elem)
-            r_serv_onion.sadd('onion_crawler_queue', elem.replace(PASTES_FOLDER, '', 1))
-            index = index +1
-    for elem in r_serv_onion.smembers('onion_crawler_priority_queue'):
-        if PASTES_FOLDER in elem:
-            r_serv_onion.srem('onion_crawler_queue', elem)
-            r_serv_onion.sadd('onion_crawler_queue', elem.replace(PASTES_FOLDER, '', 1))
-            index = index +1
-
-    '''

    end = time.time()
    print('Updating ARDB_Onion Done => {} paths: {} s'.format(index, end - start))
    print()
    print('Done in {} s'.format(end - start_deb))
+
+    r_serv.set('v1.5:onions', 1)
@@ -29,6 +29,12 @@ if __name__ == '__main__':

    PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes")) + '/'

+    r_serv = redis.StrictRedis(
+        host=cfg.get("ARDB_DB", "host"),
+        port=cfg.getint("ARDB_DB", "port"),
+        db=cfg.getint("ARDB_DB", "db"),
+        decode_responses=True)
+
    r_serv_metadata = redis.StrictRedis(
        host=cfg.get("ARDB_Metadata", "host"),
        port=cfg.getint("ARDB_Metadata", "port"),
@@ -121,3 +127,5 @@ if __name__ == '__main__':


    print('Updating ARDB_Tags Done => {} paths: {} s'.format(index, end - start))
+
+    r_serv.set('v1.5:tags', 1)
@@ -29,6 +29,12 @@ if __name__ == '__main__':

    PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes")) + '/'

+    r_serv = redis.StrictRedis(
+        host=cfg.get("ARDB_DB", "host"),
+        port=cfg.getint("ARDB_DB", "port"),
+        db=cfg.getint("ARDB_DB", "db"),
+        decode_responses=True)
+
    r_serv_metadata = redis.StrictRedis(
        host=cfg.get("ARDB_Metadata", "host"),
        port=cfg.getint("ARDB_Metadata", "port"),
@@ -68,3 +74,5 @@ if __name__ == '__main__':


    print('Updating ARDB_Tags Done: {} s'.format(end - start))
+
+    r_serv.set('v1.5:tags_background', 1)
@@ -0,0 +1,60 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*

import os
import sys
import time
import redis
import configparser

if __name__ == '__main__':

    start_deb = time.time()

    configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
    if not os.path.exists(configfile):
        raise Exception('Unable to find the configuration file. \
                        Did you set environment variables? \
                        Or activate the virtualenv.')
    cfg = configparser.ConfigParser()
    cfg.read(configfile)

    PASTES_FOLDER = os.path.join(os.environ['AIL_HOME'], cfg.get("Directories", "pastes")) + '/'

    r_serv = redis.StrictRedis(
        host=cfg.get("ARDB_DB", "host"),
        port=cfg.getint("ARDB_DB", "port"),
        db=cfg.getint("ARDB_DB", "db"),
        decode_responses=True)

    r_serv_onion = redis.StrictRedis(
        host=cfg.get("ARDB_Onion", "host"),
        port=cfg.getint("ARDB_Onion", "port"),
        db=cfg.getint("ARDB_Onion", "db"),
        decode_responses=True)

    print()
    print('Updating ARDB_Onion ...')
    index = 0
    start = time.time()

    # update crawler queue: strip the absolute PASTES_FOLDER prefix from queued items
    for elem in r_serv_onion.smembers('onion_crawler_queue'):
        if PASTES_FOLDER in elem:
            r_serv_onion.srem('onion_crawler_queue', elem)
            r_serv_onion.sadd('onion_crawler_queue', elem.replace(PASTES_FOLDER, '', 1))
            index = index + 1
    # same cleanup for the priority queue
    for elem in r_serv_onion.smembers('onion_crawler_priority_queue'):
        if PASTES_FOLDER in elem:
            r_serv_onion.srem('onion_crawler_priority_queue', elem)
            r_serv_onion.sadd('onion_crawler_priority_queue', elem.replace(PASTES_FOLDER, '', 1))
            index = index + 1

    end = time.time()
    print('Updating ARDB_Onion Done => {} paths: {} s'.format(index, end - start))
    print()

    # Set current ail version
    r_serv.set('ail:version', 'v1.5')

    print('Done in {} s'.format(end - start_deb))
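Each v1.5 update script above records its completion as a flag in the ARDB_DB database (v1.5:onions, v1.5:metadata, v1.5:tags, v1.5:tags_background), and the background onion script sets ail:version. A small verification sketch reusing the connection pattern from the scripts above (not part of the commit):

# verification sketch: list the flags written by the v1.5 update scripts
import os
import configparser
import redis

configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
cfg = configparser.ConfigParser()
cfg.read(configfile)

r_serv = redis.StrictRedis(host=cfg.get("ARDB_DB", "host"),
                           port=cfg.getint("ARDB_DB", "port"),
                           db=cfg.getint("ARDB_DB", "db"),
                           decode_responses=True)

print('ail:version =', r_serv.get('ail:version'))  # expected 'v1.5' after the update
for flag in ('v1.5:onions', 'v1.5:metadata', 'v1.5:tags', 'v1.5:tags_background'):
    print(flag, '=', r_serv.get(flag))  # 1 once the corresponding script has run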
@@ -0,0 +1,29 @@
#!/bin/bash

[ -z "$AIL_HOME" ] && echo "Needs the env var AIL_HOME. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_REDIS" ] && echo "Needs the env var AIL_REDIS. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_ARDB" ] && echo "Needs the env var AIL_ARDB. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_BIN" ] && echo "Needs the env var AIL_BIN. Run the script from the virtual environment." && exit 1;
[ -z "$AIL_FLASK" ] && echo "Needs the env var AIL_FLASK. Run the script from the virtual environment." && exit 1;

export PATH=$AIL_HOME:$PATH
export PATH=$AIL_REDIS:$PATH
export PATH=$AIL_ARDB:$PATH
export PATH=$AIL_BIN:$PATH
export PATH=$AIL_FLASK:$PATH

echo ""
bash -c "bash ${AIL_HOME}/update/bin/Update_Redis.sh"
#bash -c "bash ${AIL_HOME}/update/bin/Update_ARDB.sh"

echo ""
echo "Fixing ARDB ..."
echo ""
bash -c "unbuffer python ${AIL_HOME}/update/v1.5/Update.py"

echo "Shutting down ARDB ..."
bash -c "bash ${AIL_BIN}/LAUNCH.sh -k"

echo ""

exit 0