#!/usr/bin/env python3
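"""Start, stop, and check the backend databases used by Lookyloo: the Redis
'cache' and 'indexing' instances, plus the optional Kvrocks 'full_index'."""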

from __future__ import annotations

import argparse
import os
import time

from pathlib import Path
from subprocess import Popen

from redis import Redis
from redis.exceptions import ConnectionError

from lookyloo.default import get_homedir, get_socket_path, get_config

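# Each backend exposes a unix socket; it is considered running when a Redis
# client connected to that socket gets an answer to PING.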
def check_running(name: str) -> bool:
    socket_path = get_socket_path(name)
    if not os.path.exists(socket_path):
        return False
    try:
        r = Redis(unix_socket_path=socket_path)
        return bool(r.ping())
    except ConnectionError:
        return False

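# Every backend gets a launch_*/shutdown_* pair: launch spawns the run script
# shipped in the backend's storage directory (unless it is already running),
# shutdown connects to its socket and asks the server to exit.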
def launch_cache(storage_directory: Path | None = None) -> None:
    if not storage_directory:
        storage_directory = get_homedir()
    if not check_running('cache'):
        Popen(["./run_redis.sh"], cwd=(storage_directory / 'cache'))


def shutdown_cache(storage_directory: Path | None = None) -> None:
    if not storage_directory:
        storage_directory = get_homedir()
    r = Redis(unix_socket_path=get_socket_path('cache'))
    r.shutdown(save=True)
    print('Redis cache database shutdown.')

def launch_indexing(storage_directory: Path | None = None) -> None:
    if not storage_directory:
        storage_directory = get_homedir()
    if not check_running('indexing'):
        Popen(["./run_redis.sh"], cwd=(storage_directory / 'indexing'))


def shutdown_indexing(storage_directory: Path | None = None) -> None:
    if not storage_directory:
        storage_directory = get_homedir()
    r = Redis(unix_socket_path=get_socket_path('indexing'))
    r.shutdown(save=True)
    print('Redis indexing database shutdown.')

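# The optional full index lives in Kvrocks rather than Redis; Kvrocks speaks
# the Redis protocol, so the same client and socket helpers can drive it.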
def launch_full_index(storage_directory: Path | None = None) -> None:
    if not storage_directory:
        storage_directory = get_homedir()
    if not check_running('full_index'):
        Popen(["./run_kvrocks.sh"], cwd=(storage_directory / 'full_index'))


def shutdown_full_index(storage_directory: Path | None = None) -> None:
    if not storage_directory:
        storage_directory = get_homedir()
    r = Redis(unix_socket_path=get_socket_path('full_index'))
    r.shutdown()
    print('Kvrocks full indexing database shutdown.')

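# Orchestration helpers used by the command line entry point below: start or
# stop every backend, honouring the 'index_everything' setting for the full index.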
def launch_all() -> None:
    launch_cache()
    launch_indexing()
    if get_config('generic', 'index_everything'):
        launch_full_index()

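# check_all blocks until every backend answers (or, with stop=True, until none
# does), printing a line per second for anything it is still waiting on.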
def check_all(stop: bool = False) -> None:
    backends: dict[str, bool] = {'cache': False, 'indexing': False}
    if get_config('generic', 'index_everything'):
        backends['full_index'] = False
    while True:
        for db_name in backends:
            try:
                backends[db_name] = check_running(db_name)
            except Exception:
                backends[db_name] = False
        if stop:
            # Stopping: done once no backend answers anymore.
            if not any(running for running in backends.values()):
                break
        else:
            # Starting: done once every backend answers.
            if all(running for running in backends.values()):
                break
        for db_name, running in backends.items():
            if not stop and not running:
                print(f"Waiting on {db_name} to start")
            if stop and running:
                print(f"Waiting on {db_name} to stop")
        time.sleep(1)

def stop_all() -> None:
    shutdown_cache()
    shutdown_indexing()
    if get_config('generic', 'index_everything'):
        shutdown_full_index()

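# Command line entry point. Typical invocations (the script name below is an
# assumption, adjust it to however this file is named in the repository):
#   ./run_backend.py --start   # launch everything, then wait until it all answers
#   ./run_backend.py --stop    # ask every backend to shut down
#   ./run_backend.py --status  # wait until every backend is up, reporting progress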
def main() -> None:
    parser = argparse.ArgumentParser(description='Manage backend DBs.')
    parser.add_argument("--start", action='store_true', default=False, help="Start all")
    parser.add_argument("--stop", action='store_true', default=False, help="Stop all")
    # --status defaults to True, so a bare invocation falls through to check_all().
    parser.add_argument("--status", action='store_true', default=True, help="Show status")
    args = parser.parse_args()

    if args.start:
        launch_all()
    if args.stop:
        stop_all()
    if not args.stop and args.status:
        check_all()

if __name__ == '__main__':
    main()