websoft9/appmanage/api/service/manage.py

1256 lines
45 KiB
Python
Raw Normal View History

2023-03-15 08:59:06 +08:00
import os
import io
import sys
import platform
import shutil
import time
import subprocess
2023-05-11 10:46:13 +08:00
import requests
2023-03-15 08:59:06 +08:00
import json
import datetime
import socket
2023-03-21 15:14:35 +08:00
import re
from threading import Thread
2023-03-24 15:35:58 +08:00
from api.utils import shell_execute, docker, const
from api.model.app import App
2023-03-07 16:55:23 +08:00
from api.model.response import Response
2023-04-18 14:04:51 +08:00
from api.model.config import Config
2023-04-14 14:36:37 +08:00
from api.model.status_reason import StatusReason
2023-03-28 17:16:07 +08:00
from api.utils.common_log import myLogger
2023-04-04 10:22:05 +08:00
from redis import Redis
2023-04-04 17:25:23 +08:00
from rq import Queue, Worker, Connection
2023-06-13 09:09:36 +08:00
from rq.registry import StartedJobRegistry, FinishedJobRegistry, DeferredJobRegistry, FailedJobRegistry, ScheduledJobRegistry, CanceledJobRegistry
2023-04-14 16:41:43 +08:00
from api.exception.command_exception import CommandException
2023-06-17 15:45:16 +08:00
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger,CronTrigger
2023-04-04 10:22:05 +08:00
# Redis connection for the RQ job queue (hostname and port of the Redis container).
redis_conn = Redis(host='websoft9-redis', port=6379)
# RQ queue backed by the Redis connection above; jobs time out after 3600 s.
q = Queue(connection=redis_conn, default_timeout=3600)
2023-06-17 15:45:16 +08:00
def auto_update():
    """Download and run the app-store update script (output discarded)."""
    shell_execute.execute_command_output_all("wget https://websoft9.github.io/StackHub/install/update_appstore.sh && bash update_appstore.sh 1>/dev/null 2>&1")


def test():
    """Debug helper: append a marker line to /tmp/xuweitest. Not scheduled."""
    shell_execute.execute_command_output_all("echo 1111 >> /tmp/xuweitest")


scheduler = BackgroundScheduler()
# Run the app-store auto update once a day at 01:00.
# Fix: the intended cron job was commented out while a leftover debug job ran
# test() every minute; schedule the real job and drop the debug one.
scheduler.add_job(auto_update, CronTrigger(hour=1, minute=0))
2023-06-09 12:07:43 +08:00
# Fetch a text file from the websoft9 GitHub Pages site.
def get_github_content(repo, path):
    """Return the UTF-8 text of ``https://websoft9.github.io/<repo>/<path>``.

    Args:
        repo: repository name under websoft9.github.io.
        path: file path within that repository.

    Returns:
        The response body decoded as UTF-8 text.
    """
    url = 'https://websoft9.github.io/{repo}/{path}'.format(repo=repo, path=path)
    # Fix: a timeout prevents the worker from hanging forever on a stalled
    # connection (requests has no default timeout).
    response = requests.get(url, timeout=30)
    response.encoding = 'utf-8'
    return response.text
2023-06-17 15:45:16 +08:00
def AppAutoUpdate(auto_update):
    """Enable or disable the app-store auto-update scheduler.

    Returns a (Chinese) status message describing the new state.
    """
    if not auto_update:
        scheduler.shutdown()
        return "软件商店自动更新已经关闭"
    scheduler.start()
    return "软件商店自动更新已经开启"
2023-06-14 14:42:31 +08:00
# Update the local app store from GitHub.
def AppStoreUpdate():
    """Update the local app store when a newer version exists on GitHub.

    Returns:
        The new version's changelog entries as a list of lines, or None when
        the local store is already up to date.
    """
    local_path = '/data/library/install/version.json'
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['VERSION']
    except Exception:
        # Fix: narrowed the bare `except:` and dropped the dead
        # `local_version = "0"` pre-assignment. A missing/corrupt local
        # version file forces an update by assuming a very old version.
        local_version = "0.1.0"
    repo = 'docker-library'
    version_contents = get_github_content(repo, 'install/version.json')
    version = json.loads(version_contents)['VERSION']
    if compared_version(local_version, version) == -1:
        content = []
        change_log_contents = get_github_content(repo, 'CHANGELOG.md')
        # The newest release is the first "## " section; its body starts
        # after the heading line.
        change_log = change_log_contents.split('## ')[1].split('\n')
        for change in change_log[1:]:
            if change != '':
                content.append(change)
        shell_execute.execute_command_output_all("wget https://websoft9.github.io/StackHub/install/update_appstore.sh && bash update_appstore.sh 1>/dev/null 2>&1")
        return content
    else:
        return None
2023-06-09 12:07:43 +08:00
2023-06-09 14:10:43 +08:00
# Get update info for the StackHub core.
def get_update_list():
    """Check GitHub for a newer StackHub release.

    Returns:
        A dict with 'version', 'date' and 'content' (changelog lines) when an
        update is available, otherwise None.
    """
    local_path = '/data/apps/stackhub/install/version.json'
    repo = 'StackHub'
    local_output = shell_execute.execute_command_output_all("cat " + local_path)['result']
    local_version = json.loads(local_output)['VERSION']
    remote_version = json.loads(get_github_content(repo, 'install/version.json'))['VERSION']
    if compared_version(local_version, remote_version) != -1:
        return None
    # First "## " section of the changelog is the newest release; its heading
    # line ends with the release date.
    changelog_lines = get_github_content(repo, 'CHANGELOG.md').split('## ')[1].split('\n')
    release_date = changelog_lines[0].split()[-1]
    content = [line for line in changelog_lines[1:] if line != '']
    return {'version': remote_version, 'date': release_date, 'content': content}
2023-04-15 16:39:33 +08:00
2023-05-17 17:50:06 +08:00
def conbine_list(installing_list, installed_list):
    """Merge the two app lists, keeping the first occurrence of each app_id.

    Entries from installing_list win over installed_list duplicates because
    they come first in the combined iteration order.
    """
    merged = []
    seen_ids = set()
    for entry in installing_list + installed_list:
        entry_id = entry['app_id']
        if entry_id not in seen_ids:
            seen_ids.add(entry_id)
            merged.append(entry)
    return merged
2023-06-09 09:05:36 +08:00
2023-06-09 12:07:43 +08:00
2023-04-15 16:39:33 +08:00
# Get info for all apps (or one app by id).
def get_my_app(app_id):
    """Return one app's record when app_id is given, else the full app list.

    Combines installed apps (from docker compose) with apps still in the
    install queue. Raises CommandException when app_id does not match any app.
    """
    installed_list = get_apps_from_compose()
    installing_list = get_apps_from_queue()
    app_list = conbine_list(installing_list, installed_list)
    if app_id is None:
        myLogger.info_logger("app list result ok")
        return app_list
    for app in app_list:
        if app['app_id'] == app_id:
            myLogger.info_logger("app list result ok")
            return app
    raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "This App doesn't exist!", "")
2023-04-15 16:39:33 +08:00
2023-06-09 12:07:43 +08:00
2023-03-24 15:23:39 +08:00
# Get the status of one specific app.
def get_app_status(app_id):
    """Return a trimmed status dict (app_id, status, status_reason) for one app.

    Raises CommandException when the app_id is malformed or unknown.
    """
    code, message = docker.check_app_id(app_id)
    if code is not None:
        raise CommandException(code, message, '')
    app = get_my_app(app_id)
    # Reduce the full app record to the fields the status API exposes.
    return {
        'app_id': app['app_id'],
        'status': app['status'],
        'status_reason': app['status_reason'],
    }
2023-04-14 19:42:42 +08:00
2023-04-14 12:04:07 +08:00
def install_app(app_name, customer_name, app_version):
    """Validate the install request and enqueue an asynchronous install job.

    Returns a dict with ResponseData.app_id always set, plus an Error entry
    when validation fails (in which case no job is enqueued).
    """
    myLogger.info_logger("Install app ...")
    app_id = app_name + "_" + customer_name
    ret = {'ResponseData': {'app_id': app_id}}
    code, message = check_app(app_name, customer_name, app_version)
    if code is None:
        # The job id doubles as the app id so the queue can be searched later.
        q.enqueue(install_app_delay, app_name, customer_name, app_version, job_id=app_id)
    else:
        ret['Error'] = get_error_info(code, message, "")
    return ret
2023-02-28 17:44:04 +08:00
2023-05-17 11:52:53 +08:00
2023-06-09 12:07:43 +08:00
def start_app(app_id):
    """Start an installed app with `docker compose start`.

    Raises CommandException when the app's compose project is not in docker.
    """
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    # The compose file directory is the last token of the `compose ls` line.
    app_path = info.split()[-1].rsplit('/', 1)[0]
    shell_execute.execute_command_output_all("docker compose -f " + app_path + "/docker-compose.yml start")
2023-02-28 17:44:04 +08:00
2023-03-15 16:13:17 +08:00
def stop_app(app_id):
    """Stop an installed app with `docker compose stop`.

    Raises CommandException when the app's compose project is not in docker.
    """
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    app_path = info.split()[-1].rsplit('/', 1)[0]
    shell_execute.execute_command_output_all("docker compose -f " + app_path + "/docker-compose.yml stop")
2023-04-07 09:52:22 +08:00
2023-06-09 12:07:43 +08:00
2023-03-15 16:13:17 +08:00
def restart_app(app_id):
    """Restart an installed app with `docker compose restart`.

    Raises CommandException when the app_id is invalid or the app's compose
    project is not in docker.
    """
    code, message = docker.check_app_id(app_id)
    if code is not None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    app_path = info.split()[-1].rsplit('/', 1)[0]
    shell_execute.execute_command_output_all("docker compose -f " + app_path + "/docker-compose.yml restart")
2023-02-28 17:44:04 +08:00
2023-03-15 08:59:06 +08:00
2023-04-17 08:55:50 +08:00
def delete_app_failedjob(job_id):
    """Remove a failed install job (and its data) from the RQ failed registry."""
    myLogger.info_logger("delete_app_failedjob")
    FailedJobRegistry(queue=q).remove(job_id, delete_job=True)
2023-04-14 18:38:30 +08:00
2023-05-25 16:11:19 +08:00
2023-06-09 12:07:43 +08:00
def delete_app(app_id):
    """Tear down a (possibly half-installed) app and remove its files.

    Used as cleanup after a failed install: everything is best-effort, and a
    CommandException is logged and swallowed so the caller's own error
    reporting is preserved.
    """
    try:
        app_name = app_id.split('_')[0]
        customer_name = app_id.split('_')[1]
        app_path = ""
        info, code_exist = app_exits_in_docker(app_id)
        if code_exist:
            app_path = info.split()[-1].rsplit('/', 1)[0]
            cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
            lib_path = '/data/library/apps/' + app_name
            # Never delete the shared library template, only a customer copy.
            if app_path != lib_path:
                cmd = cmd + " && sudo rm -rf " + app_path
            try:
                myLogger.info_logger("Intall fail, down app and delete files")
                shell_execute.execute_command_output_all(cmd)
            except Exception:
                myLogger.info_logger("Delete app compose exception")
            # Force-remove containers that failed to install and that
            # `docker compose down` could not delete.
            try:
                myLogger.info_logger("IF delete fail, force to delete containers")
                force_cmd = "docker rm -f $(docker ps -f name=^" + customer_name + " -aq)"
                shell_execute.execute_command_output_all(force_cmd)
            except Exception:
                myLogger.info_logger("force delete app compose exception")
        else:
            # Not in docker: the job may still be sitting in the RQ failed
            # registry; otherwise the app id is simply unknown.
            if check_app_rq(app_id):
                delete_app_failedjob(app_id)
            else:
                raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "")
        cmd = " sudo rm -rf /data/apps/" + customer_name
        shell_execute.execute_command_output_all(cmd)
    except CommandException as ce:
        myLogger.info_logger("Delete app compose exception")
2023-05-17 11:52:53 +08:00
2023-06-09 12:07:43 +08:00
def uninstall_app(app_id):
    """Uninstall an app: compose down, delete its files, drop its proxy config.

    Raises:
        CommandException: when the app is neither in docker nor in the RQ
            failed-job registry.
    """
    app_name = app_id.split('_')[0]
    customer_name = app_id.split('_')[1]
    app_path = ""
    info, code_exist = app_exits_in_docker(app_id)
    if code_exist:
        app_path = info.split()[-1].rsplit('/', 1)[0]
        cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
        lib_path = '/data/library/apps/' + app_name
        # Never delete the shared library template, only a customer copy.
        if app_path != lib_path:
            cmd = cmd + " && sudo rm -rf " + app_path
        shell_execute.execute_command_output_all(cmd)
    else:
        # Not running in docker: clean up a failed install job if one exists.
        if check_app_rq(app_id):
            delete_app_failedjob(app_id)
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "")
    # Force to delete docker compose
    try:
        cmd = " sudo rm -rf /data/apps/" + customer_name
        shell_execute.execute_command_output_all(cmd)
    except CommandException as ce:
        myLogger.info_logger("Delete app compose exception")
    # Delete proxy config when uninstall app
    app_proxy_delete(app_id)
2023-03-24 15:54:08 +08:00
2023-06-09 12:07:43 +08:00
2023-04-14 13:15:35 +08:00
def check_app(app_name, customer_name, app_version):
    """Validate an install request before it is queued.

    Returns:
        (code, message): code is None when the request is valid, otherwise an
        error code from `const` plus a human-readable message.
    """
    message = ""
    code = None
    if app_name is None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_name is null"
    elif customer_name is None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name is null"
    elif len(customer_name) < 2:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name must be longer than 2 chars"
    elif app_version is None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    elif app_version == "undefined" or app_version == "":
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    elif not docker.check_app_websoft9(app_name):
        code = const.ERROR_CLIENT_PARAM_NOTEXIST
        message = "It is not support to install " + app_name
    elif re.match('^[a-z0-9]+$', customer_name) is None:
        code = const.ERROR_CLIENT_PARAM_Format
        message = "APP name can only be composed of numbers and lowercase letters"
    elif docker.check_directory("/data/apps/" + customer_name):
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    elif not docker.check_vm_resource(app_name):
        code = const.ERROR_SERVER_RESOURCE
        message = "Insufficient system resources (cpu, memory, disk space)"
    else:
        # Fix: app_id used to be built unconditionally before the None checks,
        # so a None app_name/customer_name raised TypeError instead of
        # returning the intended error code. Build it only once both names
        # are known to be non-None strings.
        app_id = app_name + "_" + customer_name
        if check_app_docker(app_id):
            code = const.ERROR_CLIENT_PARAM_REPEAT
            message = "Repeat installation: " + customer_name
        elif check_app_rq(app_id):
            code = const.ERROR_CLIENT_PARAM_REPEAT
            message = "Repeat installation: " + customer_name
    return code, message
2023-03-15 16:13:17 +08:00
2023-06-09 12:07:43 +08:00
2023-04-14 19:42:42 +08:00
def prepare_app(app_name, customer_name):
    """Copy the app template from the library into the customer's install dir."""
    source = "/data/library/apps/" + app_name
    target = "/data/apps/" + customer_name
    shell_execute.execute_command_output_all("cp -r " + source + " " + target)
2023-03-24 15:25:25 +08:00
2023-06-09 12:07:43 +08:00
2023-04-14 19:42:42 +08:00
def install_app_delay(app_name, customer_name, app_version):
    """RQ worker entry point that performs the actual app installation.

    On any failure the partial install is cleaned up via delete_app() and an
    Exception is raised whose message is a "##websoft9##"-delimited
    (code, message, detail) triple; get_apps_from_queue() parses that triple
    back out of the failed job's exc_info.
    """
    myLogger.info_logger("-------RQ install start --------")
    job_id = app_name + "_" + customer_name
    try:
        # The app folder has not been copied yet at this point, so resource
        # requirements are read from /data/library via app_name, not
        # customer_name.
        resource_flag = docker.check_vm_resource(app_name)
        if resource_flag == True:
            myLogger.info_logger("job check ok, continue to install app")
            env_path = "/data/apps/" + customer_name + "/.env"
            # prepare_app(app_name, customer_name)
            docker.check_app_compose(app_name, customer_name)
            myLogger.info_logger("start JobID=" + job_id)
            docker.modify_env(env_path, 'APP_NAME', customer_name)
            docker.modify_env(env_path, "APP_VERSION", app_version)
            docker.check_app_url(customer_name)
            cmd = "cd /data/apps/" + customer_name + " && sudo docker compose pull && sudo docker compose up -d"
            output = shell_execute.execute_command_output_all(cmd)
            myLogger.info_logger("-------Install result--------")
            myLogger.info_logger(output["code"])
            myLogger.info_logger(output["result"])
            # Optional post-start hook shipped with the app; its failure is
            # logged but does not fail the install.
            try:
                shell_execute.execute_command_output_all("bash /data/apps/" + customer_name + "/src/after_up.sh")
            except Exception as e:
                myLogger.info_logger(str(e))
        else:
            error_info = "##websoft9##" + const.ERROR_SERVER_RESOURCE + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)" + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)"
            myLogger.info_logger(error_info)
            raise Exception(error_info)
    except CommandException as ce:
        # Shell/docker command failed: clean up and re-raise in the encoded form.
        myLogger.info_logger(customer_name + " install failed(docker)!")
        delete_app(job_id)
        error_info = "##websoft9##" + ce.code + "##websoft9##" + ce.message + "##websoft9##" + ce.detail
        myLogger.info_logger(error_info)
        raise Exception(error_info)
    except Exception as e:
        # Anything else (including the resource error raised above) is wrapped
        # as a generic system error.
        myLogger.info_logger(customer_name + " install failed(system)!")
        delete_app(job_id)
        error_info = "##websoft9##" + const.ERROR_SERVER_SYSTEM + "##websoft9##" + 'system original error' + "##websoft9##" + str(
            e)
        myLogger.info_logger(error_info)
        raise Exception(error_info)
2023-04-15 16:39:33 +08:00
2023-06-09 12:07:43 +08:00
2023-04-15 07:57:23 +08:00
def app_exits_in_docker(app_id):
    """Check whether the app's compose project exists in docker.

    Returns:
        (info, flag): info is the matching `docker compose ls` output line
        (empty string when not found); flag is True only when the project
        found actually belongs to this app_id.
    """
    customer_name = app_id.split('_')[1]
    app_name = app_id.split('_')[0]
    flag = False
    info = ""
    # grep for the customer's directory in the compose project list; grep's
    # non-zero exit raises CommandException when there is no match.
    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
    try:
        output = shell_execute.execute_command_output_all(cmd)
        if int(output["code"]) == 0:
            info = output["result"]
            app_path = info.split()[-1].rsplit('/', 1)[0]
            is_official = check_if_official_app(app_path + '/variables.json')
            if is_official:
                # Official app: variables.json 'name' must match app_name.
                name = docker.read_var(app_path + '/variables.json', 'name')
                if name == app_name:
                    flag = True
            elif app_name == customer_name:
                # Non-official app: its app_id is "<name>_<name>".
                flag = True
            myLogger.info_logger("APP in docker")
    except CommandException as ce:
        myLogger.info_logger("APP not in docker")
    return info, flag
2023-03-24 15:25:25 +08:00
2023-06-09 12:07:43 +08:00
2023-03-15 16:13:17 +08:00
def split_app_id(app_id):
    """Return the customer_name part of an "<app_name>_<customer_name>" id."""
    parts = app_id.split("_")
    return parts[1]
2023-03-24 15:23:39 +08:00
2023-06-09 12:07:43 +08:00
2023-05-18 16:13:14 +08:00
def get_createtime(official_app, app_path, customer_name):
    """Return docker's "RunningFor" age string for the app's first container.

    Official apps are looked up by customer_name; others by the first
    container name reported by `docker compose ps`. Returns "" on any error.
    """
    data_time = ""
    try:
        if official_app:
            lookup_name = customer_name
        else:
            ps_cmd = "cd " + app_path + " && docker compose ps -a --format json"
            ps_output = shell_execute.execute_command_output_all(ps_cmd)
            lookup_name = json.loads(ps_output["result"])[0]["Name"]
        age_cmd = "docker ps -f name=" + lookup_name + " --format {{.RunningFor}} | head -n 1"
        data_time = shell_execute.execute_command_output_all(age_cmd)["result"].rstrip('\n')
    except Exception as e:
        myLogger.info_logger(str(e))
    myLogger.info_logger("get_createtime get success" + data_time)
    return data_time
2023-06-09 12:07:43 +08:00
2023-04-17 09:54:03 +08:00
def get_apps_from_compose():
    """Build App records for every compose project known to docker.

    Skips the platform's own service containers, normalises the compose
    status into running/exited/restarting/failed, and for official apps
    enriches the record with values from variables.json and the .env file.
    Returns a list of App dicts.
    """
    myLogger.info_logger("Search all of apps ...")
    cmd = "docker compose ls -a --format json"
    output = shell_execute.execute_command_output_all(cmd)
    output_list = json.loads(output["result"])
    myLogger.info_logger(len(output_list))
    # Public IP used to build access URLs; falls back to 127.0.0.1 when the
    # cached public_ip file cannot be read.
    ip = "localhost"
    try:
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        ip = ip_result["result"].rstrip('\n')
    except Exception:
        ip = "127.0.0.1"
    app_list = []
    for app_info in output_list:
        volume = app_info["ConfigFiles"]
        app_path = volume.rsplit('/', 1)[0]
        customer_name = volume.split('/')[-2]
        # Per-app fields, reset for every compose project.
        app_id = ""
        app_name = ""
        trade_mark = ""
        port = 0
        url = ""
        admin_url = ""
        image_url = ""
        user_name = ""
        password = ""
        official_app = False
        app_version = ""
        create_time = ""
        volume_data = ""
        config_path = app_path
        app_https = False
        app_replace_url = False
        default_domain = ""
        admin_path = ""
        admin_domain_url = ""
        # Skip the platform's own infrastructure services.
        if customer_name in ['w9appmanage', 'w9nginxproxymanager', 'w9redis', 'w9kopia',
                             'w9portainer'] or app_path == '/data/apps/w9services/' + customer_name:
            continue
        var_path = app_path + "/variables.json"
        official_app = check_if_official_app(var_path)
        status_show = app_info["Status"]
        # Compose reports e.g. "running(2)"; keep only the state word.
        status = app_info["Status"].split("(")[0]
        if status == "running" or status == "exited" or status == "restarting":
            if "exited" in status_show and "running" in status_show:
                # Mixed states: treat as running unless the main container
                # (named after the customer) is the one that exited.
                if status == "exited":
                    cmd = "docker ps -a -f name=" + customer_name + " --format {{.Names}}#{{.Status}}|grep Exited"
                    result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
                    container = result.split("#Exited")[0]
                    if container != customer_name:
                        status = "running"
            if "restarting" in status_show:
                # Restarting only counts while young; a container stuck
                # restarting for more than seconds is considered failed.
                about_time = get_createtime(official_app, app_path, customer_name)
                if "seconds" in about_time:
                    status = "restarting"
                else:
                    status = "failed"
        elif status == "created":
            status = "failed"
        else:
            continue
        if official_app:
            app_name = docker.read_var(var_path, 'name')
            app_id = app_name + "_" + customer_name  # app_id
            # get trade_mark
            trade_mark = docker.read_var(var_path, 'trademark')
            image_url = get_Image_url(app_name)
            # get env info
            path = app_path + "/.env"
            env_map = docker.get_map(path)
            try:
                myLogger.info_logger("get domain for APP_URL")
                domain = env_map.get("APP_URL")
                # Placeholder or raw-IP URLs do not count as a custom domain.
                if "appname.example.com" in domain or ip in domain:
                    default_domain = ""
                else:
                    default_domain = domain
            except Exception:
                myLogger.info_logger("domain exception")
            try:
                app_version = env_map.get("APP_VERSION")
                volume_data = "/data/apps/" + customer_name + "/data"
                user_name = env_map.get("APP_USER", "")
                password = env_map.get("POWER_PASSWORD", "")
                admin_path = env_map.get("APP_ADMIN_PATH")
                if admin_path:
                    myLogger.info_logger(admin_path)
                    admin_path = admin_path.replace("\"", "")
                else:
                    admin_path = ""
                if default_domain != "" and admin_path != "":
                    admin_domain_url = "http://" + default_domain + admin_path
            except Exception:
                myLogger.info_logger("APP_USER POWER_PASSWORD exception")
            try:
                replace = env_map.get("APP_URL_REPLACE", "false")
                myLogger.info_logger("replace=" + replace)
                if replace == "true":
                    app_replace_url = True
                https = env_map.get("APP_HTTPS_ACCESS", "false")
                if https == "true":
                    app_https = True
            except Exception:
                myLogger.info_logger("APP_HTTPS_ACCESS exception")
            try:
                http_port = env_map.get("APP_HTTP_PORT", "0")
                if http_port:
                    port = int(http_port)
            except Exception:
                pass
            if port != 0:
                # Web app: build the ip:port access URL and the admin URL.
                try:
                    if app_https:
                        easy_url = "https://" + ip + ":" + str(port)
                    else:
                        easy_url = "http://" + ip + ":" + str(port)
                    url = easy_url
                    admin_url = get_admin_url(customer_name, url)
                except Exception:
                    pass
            else:
                # No HTTP port: fall back to the first APP_DB*_PORT value.
                try:
                    db_port = list(docker.read_env(path, "APP_DB.*_PORT").values())[0]
                    port = int(db_port)
                except Exception:
                    pass
        else:
            # Unofficial app: no metadata, use the folder name for both parts.
            app_name = customer_name
            app_id = customer_name + "_" + customer_name
        create_time = get_createtime(official_app, app_path, customer_name)
        # Config (access info) only makes sense for stable states.
        if status in ['running', 'exited']:
            config = Config(port=port, compose_file=volume, url=url, admin_url=admin_url,
                            admin_domain_url=admin_domain_url,
                            admin_path=admin_path, admin_username=user_name, admin_password=password,
                            default_domain=default_domain)
        else:
            config = None
        if status == "failed":
            status_reason = StatusReason(Code=const.ERROR_SERVER_SYSTEM, Message="system original error",
                                         Detail="unknown error")
        else:
            status_reason = None
        app = App(app_id=app_id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
                  app_version=app_version, create_time=create_time, volume_data=volume_data, config_path=config_path,
                  status=status, status_reason=status_reason, official_app=official_app, image_url=image_url,
                  app_https=app_https, app_replace_url=app_replace_url, config=config)
        app_list.append(app.dict())
    return app_list
2023-03-24 15:23:39 +08:00
2023-06-09 12:07:43 +08:00
2023-03-29 14:48:18 +08:00
def check_if_official_app(var_path):
    """Return True when var_path points to a valid official-app variables.json.

    An official app's variables.json must define non-empty 'name' and
    'trademark' values and a 'requirements' object containing the keys
    cpu, memory and disk.
    """
    if not docker.check_directory(var_path):
        # Fix: the original fell off the end and implicitly returned None
        # when the file was absent; return an explicit (equally falsy) False.
        return False
    if docker.read_var(var_path, 'name') != "" and docker.read_var(var_path, 'trademark') != "" and docker.read_var(
            var_path, 'requirements') != "":
        requirements = docker.read_var(var_path, 'requirements')
        try:
            # Only the presence of the keys is checked here; the values are
            # validated elsewhere.
            requirements['cpu']
            requirements['memory']
            requirements['disk']
            return True
        except KeyError:
            return False
    else:
        return False
2023-06-09 09:05:36 +08:00
2023-06-09 12:07:43 +08:00
def check_app_docker(app_id):
    """Return True when a compose project for this customer exists in docker."""
    customer_name = app_id.split('_')[1]
    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
    try:
        shell_execute.execute_command_output_all(cmd)
    except CommandException as ce:
        # grep exits non-zero when nothing matched, which raises here.
        myLogger.info_logger("APP not in docker")
        return False
    myLogger.info_logger("APP in docker")
    return True
2023-06-09 09:05:36 +08:00
2023-06-09 12:07:43 +08:00
def check_app_rq(app_id):
    """Return True when app_id is in the RQ queue (queued, running or failed)."""
    myLogger.info_logger("check_app_rq")
    started = StartedJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    run_job_ids = started.get_job_ids()
    failed_job_ids = failed.get_job_ids()
    queue_job_ids = q.job_ids
    myLogger.info_logger(queue_job_ids)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_job_ids)
    # Same precedence as before: queued, then failed, then running.
    for id_list in (queue_job_ids, failed_job_ids, run_job_ids):
        if id_list and app_id in id_list:
            myLogger.info_logger("App in RQ")
            return True
    myLogger.info_logger("App not in RQ")
    return False
2023-04-14 14:36:37 +08:00
2023-06-09 12:07:43 +08:00
2023-04-04 12:14:00 +08:00
def get_apps_from_queue():
    """Build pseudo-app records for install jobs still in the RQ queue.

    Running and queued jobs become 'installing' entries; failed jobs become
    'failed' entries with code/message/detail parsed from the job's exc_info.
    """
    myLogger.info_logger("get queque apps...")
    # One registry per job state.
    started = StartedJobRegistry(queue=q)
    finish = FinishedJobRegistry(queue=q)
    deferred = DeferredJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    scheduled = ScheduledJobRegistry(queue=q)
    cancel = CanceledJobRegistry(queue=q)
    run_job_ids = started.get_job_ids()
    finish_job_ids = finish.get_job_ids()
    wait_job_ids = deferred.get_job_ids()
    failed_jobs = failed.get_job_ids()
    scheduled_jobs = scheduled.get_job_ids()
    cancel_jobs = cancel.get_job_ids()
    myLogger.info_logger(q.jobs)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_jobs)
    myLogger.info_logger(cancel_jobs)
    myLogger.info_logger(wait_job_ids)
    myLogger.info_logger(finish_job_ids)
    myLogger.info_logger(scheduled_jobs)
    installing_list = []
    for job_id in run_job_ids:
        installing_list.append(get_rq_app(job_id, 'installing', "", "", ""))
    for job in q.jobs:
        installing_list.append(get_rq_app(job.id, 'installing', "", "", ""))
    for job_id in failed_jobs:
        job = q.fetch_job(job_id)
        # exc_info is "##websoft9##<code>##websoft9##<message>##websoft9##<detail>"
        # as encoded by install_app_delay().
        parts = job.exc_info.split('##websoft9##')
        installing_list.append(get_rq_app(job_id, 'failed', parts[1], parts[2], parts[3]))
    return installing_list
2023-06-09 12:07:43 +08:00
2023-04-17 09:09:19 +08:00
def get_rq_app(id, status, code, message, detail):
    """Build an App dict for an install job that is still in the RQ queue."""
    app_name = id.split('_')[0]
    customer_name = id.split('_')[1]
    # While the job is queued the app folder may not exist yet, so every
    # field normally read from disk (trade_mark, version, paths, ...) is empty.
    if status == "installing":
        status_reason = None
    else:
        status_reason = StatusReason(Code=code, Message=message, Detail=detail)
    app = App(app_id=id, app_name=app_name, customer_name=customer_name, trade_mark="",
              app_version="", create_time="", volume_data="", config_path="",
              status=status, status_reason=status_reason, official_app=True,
              image_url=get_Image_url(app_name),
              app_https=False, app_replace_url=False, config=None)
    return app.dict()
2023-03-24 15:23:39 +08:00
2023-06-09 12:07:43 +08:00
2023-03-25 20:31:44 +08:00
def get_Image_url(app_name):
    """Return the static logo path served for *app_name*."""
    return "static/images/" + app_name + "-websoft9.png"
2023-03-25 20:31:44 +08:00
def get_url(app_name, easy_url):
    """Return the app's access URL (currently just *easy_url* unchanged)."""
    return easy_url
2023-06-09 12:07:43 +08:00
2023-05-18 11:45:59 +08:00
def get_admin_url(customer_name, url):
    """Append the app's admin path (APP_ADMIN_PATH from its .env) to *url*.

    Returns "" when the .env defines no APP_ADMIN_PATH.
    """
    admin_url = ""
    env_file = "/data/apps/" + customer_name + "/.env"
    try:
        admin_path = list(docker.read_env(env_file, "APP_ADMIN_PATH").values())[0]
        # .env values may be quoted; strip the quotes before joining.
        admin_url = url + admin_path.replace("\"", "")
    except IndexError:
        # APP_ADMIN_PATH not present -> no admin URL for this app
        pass
    return admin_url
2023-04-14 09:27:08 +08:00
2023-04-14 10:51:29 +08:00
def get_error_info(code, message, detail):
    """Pack an error triple into the API's standard error dict shape."""
    return {'Code': code, 'Message': message, 'Detail': detail}
2023-05-10 14:58:19 +08:00
2023-06-09 12:07:43 +08:00
def app_domain_list(app_id):
    """Return {'domains': [...], 'default_domain': str} for a deployed app.

    The default domain is the APP_URL value from the app's .env; it is ""
    when the app has no bound domains or no APP_URL entry.
    Raises CommandException when the app id is invalid or not deployed.
    """
    code, message = docker.check_app_id(app_id)
    if code != None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    myLogger.info_logger("Check app_id ok[app_domain_list]")

    domains = get_all_domains(app_id)
    myLogger.info_logger(domains)

    default_domain = ""
    if domains != None and len(domains) > 0:
        customer_name = app_id.split('_')[1]
        env_text = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]
        if "APP_URL" in env_text:
            url_line = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env |grep APP_URL=")[
                "result"].rstrip('\n')
            default_domain = url_line.split('=')[1]

    ret = {'domains': domains, 'default_domain': default_domain}
    myLogger.info_logger(ret)
    return ret
2023-05-10 17:03:17 +08:00
2023-05-17 13:58:39 +08:00
2023-06-09 12:07:43 +08:00
def app_proxy_delete(app_id):
    """Delete every nginx-proxy-manager host that forwards to this app's container."""
    customer_name = app_id.split('_')[1]
    base_url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
    headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    proxies = requests.get(base_url, headers=headers).json()
    for proxy in proxies:
        if proxy["forward_host"] == customer_name:
            # Refresh the token for each delete call, mirroring per-request auth.
            delete_headers = {
                'Authorization': get_token(),
                'Content-Type': 'application/json'
            }
            requests.delete(base_url + "/" + str(proxy["id"]), headers=delete_headers)
2023-05-10 14:58:19 +08:00
2023-06-09 12:07:43 +08:00
2023-05-15 16:03:11 +08:00
def app_domain_delete(app_id, domain):
    """Unbind *domain* from the app identified by *app_id*.

    When the domain is the proxy host's last one, the whole proxy host is
    deleted and the app's default APP_URL is reset; otherwise the host is
    updated with the remaining domains.  Raises CommandException for an
    invalid/undeployed app, a blank or unbound domain, or an nginx error.
    """
    # Reject malformed app ids and apps that are not actually deployed.
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok[app_domain_delete]")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")

    # "undefined" is what the JS frontend sends for a missing value.
    if domain is None or domain == "undefined":
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")

    old_all_domains = get_all_domains(app_id)
    if domain not in old_all_domains:
        myLogger.info_logger("delete domain is not binded")
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is not bind.", "")

    myLogger.info_logger("Start to delete " + domain)
    proxy = get_proxy_domain(app_id, domain)
    if proxy != None:
        myLogger.info_logger(proxy)
        myLogger.info_logger("before update")
        domains_old = proxy["domain_names"]
        myLogger.info_logger(domains_old)

        domains_old.remove(domain)
        myLogger.info_logger("after update")
        myLogger.info_logger(domains_old)
        if len(domains_old) == 0:
            # Last domain on this host: drop the whole proxy host.
            proxy_id = proxy["id"]
            token = get_token()
            url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            response = requests.delete(url, headers=headers)
            # NOTE(review): this except also swallows the CommandException
            # raised just above (it subclasses Exception) — presumably only
            # JSON-decode failures were meant to be caught; confirm intent.
            try:
                if response.json().get("error"):
                    raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            except Exception:
                myLogger.info_logger(response.json())
            # No domains left: reset APP_URL back to the public IP.
            set_domain("", app_id)
        else:
            # Push the shrunken domain list back to the same proxy host.
            proxy_id = proxy["id"]
            token = get_token()
            url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            port = get_container_port(app_id.split('_')[1])
            host = app_id.split('_')[1]
            data = {
                "domain_names": domains_old,
                "forward_scheme": "http",
                "forward_host": host,
                "forward_port": port,
                "access_list_id": "0",
                "certificate_id": 0,
                "meta": {
                    "letsencrypt_agree": False,
                    "dns_challenge": False
                },
                "advanced_config": "",
                "locations": [],
                "block_exploits": False,
                "caching_enabled": False,
                "allow_websocket_upgrade": False,
                "http2_support": False,
                "hsts_enabled": False,
                "hsts_subdomains": False,
                "ssl_forced": False
            }
            response = requests.put(url, data=json.dumps(data), headers=headers)
            if response.json().get("error"):
                raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            domain_set = app_domain_list(app_id)
            default_domain = domain_set['default_domain']
            # If the deleted domain was the default, promote the first
            # remaining domain to be the new default.
            if default_domain == domain:
                set_domain(domains_old[0], app_id)

    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Delete domain is not bind", "")
2023-05-10 17:03:17 +08:00
2023-05-11 10:08:20 +08:00
2023-05-13 11:21:42 +08:00
def app_domain_update(app_id, domain_old, domain_new):
    """Replace *domain_old* with *domain_new* on the app's nginx proxy host.

    Both domains are validated (format + DNS pointing at this server) and
    the app must exist.  If the replaced domain was the app's default
    (APP_URL), the new domain becomes the default.  Raises CommandException
    on validation failure or nginx-proxy-manager errors.
    """
    myLogger.info_logger("app_domain_update")
    domain_list = []
    domain_list.append(domain_old)
    domain_list.append(domain_new)

    check_domains(domain_list)

    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    # Find the proxy host that currently carries domain_old.
    proxy = get_proxy_domain(app_id, domain_old)
    if proxy != None:
        # Swap the domain in place, keeping its position in the list.
        domains_old = proxy["domain_names"]
        index = domains_old.index(domain_old)
        domains_old[index] = domain_new
        proxy_id = proxy["id"]
        token = get_token()
        url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domains_old,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        domain_set = app_domain_list(app_id)
        default_domain = domain_set['default_domain']
        myLogger.info_logger("default_domain=" + default_domain + ",domain_old=" + domain_old)
        # If the edited domain was the default one, keep the new domain
        # as the default after the update.
        if default_domain == domain_old:
            set_domain(domain_new, app_id)

    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "edit domain is not exist", "")
2023-05-10 14:58:19 +08:00
2023-06-09 09:05:36 +08:00
2023-06-09 12:07:43 +08:00
def app_domain_add(app_id, domain):
    """Bind *domain* to the app identified by *app_id*.

    Validates the domain and the app, then either appends the domain to
    the app's existing nginx proxy host or creates a new proxy host for
    it (making the domain the default APP_URL in that case).  Returns the
    bound domain.  Raises CommandException on validation or nginx errors.
    """
    temp_domains = []
    temp_domains.append(domain)
    check_domains(temp_domains)

    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")

    old_domains = get_all_domains(app_id)
    if domain in old_domains:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is in use", "")
    proxy = get_proxy(app_id)
    if proxy != None:
        # The app already has a proxy host: append the new domain to it.
        domains_old = proxy["domain_names"]
        domain_list = domains_old
        domain_list.append(domain)

        proxy_id = proxy["id"]
        token = get_token()
        url = "http://172.17.0.1:9092/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domain_list,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
    else:
        # No proxy host yet for this app: create one for the new domain.
        token = get_token()
        url = "http://172.17.0.1:9092/api/nginx/proxy-hosts"
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]

        data = {
            "domain_names": temp_domains,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }

        response = requests.post(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        # First domain for this app becomes its default APP_URL.
        set_domain(domain, app_id)

    return domain
2023-05-10 14:58:19 +08:00
2023-06-09 12:07:43 +08:00
2023-05-10 14:58:19 +08:00
def check_domains(domains):
    """Validate a list of domains: non-empty, well-formed, resolving to this server.

    Raises CommandException on the first domain that fails a check.
    """
    myLogger.info_logger(domains)
    if domains is None or len(domains) == 0:
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
    for domain in domains:
        if not is_valid_domain(domain):
            raise CommandException(const.ERROR_CLIENT_PARAM_Format, "Domains format error", "")
        if check_real_domain(domain) == False:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain and server not match", "")
def is_valid_domain(domain):
    """Return False for values carrying a scheme prefix ("http..."), True otherwise."""
    return not domain.startswith("http")
2023-05-10 14:58:19 +08:00
2023-06-09 12:07:43 +08:00
2023-05-10 14:58:19 +08:00
def check_real_domain(domain):
    """Return True when *domain* resolves to this server's recorded public IP.

    Pings the domain once, extracts the IPv4 address from the output and
    compares it with the IP stored at
    /data/apps/w9services/w9appmanage/public_ip.  Any command failure
    counts as "not matching".
    """
    import shlex  # stdlib, used only here

    domain_real = True
    try:
        # shlex.quote guards against shell injection: *domain* comes from
        # API callers and is interpolated into a shell pipeline.
        # The grep pattern is a raw string so '\.' reaches the shell intact.
        cmd = "ping -c 1 " + shlex.quote(domain) + r" | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | uniq"
        domain_ip = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')

        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        ip_save = ip_result["result"].rstrip('\n')
        if domain_ip == ip_save:
            myLogger.info_logger("Domain check ok!")
        else:
            domain_real = False
    except CommandException as ce:
        domain_real = False

    return domain_real
2023-06-09 12:07:43 +08:00
2023-05-10 17:03:17 +08:00
def get_token():
    """Fetch a bearer token from the nginx-proxy-manager API.

    Reads the admin password from the cockpit config via jq, exchanges it
    for a token and returns "Bearer <token>" ready for an Authorization
    header.
    """
    url = 'http://172.17.0.1:9092/api/tokens'
    headers = {'Content-type': 'application/json'}
    cmd = "cat /usr/share/cockpit/myapps/config.json | jq -r '.NGINXPROXYMANAGER.NGINXPROXYMANAGER_PASSWORD'"
    password = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
    # Security fix: the plaintext password was previously written to the
    # application log here; never log secrets.
    param = {
        "identity": "help@websoft9.com",
        "scope": "user",
        "secret": password
    }
    response = requests.post(url, data=json.dumps(param), headers=headers)

    token = "Bearer " + response.json()["token"]
    return token
2023-06-09 12:07:43 +08:00
2023-05-10 17:03:17 +08:00
def get_proxy(app_id):
    """Return the first nginx proxy host forwarding to this app, or None."""
    customer_name = app_id.split('_')[1]
    headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    response = requests.get("http://172.17.0.1:9092/api/nginx/proxy-hosts", headers=headers)

    for candidate in response.json():
        if candidate["forward_host"] == customer_name:
            return candidate
    return None
2023-05-10 17:42:23 +08:00
2023-06-09 12:07:43 +08:00
2023-05-15 10:17:19 +08:00
def get_proxy_domain(app_id, domain):
    """Return the proxy host that forwards to this app AND carries *domain*, or None."""
    customer_name = app_id.split('_')[1]
    headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    response = requests.get("http://172.17.0.1:9092/api/nginx/proxy-hosts", headers=headers)

    myLogger.info_logger(response.json())
    for candidate in response.json():
        if candidate["forward_host"] == customer_name:
            myLogger.info_logger("-------------------")
            if domain in candidate["domain_names"]:
                myLogger.info_logger("find the domain proxy")
                return candidate
    return None
2023-06-09 12:07:43 +08:00
2023-05-15 10:17:19 +08:00
def get_all_domains(app_id):
    """Collect every domain bound to this app across all its proxy hosts."""
    customer_name = app_id.split('_')[1]
    headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    response = requests.get("http://172.17.0.1:9092/api/nginx/proxy-hosts", headers=headers)

    domains = []
    for proxy in response.json():
        if proxy["forward_host"] == customer_name:
            domains.extend(proxy["domain_names"])
    return domains
2023-06-09 12:07:43 +08:00
2023-05-15 16:36:02 +08:00
def app_domain_set(domain, app_id):
    """Validate *domain* and the app, then make the domain the app's default."""
    check_domains([domain])
    code, message = docker.check_app_id(app_id)
    if code != None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    myLogger.info_logger("Check app_id ok")

    set_domain(domain, app_id)
2023-06-09 12:07:43 +08:00
2023-05-15 15:17:45 +08:00
def set_domain(domain, app_id):
    """Write *domain* into the app's .env as APP_URL (the default domain).

    An empty *domain* resets APP_URL to the server's public IP.  When the
    .env opts in with APP_URL_REPLACE=true, the app is re-upped so the new
    URL is substituted into the running containers.  Raises
    CommandException when a non-empty domain is not bound to the app.
    """
    myLogger.info_logger("set_domain start")
    old_domains = get_all_domains(app_id)
    # A non-empty domain must already be bound to the app's proxy host.
    if domain != "":
        if domain not in old_domains:
            message = domain + " is not in use"
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, message, "")
    customer_name = app_id.split('_')[1]
    app_url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]

    # Empty domain -> fall back to the server's recorded public IP.
    # (Hoisted out of both branches below; they previously duplicated it.)
    if domain == "":
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        domain = ip_result["result"].rstrip('\n')

    if "APP_URL" in app_url:
        myLogger.info_logger("APP_URL is exist")
        # Replace the existing APP_URL line in place.
        cmd = "sed -i 's/APP_URL=.*/APP_URL=" + domain + "/g' /data/apps/" + customer_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
        if "APP_URL_REPLACE=true" in app_url:
            myLogger.info_logger("need up")
            shell_execute.execute_command_output_all("cd /data/apps/" + customer_name + " && docker compose up -d")
    else:
        myLogger.info_logger("APP_URL is not exist")
        # No APP_URL yet: insert one right after the APP_NETWORK line.
        cmd = "sed -i '/APP_NETWORK/a APP_URL=" + domain + "' /data/apps/" + customer_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
    myLogger.info_logger("set_domain success")
2023-06-09 09:05:36 +08:00
2023-06-09 12:07:43 +08:00
2023-05-11 15:12:53 +08:00
def get_container_port(container_name):
    """Return the container-internal port published by *container_name*.

    Parses "docker port" output (lines like "80/tcp -> :::80") and keeps
    the part before the first '/'.
    """
    cmd = "docker port " + container_name + " |grep ::"
    output = shell_execute.execute_command_output_all(cmd)["result"]
    myLogger.info_logger(output)
    port = output.split('/')[0]
    myLogger.info_logger(port)

    return port
def compared_version(ver1, ver2):
    """Compare two dotted version strings component by component.

    Returns -1 when ver1 < ver2, 1 when ver1 > ver2, 0 when equal.
    When the shared prefix matches, the longer version is the greater one.
    """
    parts1 = str(ver1).split(".")
    parts2 = str(ver2).split(".")
    # zip stops at the shorter list, i.e. the shared prefix.
    for left, right in zip(parts1, parts2):
        if int(left) < int(right):
            return -1
        if int(left) > int(right):
            return 1
    # Shared prefix equal: length decides.
    if len(parts1) == len(parts2):
        return 0
    return -1 if len(parts1) < len(parts2) else 1