diff --git a/app/.env b/app/.env new file mode 100644 index 00000000..e69de29b diff --git a/app/README.md b/app/README.md new file mode 100644 index 00000000..e69de29b diff --git a/app/app/api/v1/__init__.py b/app/app/api/v1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/app/app/api/v1/api.py b/app/app/api/v1/api.py new file mode 100644 index 00000000..87c04060 --- /dev/null +++ b/app/app/api/v1/api.py @@ -0,0 +1,16 @@ +from fastapi import APIRouter +from typing import List +from schemas.user import UserCreate +from models.user import User as UserModel +from services.user_service import UserService +from db.database import SessionLocal + +router = APIRouter() +user_service = UserService() + +@router.get("/users/{user_type}", response_model=List[UserModel]) +async def get_users(user_type: str): + users = user_service.get_users_by_type(user_type) + if not users: + raise HTTPException(status_code=404, detail="Users not found") + return users diff --git a/app/app/core/api_key.py b/app/app/core/api_key.py new file mode 100644 index 00000000..e69de29b diff --git a/app/app/core/code.py b/app/app/core/code.py new file mode 100644 index 00000000..ad1bf7ac --- /dev/null +++ b/app/app/core/code.py @@ -0,0 +1,34 @@ +# 所有常量统一定义区 + +# 错误代码定义 +ERROR_CLIENT_PARAM_BLANK = "Client.Parameter.Blank.Error" +ERROR_CLIENT_PARAM_Format = "Client.Parameter.Format.Error" +ERROR_CLIENT_PARAM_NOTEXIST = "Client.Parameter.Value.NotExist.Error" +ERROR_CLIENT_PARAM_REPEAT = "Client.Parameter.Value.Repeat.Error" +ERROR_CONFIG_NGINX = "Nginx.Configure.Error" +ERROR_SERVER_COMMAND = "Server.Container.Error" +ERROR_SERVER_SYSTEM = "Server.SystemError" +ERROR_SERVER_RESOURCE = "Server.ResourceError" +ERROR_SERVER_CONFIG_MISSING = "Server.Config.NotFound" + +# 错误信息定义 +ERRORMESSAGE_CLIENT_PARAM_BLANK = "Client.Parameter.Blank.Error" +ERRORMESSAGE_CLIENT_PARAM_Format = "Client.Parameter.Format.Error" +ERRORMESSAGE_CLIENT_PARAM_NOTEXIST = "Client.Parameter.Value.NotExist.Error" 
+ERRORMESSAGE_CLIENT_PARAM_REPEAT = "Client.Parameter.Value.Repeat.Error" +ERRORMESSAGE_SERVER_COMMAND = "Server.Container.Error" +ERRORMESSAGE_SERVER_SYSTEM = "Server.SystemError" +ERRORMESSAGE_SERVER_RESOURCE = "Server.ResourceError" +ERRORMESSAGE_SERVER_VERSION_NOTSUPPORT = "Server.Version.NotSupport" +ERRORMESSAGE_SERVER_VERSION_NEEDUPGRADE = "Server.Version.NeedUpgradeCore" + +# 应用启动中 installing +APP_STATUS_INSTALLING = "installing" +# 应用正在运行 running +APP_STATUS_RUNNING = "running" +# 应用已经停止 exited +APP_STATUS_EXITED = "exited" +# 应用不断重启 restarting +APP_STATUS_RESTARTING = "restarting" +# 应用错误 failed +APP_STATUS_FAILED = "failed" diff --git a/app/app/core/config.py b/app/app/core/config.py new file mode 100644 index 00000000..f4df0e62 --- /dev/null +++ b/app/app/core/config.py @@ -0,0 +1,4 @@ +NGINX_URL = "http://websoft9-nginxproxymanager:81" +# ARTIFACT_URL="https://artifact.azureedge.net/release/websoft9" +ARTIFACT_URL = "https://w9artifact.blob.core.windows.net/release/websoft9" +ARTIFACT_URL_DEV = "https://w9artifact.blob.core.windows.net/dev/websoft9" \ No newline at end of file diff --git a/app/app/core/exception.py b/app/app/core/exception.py new file mode 100644 index 00000000..ab4ebef1 --- /dev/null +++ b/app/app/core/exception.py @@ -0,0 +1,10 @@ +class CommandException(Exception): + def __init__(self, code, message, detail): + self.code = code + self.message = message + self.detail = detail + + +class MissingConfigException(CommandException): + + pass \ No newline at end of file diff --git a/app/app/core/log.py b/app/app/core/log.py new file mode 100644 index 00000000..099d14ec --- /dev/null +++ b/app/app/core/log.py @@ -0,0 +1,40 @@ +import logging +import os +from logging import handlers + +class MyLogging(): + # init logging + def __init__(self): + # the file of log + logPath = 'logs/' + if not os.path.exists(logPath): + os.makedirs(logPath) + logName = 'app_manage.log' + logFile = logPath + logName + formatter = logging.Formatter('%(asctime)s 
%(levelname)s: %(message)s')
+        # handler
+        time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logFile, when="MIDNIGHT", interval=1, encoding='utf-8')
+        time_rotating_file_handler.setLevel(logging.DEBUG)
+        time_rotating_file_handler.setFormatter(formatter)
+        # config
+        logging.basicConfig(
+            level= logging.DEBUG,
+            handlers= [time_rotating_file_handler],
+            datefmt='%Y-%m-%d %H:%M:%S',
+            format='%(asctime)s %(levelname)s: %(message)s'
+        )
+
+    def info_logger(self, content):
+        logging.info(content)
+
+    def error_logger(self, content):
+        logging.error(content)
+
+    def debug_logger(self, content):
+        logging.debug(content)
+
+    def warning_logger(self, content):
+        logging.warning(content)
+
+
+myLogger = MyLogging()
\ No newline at end of file
diff --git a/app/app/core/prerequisite.py b/app/app/core/prerequisite.py
new file mode 100644
index 00000000..5510d14e
--- /dev/null
+++ b/app/app/core/prerequisite.py
@@ -0,0 +1,327 @@
+import json, psutil
+import re
+
+from api.utils.log import myLogger
+from api.utils import shell_execute, const
+from api.exception.command_exception import CommandException
+from api.service import manage
+
+
+# How do we know a "running" app is actually reachable (e.g. its page loads, mysql accepts clients)?
+def if_app_access(app_name):
+    return True
+
+
+def if_app_exits(app_name):
+    cmd = "docker compose ls -a"
+    output = shell_execute.execute_command_output_all(cmd)
+    if int(output["code"]) == 0:
+        pattern = app_name + '$'
+        info_list = output['result'].split()
+        is_exist = False
+        for info in info_list:
+            if re.match(pattern, info) != None:
+                is_exist = True
+                break
+        return is_exist
+    else:
+        return True
+
+
+def if_app_running(app_name):
+    cmd = "docker compose ls -a"
+    output = shell_execute.execute_command_output_all(cmd)
+    if int(output["code"]) == 0:
+        app_list = output['result'].split("\n")
+        pattern = app_name + '\s*'
+        if_running = False
+        for app in app_list:
+            # BUGFIX: re.match anchors at the start of the line, and each
+            # "docker compose ls" line begins with the app name, so
+            # re.match('running', app) could never succeed; use re.search
+            # to find the status anywhere in the line.
+            if re.match(pattern, app) != None and re.search('running', app) != None:
+                if_running = True
+                break
+        
return if_running + else: + return False + + +def check_appid_exist(app_id): + myLogger.info_logger("Checking check_appid_exist ...") + appList = manage.get_my_app() + find = False + for app in appList: + if app_id == app.app_id: + find = True + break + myLogger.info_logger("Check complete.") + return find + + +def check_appid_include_rq(app_id): + message = "" + code = None + if app_id == None or app_id == "undefine": + code = const.ERROR_CLIENT_PARAM_BLANK + message = "AppID is null" + elif re.match('^[a-z0-9]+_[a-z0-9]+$', app_id) == None: + code = const.ERROR_CLIENT_PARAM_Format + message = "App_id format error" + elif not check_appid_exist(app_id): + code = const.ERROR_CLIENT_PARAM_NOTEXIST + message = "AppID is not exist" + return code, message + + +def check_app_id(app_id): + message = "" + code = None + if app_id == None: + code = const.ERROR_CLIENT_PARAM_BLANK + message = "AppID is null" + elif re.match('^[a-z0-9]+_[a-z0-9]+$', app_id) == None: + code = const.ERROR_CLIENT_PARAM_Format + message = "APP name can only be composed of numbers and lowercase letters" + myLogger.info_logger(code) + return code, message + + +def check_vm_resource(app_name): + myLogger.info_logger("Checking virtual memory resource ...") + var_path = "/data/library/apps/" + app_name + "/variables.json" + requirements_var = read_var(var_path, 'requirements') + need_cpu_count = int(requirements_var['cpu']) + cpu_count = int(shell_execute.execute_command_output_all("cat /proc/cpuinfo | grep \'core id\'| wc -l")["result"]) + if cpu_count < need_cpu_count: + myLogger.info_logger("Check complete: The number of CPU cores is insufficient!") + return False + need_mem_total = int(requirements_var['memory']) + mem_free = float(psutil.virtual_memory().available) / 1024 / 1024 / 1024 + if mem_free < need_mem_total * 1.2: + myLogger.info_logger("Check complete: The total amount of memory is insufficient!") + return False + need_disk = int(requirements_var['disk']) + disk_free = 
float(psutil.disk_usage('/').free) / 1024 / 1024 / 1024 + if round(disk_free) < need_disk + 2: + myLogger.info_logger("Check complete: There are not enough disks left!") + return False + myLogger.info_logger("Check complete.") + return True + + +def check_app_websoft9(app_name): + # websoft9's support applist + myLogger.info_logger("Checking dir...") + path = "/data/library/apps/" + app_name + is_exists = check_directory(path) + return is_exists + + +def check_directory(path): + try: + shell_execute.execute_command_output_all("ls " + path) + return True + except CommandException as ce: + return False + + +def check_app_compose(app_name, customer_name): + myLogger.info_logger("Set port and random password ...") + library_path = "/data/library/apps/" + app_name + install_path = "/data/apps/" + customer_name + port_dic = read_env(library_path + '/.env', "APP_.*_PORT=") + # 1.判断/data/apps/app_name/.env中的port是否占用,没有被占用,方法结束(get_start_port方法) + cmd1 = "docker container inspect $(docker ps -aq) | grep HostPort | awk \'{print $2}\' | sort -u" + cmd2 = "netstat -tunlp | grep \"LISTEN\" | awk '{print $4}' | awk -F \":\" '{print $NF}' | sort -u" + cmd3 = "grep -r \"APP_.*_PORT=\" /data/apps/*/.env | awk -F \"=\" '{print $2}' | sort -u" + s1 = shell_execute.execute_command_output_all(cmd1)['result'].replace('\"', '') + s2 = shell_execute.execute_command_output_all(cmd2)['result'] + try: + s3 = '' + s3 = shell_execute.execute_command_output_all(cmd3)['result'] + except: + pass + s = s1 + '\n' + s2 + '\n' + s3 + + shell_execute.execute_command_output_all("cp -r " + library_path + " " + install_path) + env_path = install_path + "/.env" + get_map(env_path) + for port_name in port_dic: + port_value = get_start_port(s, port_dic[port_name]) + modify_env(install_path + '/.env', port_name, port_value) + + # set random password + power_password = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"] + if "POWER_PASSWORD" in power_password: + 
try: + shell_execute.execute_command_output_all("docker rm -f pwgen") + except Exception: + pass + new_password = shell_execute.execute_command_output_all("docker run --name pwgen backplane/pwgen 15")[ + "result"].rstrip('\n') + "!" + modify_env(install_path + '/.env', 'POWER_PASSWORD', new_password) + shell_execute.execute_command_output_all("docker rm -f pwgen") + env_path = install_path + "/.env" + get_map(env_path) + myLogger.info_logger("Port check complete") + return + + +def check_app_url(customer_app_name): + myLogger.info_logger("Checking app url...") + # 如果app的.env文件中含有HTTP_URL项目,需要如此设置 HTTP_URL=ip:port + env_path = "/data/apps/" + customer_app_name + "/.env" + env_map = get_map(env_path) + if env_map.get("APP_URL_REPLACE") == "true": + myLogger.info_logger(customer_app_name + "need to change app url...") + app_url = list(read_env(env_path, "APP_URL=").values())[0] + ip = "localhost" + url = "" + try: + ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip") + ip = ip_result["result"].rstrip('\n') + except Exception: + ip = "127.0.0.1" + http_port = list(read_env(env_path, "APP_HTTP_PORT").values())[0] + + if ":" in app_url: + url = ip + ":" + http_port + else: + url = ip + cmd = "sed -i 's/APP_URL=.*/APP_URL=" + url + "/g' /data/apps/" + customer_app_name + "/.env" + shell_execute.execute_command_output_all(cmd) + + myLogger.info_logger("App url check complete") + return + + +def get_map(path): + myLogger.info_logger("Read env_dic" + path) + output = shell_execute.execute_command_output_all("cat " + path) + code = output["code"] + env_dic = {} + if int(code) == 0: + ret = output["result"] + myLogger.info_logger(ret) + env_list = ret.split("\n") + for env in env_list: + if "=" in env: + env_dic[env.split("=")[0]] = env.split("=")[1] + myLogger.info_logger(env_dic) + return env_dic + + +def read_env(path, key): + myLogger.info_logger("Read " + path) + output = shell_execute.execute_command_output_all("cat " + 
path) + code = output["code"] + env_dic = {} + if int(code) == 0: + ret = output["result"] + env_list = ret.split("\n") + for env in env_list: + if re.match(key, env) != None: + env_dic[env.split("=")[0]] = env.split("=")[1] + myLogger.info_logger("Read " + path + ": " + str(env_dic)) + return env_dic + + +def modify_env(path, env_name, value): + myLogger.info_logger("Modify " + path + "...") + output = shell_execute.execute_command_output_all("sed -n \'/^" + env_name + "/=\' " + path) + if int(output["code"]) == 0 and output["result"] != "": + line_num = output["result"].split("\n")[0] + s = env_name + "=" + value + output = shell_execute.execute_command_output_all("sed -i \'" + line_num + "c " + s + "\' " + path) + if int(output["code"]) == 0: + myLogger.info_logger("Modify " + path + ": Change " + env_name + " to " + value) + + +def read_var(var_path, var_name): + value = "" + myLogger.info_logger("Read " + var_path) + output = shell_execute.execute_command_output_all("cat " + var_path) + if int(output["code"]) == 0: + var = json.loads(output["result"]) + try: + value = var[var_name] + except KeyError: + myLogger.warning_logger("Read " + var_path + ": No key " + var_name) + else: + myLogger.warning_logger(var_path + " not found") + return value + + +def get_start_port(s, port): + use_port = port + while True: + if s.find(use_port) == -1: + break + else: + use_port = str(int(use_port) + 1) + + return use_port + +def check_app(app_name, customer_name, app_version): + message = "" + code = None + app_id = app_name + "_" + customer_name + if app_name == None: + code = const.ERROR_CLIENT_PARAM_BLANK + message = "app_name is null" + elif customer_name == None: + code = const.ERROR_CLIENT_PARAM_BLANK + message = "customer_name is null" + elif len(customer_name) < 2: + code = const.ERROR_CLIENT_PARAM_BLANK + message = "customer_name must be longer than 2 chars" + elif app_version == None: + code = const.ERROR_CLIENT_PARAM_BLANK + message = "app_version is null" + elif 
app_version == "undefined" or app_version == "":
+        code = const.ERROR_CLIENT_PARAM_BLANK
+        message = "app_version is null"
+    elif not docker.check_app_websoft9(app_name):
+        code = const.ERROR_CLIENT_PARAM_NOTEXIST
+        message = "It is not support to install " + app_name
+    elif re.match('^[a-z0-9]+$', customer_name) == None:
+        code = const.ERROR_CLIENT_PARAM_Format
+        message = "APP name can only be composed of numbers and lowercase letters"
+    elif docker.check_directory("/data/apps/" + customer_name):
+        code = const.ERROR_CLIENT_PARAM_REPEAT
+        message = "Repeat installation: " + customer_name
+    elif not docker.check_vm_resource(app_name):
+        code = const.ERROR_SERVER_RESOURCE
+        message = "Insufficient system resources (cpu, memory, disk space)"
+    elif check_app_docker(app_id):
+        code = const.ERROR_CLIENT_PARAM_REPEAT
+        message = "Repeat installation: " + customer_name
+    elif check_app_rq(app_id):
+        code = const.ERROR_CLIENT_PARAM_REPEAT
+        message = "Repeat installation: " + customer_name
+
+    return code, message
+
+
+def app_exits_in_docker(app_id):
+    customer_name = app_id.split('_')[1]
+    app_name = app_id.split('_')[0]
+    flag = False
+    info = ""
+    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
+    try:
+        output = shell_execute.execute_command_output_all(cmd)
+        if int(output["code"]) == 0:
+            info = output["result"]
+            app_path = info.split()[-1].rsplit('/', 1)[0]
+            is_official = check_if_official_app(app_path + '/variables.json')
+            if is_official:
+                name = docker.read_var(app_path + '/variables.json', 'name')
+                if name == app_name:
+                    flag = True
+            elif app_name == customer_name:
+                flag = True
+        myLogger.info_logger("APP in docker")
+    except CommandException as ce:
+        myLogger.info_logger("APP not in docker")
+
+    return info, flag
+
diff --git a/app/app/core/rq.py b/app/app/core/rq.py
new file mode 100644
index 00000000..5cd58dcf
--- /dev/null
+++ b/app/app/core/rq.py
@@ -0,0 +1,6 @@
+# Delete a failed job from the queue
+def delete_app_failedjob(job_id):
+    
myLogger.info_logger("delete_app_failedjob") + failed = FailedJobRegistry(queue=q) + failed.remove(job_id, delete_job=True) + diff --git a/app/app/core/settings.conf b/app/app/core/settings.conf new file mode 100644 index 00000000..3bbfceb3 --- /dev/null +++ b/app/app/core/settings.conf @@ -0,0 +1,12 @@ +#appstore_preview_update=false +#domain=test.websoft9.com + +#email=help@websoft9.com +#ip=127.0.0.1 +#smtp_port=743 +#smtp_server=smtp.websoft9.com +#smtp_tls/ssl=true +#smtp_user=admin +#smtp_password=password +#install_path=/data +#artifact_url=https://w9artifact.blob.core.windows.net/release/websoft9 \ No newline at end of file diff --git a/app/app/external/nginx_proxy_manager.py b/app/app/external/nginx_proxy_manager.py new file mode 100644 index 00000000..6f018ee7 --- /dev/null +++ b/app/app/external/nginx_proxy_manager.py @@ -0,0 +1,84 @@ +import requests + +class NginxProxyManagerAPI: + """ + This class provides methods to interact with the Nginx Proxy Manager API. + + Args: + base_url (str): The base URL of the Nginx Proxy Manager API. + api_token (str): The API Token to use for authorization. + + Attributes: + base_url (str): The base URL of the Nginx Proxy Manager API. + api_token (str): The API Token to use for authorization. + + Methods: + get_token(identity, scope, secret): Request a new access token from Nginx Proxy Manager + refresh_token(): Refresh your access token + """ + + def __init__(self, base_url, api_token): + """ + Initialize the NginxProxyManagerAPI instance. + + Args: + base_url (str): The base URL of the Nginx Proxy Manager API. + api_token (str): The API token to use for authorization. 
+        """
+        self.base_url = base_url
+        self.api_token = api_token
+
+    def get_token(self, identity, scope, secret):
+        """
+        Request a new access token from Nginx Proxy Manager
+
+        Args:
+            identity (string): user account with an email address
+            scope (user): "user"
+            secret (string): user password
+
+        Returns:
+            dict or None: A dictionary containing token-related information if successful,otherwise None. The dictionary structure is as follows:
+            If successful:
+            {
+                "expires": str, # Expiry timestamp of the token
+                "token": str # The access token
+            }
+
+            If unsuccessful:
+            None
+        """
+        url = f"{self.base_url}/api/tokens"
+        data = {
+            "identity": identity,
+            "scope": scope,
+            "secret": secret
+        }
+        # BUGFIX: `headers` was undefined here (NameError); requesting a new
+        # token needs no Authorization header, so post the credentials only.
+        response = requests.post(url, json=data)
+        if response.status_code == 200:
+            return response.json()
+        else:
+            return None
+
+    def refresh_token(self):
+        """
+        Refresh your access token
+
+        Returns:
+            dict or None: A dictionary containing token-related information if successful,otherwise None. 
The dictionary structure is as follows: + If successful: + { + "expires": str, # Expiry timestamp of the token + "token": str # The access token + } + + If unsuccessful: + None + """ + url = f"{self.base_url}/api/tokens" + headers = {"Authorization": f"Bearer {self.api_token}"} + response = requests.get(url, headers=headers) + if response.status_code == 200: + return response.json() + else: + return None \ No newline at end of file diff --git a/app/app/schemas/applist.py b/app/app/schemas/applist.py new file mode 100644 index 00000000..0978f649 --- /dev/null +++ b/app/app/schemas/applist.py @@ -0,0 +1,20 @@ +from pydantic import BaseModel +from api.model.config import Config +from api.model.status_reason import StatusReason + +class App(BaseModel): + app_id: str + app_name: str + customer_name: str + trade_mark: str + status: str + status_reason: StatusReason = None + official_app: bool + app_version: str + create_time: str + volume_data : str + config_path : str + image_url: str + app_https: bool + app_replace_url: bool + config: Config = None \ No newline at end of file diff --git a/app/app/services/app.py b/app/app/services/app.py new file mode 100644 index 00000000..52ef3617 --- /dev/null +++ b/app/app/services/app.py @@ -0,0 +1,519 @@ +# 合并applist +def conbine_list(installing_list, installed_list): + app_list = installing_list + installed_list + result_list = [] + appid_list = [] + for app in app_list: + app_id = app['app_id'] + if app_id in appid_list: + continue + else: + appid_list.append(app_id) + result_list.append(app) + return result_list + +# 获取所有app的信息 +def get_my_app(app_id): + installed_list = get_apps_from_compose() + installing_list = get_apps_from_queue() + + app_list = conbine_list(installing_list, installed_list) + find = False + ret = {} + if app_id != None: + for app in app_list: + if app_id == app['app_id']: + ret = app + find = True + break + if not find: + raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "This App doesn't exist!", 
"") + else: + ret = app_list + myLogger.info_logger("app list result ok") + return ret + +def get_apps_from_compose(): + myLogger.info_logger("Search all of apps ...") + cmd = "docker compose ls -a --format json" + output = shell_execute.execute_command_output_all(cmd) + output_list = json.loads(output["result"]) + myLogger.info_logger(len(output_list)) + ip = "localhost" + try: + ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip") + ip = ip_result["result"].rstrip('\n') + except Exception: + ip = "127.0.0.1" + + app_list = [] + for app_info in output_list: + volume = app_info["ConfigFiles"] + app_path = volume.rsplit('/', 1)[0] + customer_name = volume.split('/')[-2] + app_id = "" + app_name = "" + trade_mark = "" + port = 0 + url = "" + admin_url = "" + image_url = "" + user_name = "" + password = "" + official_app = False + app_version = "" + create_time = "" + volume_data = "" + config_path = app_path + app_https = False + app_replace_url = False + default_domain = "" + admin_path = "" + admin_domain_url = "" + if customer_name in ['w9appmanage', 'w9nginxproxymanager', 'w9redis', 'w9kopia', + 'w9portainer'] or app_path == '/data/apps/w9services/' + customer_name: + continue + + var_path = app_path + "/variables.json" + official_app = check_if_official_app(var_path) + + status_show = app_info["Status"] + status = app_info["Status"].split("(")[0] + if status == "running" or status == "exited" or status == "restarting": + if "exited" in status_show and "running" in status_show: + if status == "exited": + cmd = "docker ps -a -f name=" + customer_name + " --format {{.Names}}#{{.Status}}|grep Exited" + result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n') + container = result.split("#Exited")[0] + if container != customer_name: + status = "running" + if "restarting" in status_show: + about_time = get_createtime(official_app, app_path, customer_name) + if "seconds" in about_time: + status = 
"restarting" + else: + status = "failed" + elif status == "created": + status = "failed" + else: + continue + + if official_app: + app_name = docker.read_var(var_path, 'name') + app_id = app_name + "_" + customer_name # app_id + # get trade_mark + trade_mark = docker.read_var(var_path, 'trademark') + image_url = get_Image_url(app_name) + # get env info + path = app_path + "/.env" + env_map = docker.get_map(path) + + try: + myLogger.info_logger("get domain for APP_URL") + domain = env_map.get("APP_URL") + if "appname.example.com" in domain or ip in domain: + default_domain = "" + else: + default_domain = domain + except Exception: + myLogger.info_logger("domain exception") + try: + app_version = env_map.get("APP_VERSION") + volume_data = "/data/apps/" + customer_name + "/data" + user_name = env_map.get("APP_USER", "") + password = env_map.get("POWER_PASSWORD", "") + admin_path = env_map.get("APP_ADMIN_PATH") + if admin_path: + myLogger.info_logger(admin_path) + admin_path = admin_path.replace("\"", "") + else: + admin_path = "" + + if default_domain != "" and admin_path != "": + admin_domain_url = "http://" + default_domain + admin_path + except Exception: + myLogger.info_logger("APP_USER POWER_PASSWORD exception") + try: + replace = env_map.get("APP_URL_REPLACE", "false") + myLogger.info_logger("replace=" + replace) + if replace == "true": + app_replace_url = True + https = env_map.get("APP_HTTPS_ACCESS", "false") + if https == "true": + app_https = True + except Exception: + myLogger.info_logger("APP_HTTPS_ACCESS exception") + + try: + http_port = env_map.get("APP_HTTP_PORT", "0") + if http_port: + port = int(http_port) + except Exception: + pass + if port != 0: + try: + if app_https: + easy_url = "https://" + ip + ":" + str(port) + else: + easy_url = "http://" + ip + ":" + str(port) + url = easy_url + admin_url = get_admin_url(customer_name, url) + except Exception: + pass + else: + try: + db_port = list(docker.read_env(path, "APP_DB.*_PORT").values())[0] + port 
= int(db_port)
+                except Exception:
+                    pass
+        else:
+            app_name = customer_name
+            app_id = customer_name + "_" + customer_name
+        create_time = get_createtime(official_app, app_path, customer_name)
+        if status in ['running', 'exited']:
+            config = Config(port=port, compose_file=volume, url=url, admin_url=admin_url,
+                            admin_domain_url=admin_domain_url,
+                            admin_path=admin_path, admin_username=user_name, admin_password=password,
+                            default_domain=default_domain)
+        else:
+            config = None
+        if status == "failed":
+            status_reason = StatusReason(Code=const.ERROR_SERVER_SYSTEM, Message="system original error",
+                                         Detail="unknown error")
+        else:
+            status_reason = None
+        app = App(app_id=app_id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
+                  app_version=app_version, create_time=create_time, volume_data=volume_data, config_path=config_path,
+                  status=status, status_reason=status_reason, official_app=official_app, image_url=image_url,
+                  app_https=app_https, app_replace_url=app_replace_url, config=config)
+
+        app_list.append(app.dict())
+    return app_list
+
+# Install an app
+def install_app(app_name, customer_name, app_version):
+    myLogger.info_logger("Install app ...")
+    ret = {}
+    ret['ResponseData'] = {}
+    app_id = app_name + "_" + customer_name
+    ret['ResponseData']['app_id'] = app_id
+
+    code, message = check_app(app_name, customer_name, app_version)
+    if code == None:
+        q.enqueue(install_app_delay, app_name, customer_name, app_version, job_id=app_id)
+    else:
+        ret['Error'] = get_error_info(code, message, "")
+
+    return ret
+
+def start_app(app_id):
+    info, flag = app_exits_in_docker(app_id)
+    if flag:
+        app_path = info.split()[-1].rsplit('/', 1)[0]
+        cmd = "docker compose -f " + app_path + "/docker-compose.yml start"
+        shell_execute.execute_command_output_all(cmd)
+    else:
+        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
+
+
+def stop_app(app_id):
+    info, flag = app_exits_in_docker(app_id)
+    if flag:
+        app_path = 
info.split()[-1].rsplit('/', 1)[0] + cmd = "docker compose -f " + app_path + "/docker-compose.yml stop" + shell_execute.execute_command_output_all(cmd) + else: + raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "") + + +def restart_app(app_id): + code, message = docker.check_app_id(app_id) + if code == None: + info, flag = app_exits_in_docker(app_id) + if flag: + app_path = info.split()[-1].rsplit('/', 1)[0] + cmd = "docker compose -f " + app_path + "/docker-compose.yml restart" + shell_execute.execute_command_output_all(cmd) + else: + raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "") + else: + raise CommandException(code, message, "") + +def uninstall_app(app_id): + app_name = app_id.split('_')[0] + customer_name = app_id.split('_')[1] + app_path = "" + info, code_exist = app_exits_in_docker(app_id) + if code_exist: + app_path = info.split()[-1].rsplit('/', 1)[0] + cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v" + lib_path = '/data/library/apps/' + app_name + if app_path != lib_path: + cmd = cmd + " && sudo rm -rf " + app_path + shell_execute.execute_command_output_all(cmd) + else: + if check_app_rq(app_id): + delete_app_failedjob(app_id) + else: + raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "") + # Force to delete docker compose + try: + cmd = " sudo rm -rf /data/apps/" + customer_name + shell_execute.execute_command_output_all(cmd) + except CommandException as ce: + myLogger.info_logger("Delete app compose exception") + # Delete proxy config when uninstall app + app_proxy_delete(app_id) + + +# 安装失败后的处理 +def delete_app(app_id): + try: + app_name = app_id.split('_')[0] + customer_name = app_id.split('_')[1] + app_path = "" + info, code_exist = app_exits_in_docker(app_id) + if code_exist: + app_path = info.split()[-1].rsplit('/', 1)[0] + cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v" + lib_path = '/data/library/apps/' + 
app_name + if app_path != lib_path: + cmd = cmd + " && sudo rm -rf " + app_path + try: + myLogger.info_logger("Intall fail, down app and delete files") + shell_execute.execute_command_output_all(cmd) + except Exception: + myLogger.info_logger("Delete app compose exception") + # 强制删除失败又无法通过docker compose down 删除的容器 + try: + myLogger.info_logger("IF delete fail, force to delete containers") + force_cmd = "docker rm -f $(docker ps -f name=^" + customer_name + " -aq)" + shell_execute.execute_command_output_all(force_cmd) + except Exception: + myLogger.info_logger("force delete app compose exception") + + else: + if check_app_rq(app_id): + delete_app_failedjob(app_id) + else: + raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "") + cmd = " sudo rm -rf /data/apps/" + customer_name + shell_execute.execute_command_output_all(cmd) + except CommandException as ce: + myLogger.info_logger("Delete app compose exception") + +#安装准备 +def prepare_app(app_name, customer_name): + library_path = "/data/library/apps/" + app_name + install_path = "/data/apps/" + customer_name + shell_execute.execute_command_output_all("cp -r " + library_path + " " + install_path) + + +def install_app_delay(app_name, customer_name, app_version): + myLogger.info_logger("-------RQ install start --------") + job_id = app_name + "_" + customer_name + + try: + # 因为这个时候还没有复制文件夹,是从/data/library里面文件读取json来检查的,应该是app_name,而不是customer_name + resource_flag = docker.check_vm_resource(app_name) + + if resource_flag == True: + + myLogger.info_logger("job check ok, continue to install app") + env_path = "/data/apps/" + customer_name + "/.env" + # prepare_app(app_name, customer_name) + docker.check_app_compose(app_name, customer_name) + myLogger.info_logger("start JobID=" + job_id) + docker.modify_env(env_path, 'APP_NAME', customer_name) + docker.modify_env(env_path, "APP_VERSION", app_version) + docker.check_app_url(customer_name) + cmd = "cd /data/apps/" + customer_name + " && sudo 
docker compose pull && sudo docker compose up -d" + output = shell_execute.execute_command_output_all(cmd) + myLogger.info_logger("-------Install result--------") + myLogger.info_logger(output["code"]) + myLogger.info_logger(output["result"]) + try: + shell_execute.execute_command_output_all("bash /data/apps/" + customer_name + "/src/after_up.sh") + except Exception as e: + myLogger.info_logger(str(e)) + else: + error_info = "##websoft9##" + const.ERROR_SERVER_RESOURCE + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)" + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)" + myLogger.info_logger(error_info) + raise Exception(error_info) + except CommandException as ce: + myLogger.info_logger(customer_name + " install failed(docker)!") + delete_app(job_id) + error_info = "##websoft9##" + ce.code + "##websoft9##" + ce.message + "##websoft9##" + ce.detail + myLogger.info_logger(error_info) + raise Exception(error_info) + except Exception as e: + myLogger.info_logger(customer_name + " install failed(system)!") + delete_app(job_id) + error_info = "##websoft9##" + const.ERROR_SERVER_SYSTEM + "##websoft9##" + 'system original error' + "##websoft9##" + str( + e) + myLogger.info_logger(error_info) + raise Exception(error_info) + +def get_createtime(official_app, app_path, customer_name): + data_time = "" + try: + if official_app: + cmd = "docker ps -f name=" + customer_name + " --format {{.RunningFor}} | head -n 1" + result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n') + data_time = result + else: + cmd_all = "cd " + app_path + " && docker compose ps -a --format json" + output = shell_execute.execute_command_output_all(cmd_all) + container_name = json.loads(output["result"])[0]["Name"] + cmd = "docker ps -f name=" + container_name + " --format {{.RunningFor}} | head -n 1" + result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n') + data_time = result + + except Exception as e: + 
def check_if_official_app(var_path):
    """Return True when var_path describes a complete official app.

    An official app must define non-empty 'name', 'trademark' and
    'requirements' values, and the requirements mapping must contain
    cpu / memory / disk entries.
    """
    if not docker.check_directory(var_path):
        # Originally fell through and returned None here; explicit False is
        # equivalent for every truthiness-based caller.
        return False
    if (docker.read_var(var_path, 'name') == ""
            or docker.read_var(var_path, 'trademark') == ""
            or docker.read_var(var_path, 'requirements') == ""):
        return False
    requirements = docker.read_var(var_path, 'requirements')
    try:
        # Subscript to keep the original KeyError-based presence check.
        requirements['cpu']
        requirements['memory']
        requirements['disk']
    except KeyError:
        return False
    return True


def check_app_docker(app_id):
    """Return True if a docker compose project exists for this app."""
    customer_name = app_id.split('_')[1]
    flag = False
    # grep exits non-zero (-> CommandException) when the project is absent.
    cmd = "docker compose ls -a | grep '/" + customer_name + "/'"
    try:
        shell_execute.execute_command_output_all(cmd)
        flag = True
        myLogger.info_logger("APP in docker")
    except CommandException:
        myLogger.info_logger("APP not in docker")
    return flag


def check_app_rq(app_id):
    """Return True if app_id is queued, running, or failed in RQ."""
    myLogger.info_logger("check_app_rq")
    started = StartedJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    run_job_ids = started.get_job_ids()
    failed_job_ids = failed.get_job_ids()
    queue_job_ids = q.job_ids
    myLogger.info_logger(queue_job_ids)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_job_ids)
    # `in` on an empty list is already False, so the original truthiness
    # guards were redundant.
    if app_id in queue_job_ids or app_id in failed_job_ids or app_id in run_job_ids:
        myLogger.info_logger("App in RQ")
        return True
    myLogger.info_logger("App not in RQ")
    return False


def get_apps_from_queue():
    """Collect queued / running / failed install jobs from RQ as App dicts."""
    myLogger.info_logger("get queque apps...")
    started = StartedJobRegistry(queue=q)
    finish = FinishedJobRegistry(queue=q)
    deferred = DeferredJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    scheduled = ScheduledJobRegistry(queue=q)
    cancel = CanceledJobRegistry(queue=q)

    run_job_ids = started.get_job_ids()
    finish_job_ids = finish.get_job_ids()
    wait_job_ids = deferred.get_job_ids()
    failed_jobs = failed.get_job_ids()
    scheduled_jobs = scheduled.get_job_ids()
    cancel_jobs = cancel.get_job_ids()

    myLogger.info_logger(q.jobs)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_jobs)
    myLogger.info_logger(cancel_jobs)
    myLogger.info_logger(wait_job_ids)
    myLogger.info_logger(finish_job_ids)
    myLogger.info_logger(scheduled_jobs)

    installing_list = []
    for job_id in run_job_ids:
        installing_list.append(get_rq_app(job_id, 'installing', "", "", ""))
    for job in q.jobs:
        installing_list.append(get_rq_app(job.id, 'installing', "", "", ""))
    for job_id in failed_jobs:
        job = q.fetch_job(job_id)
        # exc_info carries "##websoft9##code##websoft9##message##websoft9##detail"
        # as produced by install_app_delay.
        parts = job.exc_info.split('##websoft9##')
        installing_list.append(get_rq_app(job_id, 'failed', parts[1], parts[2], parts[3]))

    return installing_list


def get_rq_app(id, status, code, message, detail):
    """Build the dict form of an App from RQ job metadata only.

    While a job is still in RQ the app directory may not exist yet, so
    trademark/version/volume/config fields are left blank.
    """
    app_name = id.split('_')[0]
    customer_name = id.split('_')[1]
    image_url = get_Image_url(app_name)
    # Only failed jobs carry a structured failure reason.
    if status == "installing":
        status_reason = None
    else:
        status_reason = StatusReason(Code=code, Message=message, Detail=detail)

    app = App(app_id=id, app_name=app_name, customer_name=customer_name, trade_mark="",
              app_version="", create_time="", volume_data="", config_path="",
              status=status, status_reason=status_reason, official_app=True, image_url=image_url,
              app_https=False, app_replace_url=False, config=None)
    return app.dict()
"" + path = "/data/apps/" + customer_name + "/.env" + try: + admin_path = list(docker.read_env(path, "APP_ADMIN_PATH").values())[0] + admin_path = admin_path.replace("\"", "") + admin_url = url + admin_path + except IndexError: + pass + return admin_url + +def get_container_port(container_name): + port = "80" + cmd = "docker port " + container_name + " |grep ::" + result = shell_execute.execute_command_output_all(cmd)["result"] + myLogger.info_logger(result) + port = result.split('/')[0] + myLogger.info_logger(port) + + return port \ No newline at end of file diff --git a/app/app/services/domain.py b/app/app/services/domain.py new file mode 100644 index 00000000..95c0d14d --- /dev/null +++ b/app/app/services/domain.py @@ -0,0 +1,444 @@ +def app_domain_list(app_id): + code, message = docker.check_app_id(app_id) + if code == None: + info, flag = app_exits_in_docker(app_id) + if flag: + myLogger.info_logger("Check app_id ok[app_domain_list]") + else: + raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "") + else: + raise CommandException(code, message, "") + + domains = get_all_domains(app_id) + + myLogger.info_logger(domains) + + ret = {} + ret['domains'] = domains + + default_domain = "" + if domains != None and len(domains) > 0: + customer_name = app_id.split('_')[1] + app_url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"] + if "APP_URL" in app_url: + url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env |grep APP_URL=")[ + "result"].rstrip('\n') + default_domain = url.split('=')[1] + ret['default_domain'] = default_domain + myLogger.info_logger(ret) + return ret + +def app_proxy_delete(app_id): + customer_name = app_id.split('_')[1] + proxy_host = None + token = get_token() + url = const.NGINX_URL+"/api/nginx/proxy-hosts" + headers = { + 'Authorization': token, + 'Content-Type': 'application/json' + } + response = requests.get(url, headers=headers) + 
def _nginx_headers():
    """Authorization headers for the nginx-proxy-manager API."""
    return {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }


def _proxy_host_payload(domain_names, host, port):
    """Build the proxy-host request body shared by create/update calls."""
    return {
        "domain_names": domain_names,
        "forward_scheme": "http",
        "forward_host": host,
        "forward_port": port,
        "access_list_id": "0",
        "certificate_id": 0,
        "meta": {
            "letsencrypt_agree": False,
            "dns_challenge": False
        },
        "advanced_config": "",
        "locations": [],
        "block_exploits": False,
        "caching_enabled": False,
        "allow_websocket_upgrade": False,
        "http2_support": False,
        "hsts_enabled": False,
        "hsts_subdomains": False,
        "ssl_forced": False
    }


def _check_app_installed(app_id, tag=""):
    """Raise CommandException unless app_id is valid and the app is installed."""
    code, message = docker.check_app_id(app_id)
    if code is not None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    myLogger.info_logger("Check app_id ok" + tag)


def app_domain_delete(app_id, domain):
    """Unbind *domain* from the app's nginx proxy host.

    When it is the last bound domain the whole proxy host is deleted and
    APP_URL is reset; otherwise the host is updated with the remaining
    domains and the default domain is repaired if needed.
    """
    _check_app_installed(app_id, "[app_domain_delete]")

    if domain is None or domain == "undefined":
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")

    old_all_domains = get_all_domains(app_id)
    if domain not in old_all_domains:
        myLogger.info_logger("delete domain is not binded")
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is not bind.", "")

    myLogger.info_logger("Start to delete " + domain)
    proxy = get_proxy_domain(app_id, domain)
    if proxy is None:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Delete domain is not bind", "")

    myLogger.info_logger(proxy)
    myLogger.info_logger("before update")
    domains_old = proxy["domain_names"]
    myLogger.info_logger(domains_old)
    domains_old.remove(domain)
    myLogger.info_logger("after update")
    myLogger.info_logger(domains_old)

    url = const.NGINX_URL + "/api/nginx/proxy-hosts/" + str(proxy["id"])
    if len(domains_old) == 0:
        # Last domain removed: drop the whole proxy host and clear APP_URL.
        response = requests.delete(url, headers=_nginx_headers())
        try:
            if response.json().get("error"):
                raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        except Exception:
            # NOTE(review): this also swallows the CommandException raised
            # just above and only logs it — preserved from the original.
            myLogger.info_logger(response.json())
        set_domain("", app_id)
    else:
        host = app_id.split('_')[1]
        port = get_container_port(host)
        response = requests.put(url, data=json.dumps(_proxy_host_payload(domains_old, host, port)),
                                headers=_nginx_headers())
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        # If the removed domain was the default APP_URL, fall back to the
        # first remaining domain.
        default_domain = app_domain_list(app_id)['default_domain']
        if default_domain == domain:
            set_domain(domains_old[0], app_id)


def app_domain_update(app_id, domain_old, domain_new):
    """Replace domain_old with domain_new on the app's proxy host."""
    myLogger.info_logger("app_domain_update")
    check_domains([domain_old, domain_new])
    _check_app_installed(app_id)

    proxy = get_proxy_domain(app_id, domain_old)
    if proxy is None:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "edit domain is not exist", "")

    domains_old = proxy["domain_names"]
    domains_old[domains_old.index(domain_old)] = domain_new
    url = const.NGINX_URL + "/api/nginx/proxy-hosts/" + str(proxy["id"])
    host = app_id.split('_')[1]
    port = get_container_port(host)
    response = requests.put(url, data=json.dumps(_proxy_host_payload(domains_old, host, port)),
                            headers=_nginx_headers())
    if response.json().get("error"):
        raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")

    default_domain = app_domain_list(app_id)['default_domain']
    myLogger.info_logger("default_domain=" + default_domain + ",domain_old=" + domain_old)
    # Keep the default domain stable across the rename.
    if default_domain == domain_old:
        set_domain(domain_new, app_id)


def app_domain_add(app_id, domain):
    """Bind an additional *domain* to the app and return it."""
    temp_domains = [domain]
    check_domains(temp_domains)
    _check_app_installed(app_id)

    old_domains = get_all_domains(app_id)
    if domain in old_domains:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is in use", "")

    host = app_id.split('_')[1]
    proxy = get_proxy(app_id)
    if proxy is not None:
        # Existing proxy host: append the new domain to its list.
        domain_list = proxy["domain_names"]
        domain_list.append(domain)
        url = const.NGINX_URL + "/api/nginx/proxy-hosts/" + str(proxy["id"])
        port = get_container_port(host)
        response = requests.put(url, data=json.dumps(_proxy_host_payload(domain_list, host, port)),
                                headers=_nginx_headers())
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
    else:
        # No proxy host yet: create one with just this domain.
        url = const.NGINX_URL + "/api/nginx/proxy-hosts"
        port = get_container_port(host)
        response = requests.post(url, data=json.dumps(_proxy_host_payload(temp_domains, host, port)),
                                 headers=_nginx_headers())
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        set_domain(domain, app_id)

    return domain
def check_domains(domains):
    """Validate a list of domains: non-empty, well-formed, and resolving to
    this server's public IP.  Raises CommandException on failure."""
    myLogger.info_logger(domains)
    if domains is None or len(domains) == 0:
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
    for domain in domains:
        if not is_valid_domain(domain):
            raise CommandException(const.ERROR_CLIENT_PARAM_Format, "Domains format error", "")
        if not check_real_domain(domain):
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain and server not match", "")


def is_valid_domain(domain):
    """Return False for URL-style input (http/https prefix), else True.

    NOTE(review): this only rejects a leading "http" — it does not validate
    domain syntax; confirm whether stricter validation is wanted.
    """
    return not domain.startswith("http")


def check_real_domain(domain):
    """Return True if *domain* pings back to this server's saved public IP."""
    domain_real = True
    try:
        # Raw string: \. in a non-raw string is an invalid escape sequence
        # (DeprecationWarning) even though it happens to stay literal.
        cmd = "ping -c 1 " + domain + r" | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | uniq"
        domain_ip = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')

        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        ip_save = ip_result["result"].rstrip('\n')

        if domain_ip == ip_save:
            myLogger.info_logger("Domain check ok!")
        else:
            domain_real = False
    except CommandException:
        domain_real = False

    return domain_real


def get_proxy_domain(app_id, domain):
    """Return the proxy-host record that forwards to this app and already
    contains *domain*, or None if no such host exists."""
    customer_name = app_id.split('_')[1]
    proxy_host = None
    token = get_token()
    url = const.NGINX_URL + "/api/nginx/proxy-hosts"
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    response = requests.get(url, headers=headers)

    myLogger.info_logger(response.json())
    for proxy in response.json():
        if customer_name == proxy["forward_host"]:
            myLogger.info_logger("-------------------")
            if domain in proxy["domain_names"]:
                myLogger.info_logger("find the domain proxy")
                proxy_host = proxy
                break

    return proxy_host


def get_all_domains(app_id):
    """List every domain currently bound to this app across all proxy hosts."""
    customer_name = app_id.split('_')[1]
    domains = []
    token = get_token()
    url = const.NGINX_URL + "/api/nginx/proxy-hosts"
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    response = requests.get(url, headers=headers)

    for proxy in response.json():
        if customer_name == proxy["forward_host"]:
            for domain in proxy["domain_names"]:
                domains.append(domain)
    return domains


def app_domain_set(domain, app_id):
    """Validate *domain* and the app, then make it the default APP_URL."""
    check_domains([domain])

    code, message = docker.check_app_id(app_id)
    if code is None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")

    set_domain(domain, app_id)
shell_execute.execute_command_output_all("cd /data/apps/" + customer_name + " && docker compose up -d") + else: + myLogger.info_logger("APP_URL is not exist") + if domain == "": + ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip") + domain = ip_result["result"].rstrip('\n') + + cmd = "sed -i '/APP_NETWORK/a APP_URL=" + domain + "' /data/apps/" + customer_name + "/.env" + shell_execute.execute_command_output_all(cmd) + myLogger.info_logger("set_domain success") + diff --git a/app/app/services/update.py b/app/app/services/update.py new file mode 100644 index 00000000..232cbfcf --- /dev/null +++ b/app/app/services/update.py @@ -0,0 +1,177 @@ +def get_release_url(): + preview = db.AppSearchPreview().get("preview") + myLogger.info_logger(preview) + if preview == "false": + return const.ARTIFACT_URL + else: + return const.ARTIFACT_URL_DEV + +def appstore_update(): + myLogger.info_logger("appstore update start...") + # 当点击appstore升级时,是无条件升级,不需要做版本的判定 + release_url = get_release_url() + download_url = release_url + "/plugin/appstore/appstore-latest.zip" + cmd = "cd /opt && rm -rf /opt/appstore* && wget -q " + download_url + " && unzip -q appstore-latest.zip " + shell_execute.execute_command_output_all(cmd) + + shell_execute.execute_command_output_all("rm -rf /usr/share/cockpit/appstore && cp -r /opt/appstore /usr/share/cockpit") + shell_execute.execute_command_output_all("rm -rf /opt/appstore*") + + library_url = release_url + "/plugin/library/library-latest.zip" + library_cmd = "cd /opt && rm -rf /opt/library* && wget -q " + library_url + " && unzip -q library-latest.zip " + shell_execute.execute_command_output_all(library_cmd) + shell_execute.execute_command_output_all("rm -rf /data/library && cp -r /opt/library /data") + shell_execute.execute_command_output_all("rm -rf /opt/library*") + myLogger.info_logger("auto update success...") + +def AppStoreUpdate(): + core_support = AppStoreCore() + release_url = 
get_release_url() + if core_support == "-1": + raise CommandException(const.ERRORMESSAGE_SERVER_VERSION_NEEDUPGRADE, "You must upgrade websoft9 core", "You must upgrade websoft9 core") + elif core_support == "1": + raise CommandException(const.ERRORMESSAGE_SERVER_VERSION_NOTSUPPORT, "core not support,can not upgrade", "core not support,can not upgrade") + local_path = '/usr/share/cockpit/appstore/appstore.json' + local_version = "0" + try: + op = shell_execute.execute_command_output_all("cat " + local_path)['result'] + local_version = json.loads(op)['Version'] + except: + local_version = "0.0.0" + + version_cmd = "wget -O appstore.json " + release_url + "/plugin/appstore/appstore.json && cat appstore.json" + latest = shell_execute.execute_command_output_all(version_cmd)['result'] + version = json.loads(latest)['Version'] + if local_version < version: + appstore_update() + else: + myLogger.info_logger("You click update appstore, but not need to update") + + + +def AppPreviewUpdate(preview): + myLogger.info_logger("AppPreviewUpdate") + if preview == "true" or preview == "True": + db.AppUpdatePreview(preview) + return "true" + elif preview == "false" or preview == "False": + db.AppUpdatePreview(preview) + return "false" + elif preview == None or preview == "" or preview == "undefine": + return db.AppSearchPreview().get("preview") + else: + raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "preview is true,false,blank", "preview is true,false,blank") + +#检查内核VERSION 是否支持Appstore的更新 +def AppStoreCore(): + release_url = get_release_url() + version_cmd = "wget -O appstore.json " + release_url + "/plugin/appstore/appstore.json && cat appstore.json" + latest = shell_execute.execute_command_output_all(version_cmd)['result'] + most_version = json.loads(latest)['Requires at most'] + least_version = json.loads(latest)['Requires at least'] + now = shell_execute.execute_command_output_all("cat /data/apps/websoft9/version.json")['result'] + now_version = 
json.loads(now)['VERSION'] + version_str = "now_version:" + now_version + " least_version:" + least_version + " most_version:" + most_version + myLogger.info_logger(version_str) + if now_version >= least_version and now_version <= most_version: + return "0" + elif now_version < least_version: + return "-1" + elif now_version > most_version: + return "1" + return "0" + +# 获取 核心更新日志 +def get_update_list(url: str=None): + local_path = '/data/apps/websoft9/version.json' + artifact_url = const.ARTIFACT_URL + if url: + artifact_url = url + + try: + op = shell_execute.execute_command_output_all("cat " + local_path)['result'] + local_version = json.loads(op)['VERSION'] + except: + local_version = "0.0.0" + version_cmd = f"wget -O version.json {artifact_url}/version.json && cat version.json" + latest = shell_execute.execute_command_output_all(version_cmd)['result'] + version = json.loads(latest)['VERSION'] + ret = {} + ret['local_version'] = local_version + ret['target_version'] = version + content = [] + date = "" + + if compared_version(local_version, version) == -1: + ret['update'] = True + cmd = f"wget -O CHANGELOG.md {artifact_url}/CHANGELOG.md && head -n 20 CHANGELOG.md" + change_log_contents = shell_execute.execute_command_output(cmd) + change_log = change_log_contents.split('## ')[1].split('\n') + date = change_log[0].split()[-1] + for change in change_log[1:]: + if change != '': + content.append(change) + else: + ret['update'] = False + ret['date'] = date + ret['content'] = content + return ret + +# 获取 appstore 更新日志 +def get_appstore_update_list(): + release_url = get_release_url() + local_path = '/usr/share/cockpit/appstore/appstore.json' + local_version = "0" + try: + op = shell_execute.execute_command_output_all("cat " + local_path)['result'] + local_version = json.loads(op)['Version'] + except: + local_version = "0.0.0" + + + version_cmd = "wget -O appstore.json -N " + release_url + "/plugin/appstore/appstore.json && cat appstore.json" + latest = 
shell_execute.execute_command_output_all(version_cmd)['result'] + version = json.loads(latest)['Version'] + ret = {} + ret['local_version'] = local_version + ret['target_version'] = version + content = [] + date = "" + core_compare = "" + + if compared_version(local_version, version) == -1: + ret['update'] = True + cmd = "wget -O CHANGELOG.md " + release_url + "/plugin/appstore/CHANGELOG.md && cat CHANGELOG.md" + change_log_contents = shell_execute.execute_command_output_all(cmd)['result'] + change_log = change_log_contents.split('## ')[1].split('\n') + date = change_log[0].split()[-1] + for change in change_log[1:]: + if change != '': + content.append(change) + core_compare = AppStoreCore() + else: + ret['update'] = False + ret['date'] = date + ret['content'] = content + ret['core_compare'] = core_compare + return ret + + +def compared_version(ver1, ver2): + list1 = str(ver1).split(".") + list2 = str(ver2).split(".") + # 循环次数为短的列表的len + for i in range(len(list1)) if len(list1) < len(list2) else range(len(list2)): + if int(list1[i]) == int(list2[i]): + pass + elif int(list1[i]) < int(list2[i]): + return -1 + else: + return 1 + # 循环结束,哪个列表长哪个版本号高 + if len(list1) == len(list2): + return 0 + elif len(list1) < len(list2): + return -1 + else: + return 1 \ No newline at end of file diff --git a/app/app/utils/getIP.py b/app/app/utils/getIP.py new file mode 100644 index 00000000..1a898ea8 --- /dev/null +++ b/app/app/utils/getIP.py @@ -0,0 +1,41 @@ +#!/bin/bash +url_list=( + api.ipify.org + bot.whatismyipaddress.com + icanhazip.com + ifconfig.co + ident.me + ifconfig.me + icanhazip.com + ipecho.net/plain + ipinfo.io/ip + ip.sb + whatismyip.akamai.com + inet-ip.info +) + +curl_ip(){ + curl --connect-timeout 1 -m 2 $1 2>/dev/null + return $? +} + +debug(){ + for x in ${url_list[*]} + do + curl_ip $x + done +} + +print_ip(){ + for n in ${url_list[*]} + do + public_ip=`curl_ip $n` + check_ip=`echo $public_ip | awk -F"." '{print NF}'` + if [ ! 
-z "$public_ip" -a $check_ip -eq "4" ]; then + echo $public_ip + exit 0 + fi + done +} +#debug +print_ip \ No newline at end of file diff --git a/app/app/utils/helper.py b/app/app/utils/helper.py new file mode 100644 index 00000000..8a09d959 --- /dev/null +++ b/app/app/utils/helper.py @@ -0,0 +1,6 @@ +class Singleton(type): + _instances = {} + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) + return cls._instances[cls] \ No newline at end of file diff --git a/app/app/utils/runshell.py b/app/app/utils/runshell.py new file mode 100644 index 00000000..637f9b04 --- /dev/null +++ b/app/app/utils/runshell.py @@ -0,0 +1,46 @@ +#!/usr/bin/python3 +import subprocess + +from api.utils.log import myLogger +from api.exception.command_exception import CommandException +from api.utils import const + + +# This fuction is for running shell commands on container +# cmd_str e.g: "ls -a" +# return string limit: 4000 chars? 
to do +def execute_command_output(cmd_str): + print(cmd_str) + out_str = subprocess.getoutput(cmd_str) + print(out_str) + return out_str + + +# This fuction is for running shell commands on host machine +# cmd_str e.g: "ls -a" +# return string limit: 4000 chars +def execute_command_output_all(cmd_str): + + myLogger.info_logger("Start to execute cmd: " + cmd_str) + + process = subprocess.run(f'nsenter -m -u -i -n -p -t 1 sh -c "{cmd_str}"', capture_output=True, bufsize=65536, check=False, text=True, shell=True) + + if process.returncode == 0 and 'Fail' not in process.stdout and 'fail' not in process.stdout and 'Error' not in process.stdout and 'error' not in process.stdout: + + return {"code": "0", "result": process.stdout} + else: + myLogger.info_logger("Failed to execute cmd, output failed result") + myLogger.info_logger(process) + raise CommandException(const.ERROR_SERVER_COMMAND, "Docker returns the original error", process.stderr) + + + +# This fuction is convert container commands to host machine commands +def convert_command(cmd_str): + convert_cmd = "" + if cmd_str == "": + convert_cmd=cmd_str + else: + convert_cmd="nsenter -m -u -i -n -p -t 1 sh -c " + "'"+cmd_str+"'" + + return convert_cmd \ No newline at end of file diff --git a/app/app/utils/settings_file.py b/app/app/utils/settings_file.py new file mode 100644 index 00000000..f5ab7955 --- /dev/null +++ b/app/app/utils/settings_file.py @@ -0,0 +1,68 @@ +from api.utils.log import myLogger +from api.utils.helper import Singleton + + +# This class is add/modify/list/delete item to item=value(键值对) model settings file + +class SettingsFile(object): + + __metaclass__ = Singleton + + def __init__(self, path): + self._config = {} + self.config_file = path + + def build_config(self): + try: + with open(self.config_file, 'r') as f: + data = f.readlines() + except Exception as e: + data = [] + for i in data: + if i.startswith('#'): + continue + i = i.replace('\n', '').replace('\r\n', '') + if not i: + continue + 
tmp = i.split('=') + if len(tmp) != 2: + myLogger.error_logger(f'invalid format {i}') + continue + + key, value = i.split('=') + if self._config.get(key) != value: + self._config[key] = value + return self._config + + def init_config_from_file(self, config_file: str=None): + if config_file: + self.config_file = config_file + self.build_config() + + def update_setting(self, key: str, value: str): + self._config[key] = value + self.flush_config() + + def get_setting(self, key: str, default=None): + return self._config.get(key, default) + + def list_all_settings(self) -> dict: + self.build_config() + return self._config + + def delete_setting(self, key: str, value: str): + if self._config.get(key) == value: + del self._config[key] + self.flush_config() + + def flush_config(self): + try: + with open(self.config_file, 'w') as f: + for key, value in self._config.items(): + f.write(f'{key}={value}\n') + except Exception as e: + myLogger.error_logger(e) + + +# This class is add/modify/cat/delete content from file +# src: path | URL \ No newline at end of file diff --git a/app/main.py b/app/main.py new file mode 100644 index 00000000..24789c67 --- /dev/null +++ b/app/main.py @@ -0,0 +1,7 @@ +from fastapi import FastAPI +from fastapi.routing import APIRouter +from api.v1 import main as v1_router + +app = FastAPI() + +app.include_router(v1_router.router, prefix="/api/v1") diff --git a/app/requirements.txt b/app/requirements.txt new file mode 100644 index 00000000..82090dbd --- /dev/null +++ b/app/requirements.txt @@ -0,0 +1,10 @@ +fastapi==0.98.0 +uvicorn +rq +apscheduler +docker +psutil +gunicorn +python-dotenv +sqlalchemy +databases[sqlite] \ No newline at end of file