update README.md

This commit is contained in:
zhaojing1987 2023-09-11 17:01:43 +08:00
parent 1f97713a91
commit 68ba89a873
33 changed files with 188 additions and 1689 deletions

View File

@ -0,0 +1,84 @@
# run app : uvicorn app.main:app --reload --port 8080
# 项目结构
## src
- src
- main.py # 主程序入口
- api # API路由和端点相关的代码(对外发布的API)
- v1 # 版本1的API路由和端点代码
- routers # 版本1的所有路由器
- app.py # 应用相关的路由和端点代码
- @router.get("/apps/") # 获取所有应用的列表
- @router.post("/apps/install") # 安装新的应用
- @router.post("/apps/{app_id}/start") # 启动指定ID的应用
- @router.post("/apps/{app_id}/stop") # 停止指定ID的应用
- @router.post("/apps/{app_id}/restart") # 重启指定ID的应用
- @router.post("/apps/{app_id}/redeploy") # 重新部署指定ID的应用
- @router.delete("/apps/{app_id}/uninstall") # 卸载指定ID的应用
- settings.py # 配置相关的路由和端点代码
- @router.get("/settings") # 获取当前的设置
- @router.put("/settings") # 更新当前的设置
- proxy.py
- @router.get("/proxy/{app_id}") # 获取指定应用的代理信息
- @router.post("/proxy/{app_id}") # 为指定的应用创建一个新的代理
- @router.put("/proxy/{app_id}") # 更新指定应用的代理信息
- @router.delete("/proxy/{app_id}") # 删除指定应用的代理
- v2 # 版本2的API路由和端点代码
- config # 项目的配置代码,例如环境变量、常量等
- system.ini # 系统级别的配置
- config.ini # 应用级别的配置
- core # 项目的核心代码
- api_key.py # 处理API密钥的模块
- config.get_value() # 获取配置值
- config.set_value() # 设置配置值
- config.remove_value() # 删除配置值
- config.remove_section() # 删除配置的一个区块
- logger.py # 日志处理模块
- logger.access() # 记录访问日志
- logger.error() # 记录错误日志
- external # 与外部系统交互的代码第三方API
- gitea_api.py # Gitea交互的API调用代码
- nginx_proxy_manager_api.py # Nginx Proxy Manager交互的API调用代码
- get_token() # 获取访问Nginx Proxy Manager API的令牌
- get_proxy_hosts() # 获取所有的代理主机列表
- create_proxy_host() # 创建一个新的代理主机
- update_proxy_host() # 更新一个已存在的代理主机
- delete_proxy_host() # 删除一个已存在的代理主机
- portainer_api.py # Portainer交互的API调用代码
- schemas # 存放Pydantic模型定义用于请求和响应的数据验证
- appInstall.py # 定义与应用安装相关的请求和响应模型
- proxy.py
- services # 存放服务层的代码,处理业务逻辑
- app_manager.py # 处理应用管理相关的业务逻辑,如应用的查询、创建、卸载、启动、停止、重启、重建等
- get_app() # 查询应用
- install_app() # 创建新的应用
- uninstall_app() # 卸载已存在的应用
- start_app() # 启动已存在的应用
- stop_app() # 停止已运行的应用
- restart_app() # 重启已存在的应用
- rebuild_app() # 重建已存在的应用
- gitea_manager.py # 处理与Gitea相关的业务逻辑如仓库的创建、删除、更新等
- get_repo() # 查询仓库
- create_repo() # 创建新的仓库
- update_repo() # 更新已存在的仓库
- delete_repo() # 删除已存在的仓库
- portainer_manager.py # 处理与Portainer相关的业务逻辑如Stack的创建、删除、启动、停止、重启重建等
- get_stack() # 查询Stack
- create_stack() # 创建新的Stack
- delete_stack() # 删除已存在的Stack
- start_stack() # 启动已存在的Stack
- stop_stack() # 停止已运行的Stack
- restart_stack() # 重启已存在的Stack
- rebuild_stack() # 重建已存在的Stack
- proxy_manager.py # 处理代理管理相关的业务逻辑,如代理的查询、创建、删除、更新等
- get_proxy() # 查询代理
- create_proxy() # 创建新的代理
- update_proxy() # 更新已存在的代理
- delete_proxy() # 删除已存在的代理
- settings_manager.py # 处理设置管理相关的业务逻辑,如设置的读取和更新等
- get_settings() # 获取设置
- update_settings() # 更新设置
- utils # 存放工具函数和工具类

View File

@ -1,16 +0,0 @@
from fastapi import APIRouter
from typing import List
from schemas.user import UserCreate
from models.user import User as UserModel
from services.user_service import UserService
from db.database import SessionLocal
router = APIRouter()
user_service = UserService()
@router.get("/users/{user_type}", response_model=List[UserModel])
async def get_users(user_type: str):
    """Return all users of the given type; 404 when none exist."""
    # BUG FIX: HTTPException was referenced but never imported in this module
    # (only APIRouter is imported from fastapi), so the 404 branch raised
    # NameError instead of an HTTP error. Import it locally so this fix is
    # self-contained within the handler.
    from fastapi import HTTPException

    users = user_service.get_users_by_type(user_type)
    if not users:
        raise HTTPException(status_code=404, detail="Users not found")
    return users

View File

@ -1,34 +0,0 @@
# Central definition area for all project constants.

# Error code definitions (machine-readable codes returned to API clients).
ERROR_CLIENT_PARAM_BLANK = "Client.Parameter.Blank.Error"
# NOTE(review): mixed-case name kept as-is — callers reference this spelling.
ERROR_CLIENT_PARAM_Format = "Client.Parameter.Format.Error"
ERROR_CLIENT_PARAM_NOTEXIST = "Client.Parameter.Value.NotExist.Error"
ERROR_CLIENT_PARAM_REPEAT = "Client.Parameter.Value.Repeat.Error"
ERROR_CONFIG_NGINX = "Nginx.Configure.Error"
ERROR_SERVER_COMMAND = "Server.Container.Error"
ERROR_SERVER_SYSTEM = "Server.SystemError"
ERROR_SERVER_RESOURCE = "Server.ResourceError"
ERROR_SERVER_CONFIG_MISSING = "Server.Config.NotFound"

# Error message definitions.
# NOTE(review): these duplicate the error-code strings verbatim; confirm
# whether human-readable message texts were intended here.
ERRORMESSAGE_CLIENT_PARAM_BLANK = "Client.Parameter.Blank.Error"
ERRORMESSAGE_CLIENT_PARAM_Format = "Client.Parameter.Format.Error"
ERRORMESSAGE_CLIENT_PARAM_NOTEXIST = "Client.Parameter.Value.NotExist.Error"
ERRORMESSAGE_CLIENT_PARAM_REPEAT = "Client.Parameter.Value.Repeat.Error"
ERRORMESSAGE_SERVER_COMMAND = "Server.Container.Error"
ERRORMESSAGE_SERVER_SYSTEM = "Server.SystemError"
ERRORMESSAGE_SERVER_RESOURCE = "Server.ResourceError"
ERRORMESSAGE_SERVER_VERSION_NOTSUPPORT = "Server.Version.NotSupport"
ERRORMESSAGE_SERVER_VERSION_NEEDUPGRADE = "Server.Version.NeedUpgradeCore"

# App lifecycle status values.
# App is being installed.
APP_STATUS_INSTALLING = "installing"
# App is up and running.
APP_STATUS_RUNNING = "running"
# App has stopped.
APP_STATUS_EXITED = "exited"
# App keeps restarting.
APP_STATUS_RESTARTING = "restarting"
# App is in an error state.
APP_STATUS_FAILED = "failed"

View File

@ -1,10 +0,0 @@
class CommandException(Exception):
    """Base error raised when an external command or operation fails.

    Attributes:
        code:    machine-readable error code (see the project's const module).
        message: short human-readable summary.
        detail:  verbose diagnostic detail.
    """

    def __init__(self, code, message, detail):
        # BUG FIX: the original never called Exception.__init__, so
        # str(exc) and default logging rendered an empty string.
        super().__init__(message)
        self.code = code
        self.message = message
        self.detail = detail


class MissingConfigException(CommandException):
    """Raised when a required configuration entry is absent."""
    pass

View File

@ -1,327 +0,0 @@
import json, psutil
import re
from api.utils.log import myLogger
from api.utils import shell_execute, const
from api.exception.command_exception import CommandException
from api.service import manage
# 已经是running的app怎么知道它已经能够访问如页面能进入如mysql能被客户端连接
def if_app_access(app_name):
    """Return whether a running app is actually reachable (e.g. its page
    loads, or its database accepts client connections).

    NOTE(review): currently a stub that always reports True — real
    reachability probing is not implemented yet.
    """
    return True
def if_app_exits(app_name):
    """Return True if a compose project named exactly `app_name` is listed
    by `docker compose ls -a` (True also on listing failure, conservatively).
    """
    listing = shell_execute.execute_command_output_all("docker compose ls -a")
    if int(listing["code"]) != 0:
        # Could not list projects: err on the side of "exists".
        return True
    pattern = app_name + '$'
    return any(re.match(pattern, token) is not None
               for token in listing['result'].split())
def if_app_running(app_name):
    """Return True if `app_name` appears as a running compose project in
    the `docker compose ls -a` output; False otherwise or on failure.
    """
    cmd = "docker compose ls -a"
    output = shell_execute.execute_command_output_all(cmd)
    if int(output["code"]) != 0:
        return False
    pattern = app_name + r'\s*'
    for line in output['result'].split("\n"):
        # BUG FIX: the original tested `re.match('running', line)`, which
        # anchors at the start of the line; every line starts with the
        # project name, so a running status could never be detected.
        # Search anywhere in the line instead.
        if re.match(pattern, line) is not None and re.search('running', line) is not None:
            return True
    return False
def check_appid_exist(app_id):
    """Return True when `app_id` matches one of the installed apps."""
    myLogger.info_logger("Checking check_appid_exist ...")
    found = any(app_id == installed.app_id for installed in manage.get_my_app())
    myLogger.info_logger("Check complete.")
    return found
def check_appid_include_rq(app_id):
    """Validate `app_id` (non-blank, correct format, actually installed).

    Returns (code, message): code is None when the id is acceptable.
    """
    code, message = None, ""
    if app_id is None or app_id == "undefine":
        code, message = const.ERROR_CLIENT_PARAM_BLANK, "AppID is null"
    elif re.match('^[a-z0-9]+_[a-z0-9]+$', app_id) is None:
        code, message = const.ERROR_CLIENT_PARAM_Format, "App_id format error"
    elif not check_appid_exist(app_id):
        code, message = const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist"
    return code, message
def check_app_id(app_id):
    """Validate the syntactic form of `app_id` ("<name>_<customer>",
    lowercase alphanumerics only).

    Returns (code, message): code is None when the id is well-formed.
    """
    code, message = None, ""
    if app_id is None:
        code, message = const.ERROR_CLIENT_PARAM_BLANK, "AppID is null"
    elif re.match('^[a-z0-9]+_[a-z0-9]+$', app_id) is None:
        code, message = const.ERROR_CLIENT_PARAM_Format, "APP name can only be composed of numbers and lowercase letters"
    myLogger.info_logger(code)
    return code, message
def check_vm_resource(app_name):
    """Check whether the host has enough CPU / memory / disk for the app.

    Requirements come from /data/library/apps/<app_name>/variables.json
    under the 'requirements' key (cpu cores, memory GB, disk GB).
    Returns True only when all three checks pass.
    """
    myLogger.info_logger("Checking virtual memory resource ...")
    var_path = "/data/library/apps/" + app_name + "/variables.json"
    requirements_var = read_var(var_path, 'requirements')
    need_cpu_count = int(requirements_var['cpu'])
    # Count 'core id' entries reported by /proc/cpuinfo.
    cpu_count = int(shell_execute.execute_command_output_all("cat /proc/cpuinfo | grep \'core id\'| wc -l")["result"])
    if cpu_count < need_cpu_count:
        myLogger.info_logger("Check complete: The number of CPU cores is insufficient!")
        return False
    need_mem_total = int(requirements_var['memory'])
    # Available memory in GiB; require 20% headroom over the stated need.
    mem_free = float(psutil.virtual_memory().available) / 1024 / 1024 / 1024
    if mem_free < need_mem_total * 1.2:
        myLogger.info_logger("Check complete: The total amount of memory is insufficient!")
        return False
    need_disk = int(requirements_var['disk'])
    # Free disk on '/' in GiB; keep 2 GB spare beyond the stated need.
    disk_free = float(psutil.disk_usage('/').free) / 1024 / 1024 / 1024
    if round(disk_free) < need_disk + 2:
        myLogger.info_logger("Check complete: There are not enough disks left!")
        return False
    myLogger.info_logger("Check complete.")
    return True
def check_app_websoft9(app_name):
    """Return True if `app_name` exists in websoft9's supported app library
    (i.e. /data/library/apps/<app_name> is present on disk).
    """
    myLogger.info_logger("Checking dir...")
    return check_directory("/data/library/apps/" + app_name)
def check_directory(path):
    """Return True when `path` exists (probed with `ls`), False otherwise."""
    try:
        shell_execute.execute_command_output_all("ls " + path)
    except CommandException:
        return False
    return True
def check_app_compose(app_name, customer_name):
    """Copy the app template into the install directory, then assign free
    ports and generate a random admin password in the new .env file.
    """
    myLogger.info_logger("Set port and random password ...")
    library_path = "/data/library/apps/" + app_name
    install_path = "/data/apps/" + customer_name
    port_dic = read_env(library_path + '/.env', "APP_.*_PORT=")
    # 1. Determine whether the APP_*_PORT values from the template .env are
    #    already taken; if a port is free, get_start_port returns it as-is.
    # Ports in use are collected from three sources: host ports published by
    # docker, OS listening sockets, and APP_*_PORT values of installed apps.
    cmd1 = "docker container inspect $(docker ps -aq) | grep HostPort | awk \'{print $2}\' | sort -u"
    cmd2 = "netstat -tunlp | grep \"LISTEN\" | awk '{print $4}' | awk -F \":\" '{print $NF}' | sort -u"
    cmd3 = "grep -r \"APP_.*_PORT=\" /data/apps/*/.env | awk -F \"=\" '{print $2}' | sort -u"
    s1 = shell_execute.execute_command_output_all(cmd1)['result'].replace('\"', '')
    s2 = shell_execute.execute_command_output_all(cmd2)['result']
    try:
        s3 = ''
        s3 = shell_execute.execute_command_output_all(cmd3)['result']
    except:
        pass
    s = s1 + '\n' + s2 + '\n' + s3
    shell_execute.execute_command_output_all("cp -r " + library_path + " " + install_path)
    env_path = install_path + "/.env"
    get_map(env_path)
    for port_name in port_dic:
        port_value = get_start_port(s, port_dic[port_name])
        modify_env(install_path + '/.env', port_name, port_value)
    # set random password
    power_password = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]
    if "POWER_PASSWORD" in power_password:
        try:
            # Remove a stale pwgen container left over from a previous run.
            shell_execute.execute_command_output_all("docker rm -f pwgen")
        except Exception:
            pass
        # 15-char random password from the pwgen image, plus '!' to satisfy
        # password-complexity rules.
        new_password = shell_execute.execute_command_output_all("docker run --name pwgen backplane/pwgen 15")[
            "result"].rstrip('\n') + "!"
        modify_env(install_path + '/.env', 'POWER_PASSWORD', new_password)
        shell_execute.execute_command_output_all("docker rm -f pwgen")
    env_path = install_path + "/.env"
    get_map(env_path)
    myLogger.info_logger("Port check complete")
    return
def check_app_url(customer_app_name):
    """If the app's .env opts in (APP_URL_REPLACE=true), rewrite APP_URL to
    the host's public ip, keeping ":port" when the template URL had one.
    """
    myLogger.info_logger("Checking app url...")
    # When the app's .env contains an APP_URL entry that must track the host,
    # it has to be set as "ip[:port]".
    env_path = "/data/apps/" + customer_app_name + "/.env"
    env_map = get_map(env_path)
    if env_map.get("APP_URL_REPLACE") == "true":
        myLogger.info_logger(customer_app_name + "need to change app url...")
        app_url = list(read_env(env_path, "APP_URL=").values())[0]
        ip = "localhost"
        url = ""
        try:
            # Public ip cached by the w9appmanage service.
            ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
            ip = ip_result["result"].rstrip('\n')
        except Exception:
            ip = "127.0.0.1"
        http_port = list(read_env(env_path, "APP_HTTP_PORT").values())[0]
        # A ':' in the template URL signals that the port must be kept.
        if ":" in app_url:
            url = ip + ":" + http_port
        else:
            url = ip
        cmd = "sed -i 's/APP_URL=.*/APP_URL=" + url + "/g' /data/apps/" + customer_app_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
    myLogger.info_logger("App url check complete")
    return
def get_map(path):
    """Read a .env-style file and return its entries as {KEY: VALUE}.

    Lines without '=' are ignored. Returns an empty dict when the file
    cannot be read.
    """
    myLogger.info_logger("Read env_dic" + path)
    output = shell_execute.execute_command_output_all("cat " + path)
    env_dic = {}
    if int(output["code"]) == 0:
        ret = output["result"]
        myLogger.info_logger(ret)
        for env in ret.split("\n"):
            if "=" in env:
                # BUG FIX: split on the first '=' only, so values that
                # themselves contain '=' (URLs, base64 strings) are kept intact.
                key, value = env.split("=", 1)
                env_dic[key] = value
    myLogger.info_logger(env_dic)
    return env_dic
def read_env(path, key):
    """Read lines from `path` whose start matches the regex `key` and return
    them as {KEY: VALUE}.

    Returns an empty dict when the file cannot be read.
    """
    myLogger.info_logger("Read " + path)
    output = shell_execute.execute_command_output_all("cat " + path)
    env_dic = {}
    if int(output["code"]) == 0:
        for env in output["result"].split("\n"):
            if re.match(key, env) is not None:
                # BUG FIX: split on the first '=' only so values that contain
                # '=' are no longer truncated.
                k, v = env.split("=", 1)
                env_dic[k] = v
    myLogger.info_logger("Read " + path + ": " + str(env_dic))
    return env_dic
def modify_env(path, env_name, value):
    """Set env_name=value in the .env file at `path` (first matching line only).

    Uses `sed -n '/^NAME/=' file` to find the line number of the first line
    starting with env_name, then `sed -i 'Nc NAME=value'` to replace that line.
    NOTE(review): '^' + env_name is a prefix match, so e.g. APP_PORT would also
    match APP_PORT_HTTP — confirm that key names are unambiguous prefixes.
    """
    myLogger.info_logger("Modify " + path + "...")
    output = shell_execute.execute_command_output_all("sed -n \'/^" + env_name + "/=\' " + path)
    if int(output["code"]) == 0 and output["result"] != "":
        # Only the first occurrence is rewritten.
        line_num = output["result"].split("\n")[0]
        s = env_name + "=" + value
        output = shell_execute.execute_command_output_all("sed -i \'" + line_num + "c " + s + "\' " + path)
        if int(output["code"]) == 0:
            myLogger.info_logger("Modify " + path + ": Change " + env_name + " to " + value)
def read_var(var_path, var_name):
    """Read one key from a JSON variables file; return "" when the file or
    the key is missing (a warning is logged in either case).
    """
    myLogger.info_logger("Read " + var_path)
    output = shell_execute.execute_command_output_all("cat " + var_path)
    if int(output["code"]) != 0:
        myLogger.warning_logger(var_path + " not found")
        return ""
    data = json.loads(output["result"])
    try:
        return data[var_name]
    except KeyError:
        myLogger.warning_logger("Read " + var_path + ": No key " + var_name)
        return ""
def get_start_port(s, port):
    """Return the first free port number >= `port` as a string.

    `s` is a whitespace/newline-separated dump of ports already in use.

    BUG FIX: the original used substring search (s.find), so e.g. "80" was
    wrongly considered taken whenever "8080" appeared in the dump. Compare
    against whole tokens instead.
    """
    used = set(s.split())
    use_port = port
    while use_port in used:
        use_port = str(int(use_port) + 1)
    return use_port
def check_app(app_name, customer_name, app_version):
    """Validate an install request.

    Returns (code, message); code is None when the request is acceptable.
    """
    message = ""
    code = None
    app_id = app_name + "_" + customer_name
    if app_name is None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_name is null"
    elif customer_name is None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name is null"
    elif len(customer_name) < 2:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name must be longer than 2 chars"
    elif app_version is None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    elif app_version == "undefined" or app_version == "":
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    # BUG FIX: check_app_websoft9 / check_directory / check_vm_resource are
    # defined in this module, but the original called them through an
    # undefined `docker.` prefix, raising NameError at runtime.
    elif not check_app_websoft9(app_name):
        code = const.ERROR_CLIENT_PARAM_NOTEXIST
        message = "It is not support to install " + app_name
    elif re.match('^[a-z0-9]+$', customer_name) is None:
        code = const.ERROR_CLIENT_PARAM_Format
        message = "APP name can only be composed of numbers and lowercase letters"
    elif check_directory("/data/apps/" + customer_name):
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    elif not check_vm_resource(app_name):
        code = const.ERROR_SERVER_RESOURCE
        message = "Insufficient system resources (cpu, memory, disk space)"
    # NOTE(review): check_app_docker / check_app_rq are not defined in this
    # fragment — confirm they are imported where this module is assembled.
    elif check_app_docker(app_id):
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    elif check_app_rq(app_id):
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    return code, message
def app_exits_in_docker(app_id):
    """Return (info, flag): the `docker compose ls` line for this app's
    customer directory, and whether the app truly exists as that project.

    For official apps the template's 'name' in variables.json must equal the
    app_name part of app_id; for unofficial apps, app_name must equal
    customer_name.

    NOTE(review): `docker.read_var` and `check_if_official_app` are not
    defined or imported in this visible fragment — confirm the module's real
    import list, otherwise these calls raise NameError.
    """
    customer_name = app_id.split('_')[1]
    app_name = app_id.split('_')[0]
    flag = False
    info = ""
    # Match the compose config path containing /<customer_name>/.
    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
    try:
        output = shell_execute.execute_command_output_all(cmd)
        if int(output["code"]) == 0:
            info = output["result"]
            # Last column is the compose file path; its directory is the app dir.
            app_path = info.split()[-1].rsplit('/', 1)[0]
            is_official = check_if_official_app(app_path + '/variables.json')
            if is_official:
                name = docker.read_var(app_path + '/variables.json', 'name')
                if name == app_name:
                    flag = True
            elif app_name == customer_name:
                flag = True
            myLogger.info_logger("APP in docker")
    except CommandException as ce:
        # grep exits non-zero when nothing matches: the app is not in docker.
        myLogger.info_logger("APP not in docker")
    return info, flag

View File

@ -1,6 +0,0 @@
# 删除错误任务
# Delete a failed install task.
def delete_app_failedjob(job_id):
    """Remove `job_id` from the RQ failed-job registry and delete the job.

    NOTE(review): relies on module-level `myLogger`, `FailedJobRegistry` and
    queue `q` that are not visible in this fragment — confirm the imports.
    """
    myLogger.info_logger("delete_app_failedjob")
    failed = FailedJobRegistry(queue=q)
    failed.remove(job_id, delete_job=True)

View File

@ -1,20 +0,0 @@
from pydantic import BaseModel
from api.model.config import Config
from api.model.status_reason import StatusReason
class App(BaseModel):
    """API model describing one installed (or installing) application."""

    # Unique id in the form "<app_name>_<customer_name>".
    app_id: str
    app_name: str
    customer_name: str
    trade_mark: str
    # Lifecycle status: installing / running / exited / restarting / failed.
    status: str
    # Failure details; only populated when status is "failed".
    # NOTE(review): consider Optional[StatusReason] for pydantic strictness.
    status_reason: StatusReason = None
    # True when the app comes from the official websoft9 library.
    official_app: bool
    app_version: str
    create_time: str
    # Host path of the app's data volume.
    volume_data: str
    # Host path of the compose/config directory.
    config_path: str
    image_url: str
    # True when the app is served over HTTPS.
    app_https: bool
    # True when APP_URL is rewritten at install time.
    app_replace_url: bool
    # Access endpoints and admin credentials; None unless running/exited.
    config: Config = None

View File

@ -1,519 +0,0 @@
# # 合并applist
# def conbine_list(installing_list, installed_list):
# app_list = installing_list + installed_list
# result_list = []
# appid_list = []
# for app in app_list:
# app_id = app['app_id']
# if app_id in appid_list:
# continue
# else:
# appid_list.append(app_id)
# result_list.append(app)
# return result_list
# # 获取所有app的信息
# def get_my_app(app_id):
# installed_list = get_apps_from_compose()
# installing_list = get_apps_from_queue()
# app_list = conbine_list(installing_list, installed_list)
# find = False
# ret = {}
# if app_id != None:
# for app in app_list:
# if app_id == app['app_id']:
# ret = app
# find = True
# break
# if not find:
# raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "This App doesn't exist!", "")
# else:
# ret = app_list
# myLogger.info_logger("app list result ok")
# return ret
# def get_apps_from_compose():
# myLogger.info_logger("Search all of apps ...")
# cmd = "docker compose ls -a --format json"
# output = shell_execute.execute_command_output_all(cmd)
# output_list = json.loads(output["result"])
# myLogger.info_logger(len(output_list))
# ip = "localhost"
# try:
# ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
# ip = ip_result["result"].rstrip('\n')
# except Exception:
# ip = "127.0.0.1"
# app_list = []
# for app_info in output_list:
# volume = app_info["ConfigFiles"]
# app_path = volume.rsplit('/', 1)[0]
# customer_name = volume.split('/')[-2]
# app_id = ""
# app_name = ""
# trade_mark = ""
# port = 0
# url = ""
# admin_url = ""
# image_url = ""
# user_name = ""
# password = ""
# official_app = False
# app_version = ""
# create_time = ""
# volume_data = ""
# config_path = app_path
# app_https = False
# app_replace_url = False
# default_domain = ""
# admin_path = ""
# admin_domain_url = ""
# if customer_name in ['w9appmanage', 'w9nginxproxymanager', 'w9redis', 'w9kopia',
# 'w9portainer'] or app_path == '/data/apps/w9services/' + customer_name:
# continue
# var_path = app_path + "/variables.json"
# official_app = check_if_official_app(var_path)
# status_show = app_info["Status"]
# status = app_info["Status"].split("(")[0]
# if status == "running" or status == "exited" or status == "restarting":
# if "exited" in status_show and "running" in status_show:
# if status == "exited":
# cmd = "docker ps -a -f name=" + customer_name + " --format {{.Names}}#{{.Status}}|grep Exited"
# result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
# container = result.split("#Exited")[0]
# if container != customer_name:
# status = "running"
# if "restarting" in status_show:
# about_time = get_createtime(official_app, app_path, customer_name)
# if "seconds" in about_time:
# status = "restarting"
# else:
# status = "failed"
# elif status == "created":
# status = "failed"
# else:
# continue
# if official_app:
# app_name = docker.read_var(var_path, 'name')
# app_id = app_name + "_" + customer_name # app_id
# # get trade_mark
# trade_mark = docker.read_var(var_path, 'trademark')
# image_url = get_Image_url(app_name)
# # get env info
# path = app_path + "/.env"
# env_map = docker.get_map(path)
# try:
# myLogger.info_logger("get domain for APP_URL")
# domain = env_map.get("APP_URL")
# if "appname.example.com" in domain or ip in domain:
# default_domain = ""
# else:
# default_domain = domain
# except Exception:
# myLogger.info_logger("domain exception")
# try:
# app_version = env_map.get("APP_VERSION")
# volume_data = "/data/apps/" + customer_name + "/data"
# user_name = env_map.get("APP_USER", "")
# password = env_map.get("POWER_PASSWORD", "")
# admin_path = env_map.get("APP_ADMIN_PATH")
# if admin_path:
# myLogger.info_logger(admin_path)
# admin_path = admin_path.replace("\"", "")
# else:
# admin_path = ""
# if default_domain != "" and admin_path != "":
# admin_domain_url = "http://" + default_domain + admin_path
# except Exception:
# myLogger.info_logger("APP_USER POWER_PASSWORD exception")
# try:
# replace = env_map.get("APP_URL_REPLACE", "false")
# myLogger.info_logger("replace=" + replace)
# if replace == "true":
# app_replace_url = True
# https = env_map.get("APP_HTTPS_ACCESS", "false")
# if https == "true":
# app_https = True
# except Exception:
# myLogger.info_logger("APP_HTTPS_ACCESS exception")
# try:
# http_port = env_map.get("APP_HTTP_PORT", "0")
# if http_port:
# port = int(http_port)
# except Exception:
# pass
# if port != 0:
# try:
# if app_https:
# easy_url = "https://" + ip + ":" + str(port)
# else:
# easy_url = "http://" + ip + ":" + str(port)
# url = easy_url
# admin_url = get_admin_url(customer_name, url)
# except Exception:
# pass
# else:
# try:
# db_port = list(docker.read_env(path, "APP_DB.*_PORT").values())[0]
# port = int(db_port)
# except Exception:
# pass
# else:
# app_name = customer_name
# app_id = customer_name + "_" + customer_name
# create_time = get_createtime(official_app, app_path, customer_name)
# if status in ['running', 'exited']:
# config = Config(port=port, compose_file=volume, url=url, admin_url=admin_url,
# admin_domain_url=admin_domain_url,
# admin_path=admin_path, admin_username=user_name, admin_password=password,
# default_domain=default_domain)
# else:
# config = None
# if status == "failed":
# status_reason = StatusReason(Code=const.ERROR_SERVER_SYSTEM, Message="system original error",
# Detail="unknown error")
# else:
# status_reason = None
# app = App(app_id=app_id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
# app_version=app_version, create_time=create_time, volume_data=volume_data, config_path=config_path,
# status=status, status_reason=status_reason, official_app=official_app, image_url=image_url,
# app_https=app_https, app_replace_url=app_replace_url, config=config)
# app_list.append(app.dict())
# return app_list
# # 安装
# def install_app(app_name, customer_name, app_version):
# myLogger.info_logger("Install app ...")
# ret = {}
# ret['ResponseData'] = {}
# app_id = app_name + "_" + customer_name
# ret['ResponseData']['app_id'] = app_id
# code, message = check_app(app_name, customer_name, app_version)
# if code == None:
# q.enqueue(install_app_delay, app_name, customer_name, app_version, job_id=app_id)
# else:
# ret['Error'] = get_error_info(code, message, "")
# return ret
# def start_app(app_id):
# info, flag = app_exits_in_docker(app_id)
# if flag:
# app_path = info.split()[-1].rsplit('/', 1)[0]
# cmd = "docker compose -f " + app_path + "/docker-compose.yml start"
# shell_execute.execute_command_output_all(cmd)
# else:
# raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
# def stop_app(app_id):
# info, flag = app_exits_in_docker(app_id)
# if flag:
# app_path = info.split()[-1].rsplit('/', 1)[0]
# cmd = "docker compose -f " + app_path + "/docker-compose.yml stop"
# shell_execute.execute_command_output_all(cmd)
# else:
# raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
# def restart_app(app_id):
# code, message = docker.check_app_id(app_id)
# if code == None:
# info, flag = app_exits_in_docker(app_id)
# if flag:
# app_path = info.split()[-1].rsplit('/', 1)[0]
# cmd = "docker compose -f " + app_path + "/docker-compose.yml restart"
# shell_execute.execute_command_output_all(cmd)
# else:
# raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
# else:
# raise CommandException(code, message, "")
# def uninstall_app(app_id):
# app_name = app_id.split('_')[0]
# customer_name = app_id.split('_')[1]
# app_path = ""
# info, code_exist = app_exits_in_docker(app_id)
# if code_exist:
# app_path = info.split()[-1].rsplit('/', 1)[0]
# cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
# lib_path = '/data/library/apps/' + app_name
# if app_path != lib_path:
# cmd = cmd + " && sudo rm -rf " + app_path
# shell_execute.execute_command_output_all(cmd)
# else:
# if check_app_rq(app_id):
# delete_app_failedjob(app_id)
# else:
# raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "")
# # Force to delete docker compose
# try:
# cmd = " sudo rm -rf /data/apps/" + customer_name
# shell_execute.execute_command_output_all(cmd)
# except CommandException as ce:
# myLogger.info_logger("Delete app compose exception")
# # Delete proxy config when uninstall app
# app_proxy_delete(app_id)
# # 安装失败后的处理
# def delete_app(app_id):
# try:
# app_name = app_id.split('_')[0]
# customer_name = app_id.split('_')[1]
# app_path = ""
# info, code_exist = app_exits_in_docker(app_id)
# if code_exist:
# app_path = info.split()[-1].rsplit('/', 1)[0]
# cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
# lib_path = '/data/library/apps/' + app_name
# if app_path != lib_path:
# cmd = cmd + " && sudo rm -rf " + app_path
# try:
# myLogger.info_logger("Intall fail, down app and delete files")
# shell_execute.execute_command_output_all(cmd)
# except Exception:
# myLogger.info_logger("Delete app compose exception")
# # 强制删除失败又无法通过docker compose down 删除的容器
# try:
# myLogger.info_logger("IF delete fail, force to delete containers")
# force_cmd = "docker rm -f $(docker ps -f name=^" + customer_name + " -aq)"
# shell_execute.execute_command_output_all(force_cmd)
# except Exception:
# myLogger.info_logger("force delete app compose exception")
# else:
# if check_app_rq(app_id):
# delete_app_failedjob(app_id)
# else:
# raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "")
# cmd = " sudo rm -rf /data/apps/" + customer_name
# shell_execute.execute_command_output_all(cmd)
# except CommandException as ce:
# myLogger.info_logger("Delete app compose exception")
# #安装准备
# def prepare_app(app_name, customer_name):
# library_path = "/data/library/apps/" + app_name
# install_path = "/data/apps/" + customer_name
# shell_execute.execute_command_output_all("cp -r " + library_path + " " + install_path)
# def install_app_delay(app_name, customer_name, app_version):
# myLogger.info_logger("-------RQ install start --------")
# job_id = app_name + "_" + customer_name
# try:
# # 因为这个时候还没有复制文件夹,是从/data/library里面文件读取json来检查的应该是app_name,而不是customer_name
# resource_flag = docker.check_vm_resource(app_name)
# if resource_flag == True:
# myLogger.info_logger("job check ok, continue to install app")
# env_path = "/data/apps/" + customer_name + "/.env"
# # prepare_app(app_name, customer_name)
# docker.check_app_compose(app_name, customer_name)
# myLogger.info_logger("start JobID=" + job_id)
# docker.modify_env(env_path, 'APP_NAME', customer_name)
# docker.modify_env(env_path, "APP_VERSION", app_version)
# docker.check_app_url(customer_name)
# cmd = "cd /data/apps/" + customer_name + " && sudo docker compose pull && sudo docker compose up -d"
# output = shell_execute.execute_command_output_all(cmd)
# myLogger.info_logger("-------Install result--------")
# myLogger.info_logger(output["code"])
# myLogger.info_logger(output["result"])
# try:
# shell_execute.execute_command_output_all("bash /data/apps/" + customer_name + "/src/after_up.sh")
# except Exception as e:
# myLogger.info_logger(str(e))
# else:
# error_info = "##websoft9##" + const.ERROR_SERVER_RESOURCE + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)" + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)"
# myLogger.info_logger(error_info)
# raise Exception(error_info)
# except CommandException as ce:
# myLogger.info_logger(customer_name + " install failed(docker)!")
# delete_app(job_id)
# error_info = "##websoft9##" + ce.code + "##websoft9##" + ce.message + "##websoft9##" + ce.detail
# myLogger.info_logger(error_info)
# raise Exception(error_info)
# except Exception as e:
# myLogger.info_logger(customer_name + " install failed(system)!")
# delete_app(job_id)
# error_info = "##websoft9##" + const.ERROR_SERVER_SYSTEM + "##websoft9##" + 'system original error' + "##websoft9##" + str(
# e)
# myLogger.info_logger(error_info)
# raise Exception(error_info)
# def get_createtime(official_app, app_path, customer_name):
# data_time = ""
# try:
# if official_app:
# cmd = "docker ps -f name=" + customer_name + " --format {{.RunningFor}} | head -n 1"
# result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
# data_time = result
# else:
# cmd_all = "cd " + app_path + " && docker compose ps -a --format json"
# output = shell_execute.execute_command_output_all(cmd_all)
# container_name = json.loads(output["result"])[0]["Name"]
# cmd = "docker ps -f name=" + container_name + " --format {{.RunningFor}} | head -n 1"
# result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
# data_time = result
# except Exception as e:
# myLogger.info_logger(str(e))
# myLogger.info_logger("get_createtime get success" + data_time)
# return data_time
# def check_if_official_app(var_path):
# if docker.check_directory(var_path):
# if docker.read_var(var_path, 'name') != "" and docker.read_var(var_path, 'trademark') != "" and docker.read_var(
# var_path, 'requirements') != "":
# requirements = docker.read_var(var_path, 'requirements')
# try:
# cpu = requirements['cpu']
# mem = requirements['memory']
# disk = requirements['disk']
# return True
# except KeyError:
# return False
# else:
# return False
# # 应用是否已经安装
# def check_app_docker(app_id):
# customer_name = app_id.split('_')[1]
# app_name = app_id.split('_')[0]
# flag = False
# cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
# try:
# shell_execute.execute_command_output_all(cmd)
# flag = True
# myLogger.info_logger("APP in docker")
# except CommandException as ce:
# myLogger.info_logger("APP not in docker")
# return flag
# def check_app_rq(app_id):
# myLogger.info_logger("check_app_rq")
# started = StartedJobRegistry(queue=q)
# failed = FailedJobRegistry(queue=q)
# run_job_ids = started.get_job_ids()
# failed_job_ids = failed.get_job_ids()
# queue_job_ids = q.job_ids
# myLogger.info_logger(queue_job_ids)
# myLogger.info_logger(run_job_ids)
# myLogger.info_logger(failed_job_ids)
# if queue_job_ids and app_id in queue_job_ids:
# myLogger.info_logger("App in RQ")
# return True
# if failed_job_ids and app_id in failed_job_ids:
# myLogger.info_logger("App in RQ")
# return True
# if run_job_ids and app_id in run_job_ids:
# myLogger.info_logger("App in RQ")
# return True
# myLogger.info_logger("App not in RQ")
# return False
# def get_apps_from_queue():
# myLogger.info_logger("get queque apps...")
# # 获取 StartedJobRegistry 实例
# started = StartedJobRegistry(queue=q)
# finish = FinishedJobRegistry(queue=q)
# deferred = DeferredJobRegistry(queue=q)
# failed = FailedJobRegistry(queue=q)
# scheduled = ScheduledJobRegistry(queue=q)
# cancel = CanceledJobRegistry(queue=q)
# # 获取正在执行的作业 ID 列表
# run_job_ids = started.get_job_ids()
# finish_job_ids = finish.get_job_ids()
# wait_job_ids = deferred.get_job_ids()
# failed_jobs = failed.get_job_ids()
# scheduled_jobs = scheduled.get_job_ids()
# cancel_jobs = cancel.get_job_ids()
# myLogger.info_logger(q.jobs)
# myLogger.info_logger(run_job_ids)
# myLogger.info_logger(failed_jobs)
# myLogger.info_logger(cancel_jobs)
# myLogger.info_logger(wait_job_ids)
# myLogger.info_logger(finish_job_ids)
# myLogger.info_logger(scheduled_jobs)
# installing_list = []
# for job_id in run_job_ids:
# app = get_rq_app(job_id, 'installing', "", "", "")
# installing_list.append(app)
# for job in q.jobs:
# app = get_rq_app(job.id, 'installing', "", "", "")
# installing_list.append(app)
# for job_id in failed_jobs:
# job = q.fetch_job(job_id)
# exc_info = job.exc_info
# code = exc_info.split('##websoft9##')[1]
# message = exc_info.split('##websoft9##')[2]
# detail = exc_info.split('##websoft9##')[3]
# app = get_rq_app(job_id, 'failed', code, message, detail)
# installing_list.append(app)
# return installing_list
# #从rq获取app信息
# def get_rq_app(id, status, code, message, detail):
# app_name = id.split('_')[0]
# customer_name = id.split('_')[1]
# # 当app还在RQ时可能文件夹还没创建无法获取trade_mark
# trade_mark = ""
# app_version = ""
# create_time = ""
# volume_data = ""
# config_path = ""
# image_url = get_Image_url(app_name)
# config = None
# if status == "installing":
# status_reason = None
# else:
# status_reason = StatusReason(Code=code, Message=message, Detail=detail)
# app = App(app_id=id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
# app_version=app_version, create_time=create_time, volume_data=volume_data, config_path=config_path,
# status=status, status_reason=status_reason, official_app=True, image_url=image_url,
# app_https=False, app_replace_url=False, config=config)
# return app.dict()
# def get_admin_url(customer_name, url):
# admin_url = ""
# path = "/data/apps/" + customer_name + "/.env"
# try:
# admin_path = list(docker.read_env(path, "APP_ADMIN_PATH").values())[0]
# admin_path = admin_path.replace("\"", "")
# admin_url = url + admin_path
# except IndexError:
# pass
# return admin_url
# def get_container_port(container_name):
# port = "80"
# cmd = "docker port " + container_name + " |grep ::"
# result = shell_execute.execute_command_output_all(cmd)["result"]
# myLogger.info_logger(result)
# port = result.split('/')[0]
# myLogger.info_logger(port)
# return port

View File

@ -1,484 +0,0 @@
import time
def app_domain_list(app_id):
    """Return all proxy domains bound to an app plus its default domain.

    Args:
        app_id: Composite id of the form "<app_name>_<customer_name>".

    Returns:
        dict with key 'domains' (list of bound domain names) and
        'default_domain' (the APP_URL value from the app's .env file,
        or "" when no domain is bound / APP_URL is absent).

    Raises:
        CommandException: when app_id is malformed or the app does not exist.
    """
    # Validate the id format first, then confirm the app actually exists.
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok[app_domain_list]")
        else:
            raise CommandException(
                const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    domains = get_all_domains(app_id)
    myLogger.info_logger(domains)
    ret = {}
    ret['domains'] = domains
    default_domain = ""
    if domains != None and len(domains) > 0:
        customer_name = app_id.split('_')[1]
        # The default domain is whatever APP_URL is set to in the app's .env.
        app_url = shell_execute.execute_command_output_all(
            "cat /data/apps/" + customer_name + "/.env")["result"]
        if "APP_URL" in app_url:
            url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env |grep APP_URL=")[
                "result"].rstrip('\n')
            default_domain = url.split('=')[1]
    ret['default_domain'] = default_domain
    myLogger.info_logger(ret)
    return ret
def get_all_domains(app_id):
    """Return every domain name currently proxied to this app's container."""
    customer_name = app_id.split('_')[1]
    headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    response = requests.get(const.NGINX_URL + "/api/nginx/proxy-hosts", headers=headers)
    domains = []
    # A proxy host belongs to this app when it forwards to the app's container.
    for proxy_host in response.json():
        if proxy_host["forward_host"] == customer_name:
            domains.extend(proxy_host["domain_names"])
    return domains
def app_proxy_delete(app_id):
    """Delete every Nginx Proxy Manager proxy host forwarding to this app.

    Args:
        app_id: Composite id of the form "<app_name>_<customer_name>".

    Improvements over the original: the unused `proxy_host` variable is
    removed, and the token/headers are obtained once instead of being
    re-fetched for every matching proxy host inside the loop.
    """
    customer_name = app_id.split('_')[1]
    token = get_token()
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    url = const.NGINX_URL + "/api/nginx/proxy-hosts"
    response = requests.get(url, headers=headers)
    for proxy in response.json():
        # Delete all proxy hosts that forward to this app's container.
        if proxy["forward_host"] == customer_name:
            proxy_id = proxy["id"]
            delete_url = const.NGINX_URL + "/api/nginx/proxy-hosts/" + str(proxy_id)
            requests.delete(delete_url, headers=headers)
def app_domain_delete(app_id, domain):
    """Unbind *domain* from the app's Nginx Proxy Manager proxy host.

    If it was the proxy host's only domain, the whole proxy host is deleted
    and APP_URL is reset to the public IP; otherwise the proxy host is
    updated with the remaining domains. If the deleted domain was the
    default domain, the first remaining domain becomes the new default.

    Raises:
        CommandException: bad/unknown app_id, blank domain, domain not
            bound, or an error reported by Nginx Proxy Manager.
    """
    # Validate the id format, then confirm the app actually exists.
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok[app_domain_delete]")
        else:
            raise CommandException(
                const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    # "undefined" guards against a missing value serialized by the JS client.
    if domain is None or domain == "undefined":
        raise CommandException(
            const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
    old_all_domains = get_all_domains(app_id)
    if domain not in old_all_domains:
        myLogger.info_logger("delete domain is not binded")
        raise CommandException(
            const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is not bind.", "")
    myLogger.info_logger("Start to delete " + domain)
    proxy = get_proxy_domain(app_id, domain)
    if proxy != None:
        myLogger.info_logger(proxy)
        myLogger.info_logger("before update")
        domains_old = proxy["domain_names"]
        myLogger.info_logger(domains_old)
        domains_old.remove(domain)
        myLogger.info_logger("after update")
        myLogger.info_logger(domains_old)
        if len(domains_old) == 0:
            # Last bound domain: delete the whole proxy host.
            proxy_id = proxy["id"]
            token = get_token()
            url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            response = requests.delete(url, headers=headers)
            try:
                if response.json().get("error"):
                    raise CommandException(
                        const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            except Exception:
                # NOTE(review): this broad except also swallows the
                # CommandException raised just above, so an NPM error is only
                # logged, never propagated — confirm this is intended.
                myLogger.info_logger(response.json())
            # Reset APP_URL back to the server's public IP.
            set_domain("", app_id)
        else:
            # Domains remain: push the shortened list back to NPM with the
            # full proxy-host configuration (the PUT replaces the record).
            proxy_id = proxy["id"]
            token = get_token()
            url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            port = get_container_port(app_id.split('_')[1])
            host = app_id.split('_')[1]
            data = {
                "domain_names": domains_old,
                "forward_scheme": "http",
                "forward_host": host,
                "forward_port": port,
                "access_list_id": "0",
                "certificate_id": 0,
                "meta": {
                    "letsencrypt_agree": False,
                    "dns_challenge": False
                },
                "advanced_config": "",
                "locations": [],
                "block_exploits": False,
                "caching_enabled": False,
                "allow_websocket_upgrade": False,
                "http2_support": False,
                "hsts_enabled": False,
                "hsts_subdomains": False,
                "ssl_forced": False
            }
            response = requests.put(
                url, data=json.dumps(data), headers=headers)
            if response.json().get("error"):
                raise CommandException(
                    const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            domain_set = app_domain_list(app_id)
            default_domain = domain_set['default_domain']
            # If the deleted domain was the default, promote the first
            # remaining domain to default.
            if default_domain == domain:
                set_domain(domains_old[0], app_id)
    else:
        raise CommandException(
            const.ERROR_CLIENT_PARAM_NOTEXIST, "Delete domain is not bind", "")
def app_domain_update(app_id, domain_old, domain_new):
    """Replace *domain_old* with *domain_new* on the app's proxy host.

    Keeps the default domain consistent: when the replaced domain was the
    default, the new domain becomes the default.

    Raises:
        CommandException: invalid domains, bad/unknown app_id, the old
            domain not bound, or an error reported by Nginx Proxy Manager.
    """
    myLogger.info_logger("app_domain_update")
    # Both domains must be well-formed and resolve to this server.
    domain_list = []
    domain_list.append(domain_old)
    domain_list.append(domain_new)
    check_domains(domain_list)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(
                const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    proxy = get_proxy_domain(app_id, domain_old)
    if proxy != None:
        # Swap the domain in place and PUT the full host config back to NPM.
        domains_old = proxy["domain_names"]
        index = domains_old.index(domain_old)
        domains_old[index] = domain_new
        proxy_id = proxy["id"]
        token = get_token()
        url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domains_old,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(
                const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        domain_set = app_domain_list(app_id)
        default_domain = domain_set['default_domain']
        myLogger.info_logger("default_domain=" +
                             default_domain + ",domain_old=" + domain_old)
        # If the edited domain was the default, keep the new one as default.
        if default_domain == domain_old:
            set_domain(domain_new, app_id)
    else:
        raise CommandException(
            const.ERROR_CLIENT_PARAM_NOTEXIST, "edit domain is not exist", "")
def app_domain_add(app_id, domain):
    """Bind an additional *domain* to the app's proxy host.

    Updates the existing proxy host when one exists; otherwise creates a new
    proxy host forwarding to the app's container. The new domain then
    becomes the app's default domain.

    Returns:
        The bound domain.

    Raises:
        CommandException: invalid domain, bad/unknown app_id, domain already
            bound, or an error reported by Nginx Proxy Manager.
    """
    temp_domains = []
    temp_domains.append(domain)
    # Domain must be well-formed and resolve to this server.
    check_domains(temp_domains)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(
                const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    old_domains = get_all_domains(app_id)
    if domain in old_domains:
        raise CommandException(
            const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is in use", "")
    proxy = get_proxy(app_id)
    if proxy != None:
        # Existing proxy host: append the domain and PUT the full config back.
        domains_old = proxy["domain_names"]
        domain_list = domains_old
        domain_list.append(domain)
        proxy_id = proxy["id"]
        token = get_token()
        url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domain_list,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(
                const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
    else:
        # No proxy host yet: create one for this domain.
        token = get_token()
        url = const.NGINX_URL+"/api/nginx/proxy-hosts"
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": temp_domains,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.post(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(
                const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
    # The newly added domain becomes the default.
    set_domain(domain, app_id)
    return domain
def check_domains(domains):
    """Validate that every domain is well-formed and resolves to this server.

    Raises:
        CommandException: blank list, malformed domain, or a domain whose
            resolved IP does not match this server's public IP.
    """
    myLogger.info_logger(domains)
    if not domains:
        raise CommandException(
            const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
    for candidate in domains:
        # Format check first, then the DNS-resolution check.
        if not is_valid_domain(candidate):
            raise CommandException(
                const.ERROR_CLIENT_PARAM_Format, "Domains format error", "")
        if check_real_domain(candidate) == False:
            raise CommandException(
                const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain and server not match", "")
def is_valid_domain(domain):
    """Return False for values that carry a scheme-like 'http' prefix, else True."""
    return not domain.startswith("http")
def check_real_domain(domain):
    """Check that *domain* resolves (via ping) to this server's public IP.

    Returns:
        True when the IPv4 address extracted from a single ping equals the
        saved public IP in /data/apps/w9services/w9appmanage/public_ip;
        False on mismatch or when the shell pipeline fails.
    """
    domain_real = True
    try:
        # Raw string for the grep pattern: '\.' inside a plain Python string
        # is an invalid escape sequence (SyntaxWarning, slated to become an
        # error in future Python versions).
        cmd = ("ping -c 1 " + domain +
               r" | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | uniq")
        domain_ip = shell_execute.execute_command_output_all(cmd)[
            "result"].rstrip('\n')
        ip_result = shell_execute.execute_command_output_all(
            "cat /data/apps/w9services/w9appmanage/public_ip")
        ip_save = ip_result["result"].rstrip('\n')
        if domain_ip == ip_save:
            myLogger.info_logger("Domain check ok!")
        else:
            domain_real = False
    except CommandException:
        # The ping/grep pipeline failed — treat as "does not resolve here".
        domain_real = False
    return domain_real
def get_proxy_domain(app_id, domain):
    """Find the NPM proxy host that forwards to this app AND contains *domain*.

    Returns:
        The proxy-host dict from Nginx Proxy Manager, or None when no
        matching record exists.
    """
    customer_name = app_id.split('_')[1]
    proxy_host = None
    token = get_token()
    url = const.NGINX_URL+"/api/nginx/proxy-hosts"
    headers = {
        'Authorization': token,
        'Content-Type': 'application/json'
    }
    response = requests.get(url, headers=headers)
    myLogger.info_logger(response.json())
    for proxy in response.json():
        portainer_name = proxy["forward_host"]
        domain_list = proxy["domain_names"]
        # Match on the container the proxy forwards to, then on the domain.
        if customer_name == portainer_name:
            myLogger.info_logger("-------------------")
            if domain in domain_list:
                myLogger.info_logger("find the domain proxy")
                proxy_host = proxy
                break
    return proxy_host
def app_domain_set(domain, app_id):
    """Validate the app and domain, then make *domain* the app's default.

    Raises:
        CommandException: invalid domain or bad/unknown app_id.
    """
    check_domains([domain])
    code, message = docker.check_app_id(app_id)
    if code is not None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(
            const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    myLogger.info_logger("Check app_id ok")
    set_domain(domain, app_id)
def set_domain(domain, app_id):
    """Write *domain* into the app's .env as APP_URL (the default domain).

    An empty *domain* resets APP_URL to the server's saved public IP. When
    the app opts in with APP_URL_REPLACE=true, the compose project is
    re-upped so the new URL takes effect.

    Raises:
        CommandException: when a non-empty domain is not currently bound.

    Fix vs original: the two branches under "APP_URL is exist" were
    byte-identical duplicates differing only in the public-IP fallback,
    which is now hoisted out.
    """
    myLogger.info_logger("set_domain start")
    old_domains = get_all_domains(app_id)
    if domain != "":
        if domain not in old_domains:
            message = domain + " is not in use"
            raise CommandException(
                const.ERROR_CLIENT_PARAM_NOTEXIST, message, "")
    customer_name = app_id.split('_')[1]
    app_url = shell_execute.execute_command_output_all(
        "cat /data/apps/" + customer_name + "/.env")["result"]
    # Empty domain means "fall back to the server's public IP".
    if domain == "":
        ip_result = shell_execute.execute_command_output_all(
            "cat /data/apps/w9services/w9appmanage/public_ip")
        domain = ip_result["result"].rstrip('\n')
    if "APP_URL" in app_url:
        myLogger.info_logger("APP_URL is exist")
        # Replace the existing APP_URL line in place.
        cmd = "sed -i 's/APP_URL=.*/APP_URL=" + domain + \
            "/g' /data/apps/" + customer_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
        if "APP_URL_REPLACE=true" in app_url:
            myLogger.info_logger("need up")
            shell_execute.execute_command_output_all(
                "cd /data/apps/" + customer_name + " && docker compose up -d")
    else:
        myLogger.info_logger("APP_URL is not exist")
        # Insert APP_URL right after the APP_NETWORK line.
        cmd = "sed -i '/APP_NETWORK/a APP_URL=" + domain + \
            "' /data/apps/" + customer_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
    myLogger.info_logger("set_domain success")

View File

@ -1,177 +0,0 @@
def get_release_url():
    """Return the artifact base URL, honouring the DB 'preview' flag."""
    preview = db.AppSearchPreview().get("preview")
    myLogger.info_logger(preview)
    # Preview mode switches downloads to the dev artifact channel.
    return const.ARTIFACT_URL if preview == "false" else const.ARTIFACT_URL_DEV
def appstore_update():
    """Unconditionally install the latest appstore plugin and app library.

    Downloads appstore-latest.zip and library-latest.zip from the release
    URL, unpacks them under /opt, swaps them into /usr/share/cockpit and
    /data respectively, then removes the temporary /opt artifacts.
    """
    myLogger.info_logger("appstore update start...")
    # An explicit appstore-upgrade click always updates; no version gate here.
    release_url = get_release_url()
    download_url = release_url + "/plugin/appstore/appstore-latest.zip"
    cmd = "cd /opt && rm -rf /opt/appstore* && wget -q " + download_url + " && unzip -q appstore-latest.zip "
    shell_execute.execute_command_output_all(cmd)
    shell_execute.execute_command_output_all("rm -rf /usr/share/cockpit/appstore && cp -r /opt/appstore /usr/share/cockpit")
    shell_execute.execute_command_output_all("rm -rf /opt/appstore*")
    library_url = release_url + "/plugin/library/library-latest.zip"
    library_cmd = "cd /opt && rm -rf /opt/library* && wget -q " + library_url + " && unzip -q library-latest.zip "
    shell_execute.execute_command_output_all(library_cmd)
    shell_execute.execute_command_output_all("rm -rf /data/library && cp -r /opt/library /data")
    shell_execute.execute_command_output_all("rm -rf /opt/library*")
    myLogger.info_logger("auto update success...")
def AppStoreUpdate():
    """Update the appstore plugin when a newer release exists.

    Raises:
        CommandException: when the installed core version is outside the
            range the latest appstore supports.
    """
    core_support = AppStoreCore()
    release_url = get_release_url()
    if core_support == "-1":
        raise CommandException(const.ERRORMESSAGE_SERVER_VERSION_NEEDUPGRADE, "You must upgrade websoft9 core", "You must upgrade websoft9 core")
    elif core_support == "1":
        raise CommandException(const.ERRORMESSAGE_SERVER_VERSION_NOTSUPPORT, "core not support,can not upgrade", "core not support,can not upgrade")
    local_path = '/usr/share/cockpit/appstore/appstore.json'
    local_version = "0"
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['Version']
    except Exception:
        # Missing/unreadable plugin manifest: treat as the oldest version.
        local_version = "0.0.0"
    version_cmd = "wget -O appstore.json " + release_url + "/plugin/appstore/appstore.json && cat appstore.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    version = json.loads(latest)['Version']
    # Bug fix: versions were compared as plain strings ("0.9" > "0.10"),
    # which breaks once any segment reaches two digits. Compare numerically
    # segment by segment, consistent with get_update_list().
    if compared_version(local_version, version) == -1:
        appstore_update()
    else:
        myLogger.info_logger("You click update appstore, but not need to update")
def AppPreviewUpdate(preview):
    """Get or set the appstore 'preview' flag; returns the effective value.

    Raises:
        CommandException: when *preview* is not a recognised value.
    """
    myLogger.info_logger("AppPreviewUpdate")
    if preview in ("true", "True"):
        db.AppUpdatePreview(preview)
        return "true"
    if preview in ("false", "False"):
        db.AppUpdatePreview(preview)
        return "false"
    # No value supplied: report the currently stored flag.
    if preview is None or preview in ("", "undefine"):
        return db.AppSearchPreview().get("preview")
    raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "preview is true,false,blank", "preview is true,false,blank")
# Check whether the installed core VERSION supports updating the appstore.
def AppStoreCore():
    """Compare the installed core version against the appstore's supported range.

    Returns:
        "0"  core within [Requires at least, Requires at most] — supported.
        "-1" core too old — the core must be upgraded first.
        "1"  core too new — the appstore does not support it.
    """
    release_url = get_release_url()
    version_cmd = "wget -O appstore.json " + release_url + "/plugin/appstore/appstore.json && cat appstore.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    most_version = json.loads(latest)['Requires at most']
    least_version = json.loads(latest)['Requires at least']
    now = shell_execute.execute_command_output_all("cat /data/apps/websoft9/version.json")['result']
    now_version = json.loads(now)['VERSION']
    version_str = "now_version:" + now_version + " least_version:" + least_version + " most_version:" + most_version
    myLogger.info_logger(version_str)
    # Bug fix: versions were compared as plain strings, which breaks once a
    # segment reaches two digits ("0.9" > "0.10"). Compare numerically.
    if compared_version(now_version, least_version) >= 0 and compared_version(now_version, most_version) <= 0:
        return "0"
    elif compared_version(now_version, least_version) == -1:
        return "-1"
    elif compared_version(now_version, most_version) == 1:
        return "1"
    return "0"
# Fetch the core (websoft9) update/changelog information.
def get_update_list(url: str = None):
    """Return core update info relative to the locally installed version.

    Args:
        url: Optional artifact base URL override; defaults to
            const.ARTIFACT_URL.

    Returns:
        dict with 'local_version', 'target_version', 'update' (bool),
        'date' and 'content' (changelog lines; only populated when an
        update is available).
    """
    local_path = '/data/apps/websoft9/version.json'
    artifact_url = const.ARTIFACT_URL
    if url:
        artifact_url = url
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['VERSION']
    except:
        # Missing/unreadable version file: treat as the oldest version.
        local_version = "0.0.0"
    version_cmd = f"wget -O version.json {artifact_url}/version.json && cat version.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    version = json.loads(latest)['VERSION']
    ret = {}
    ret['local_version'] = local_version
    ret['target_version'] = version
    content = []
    date = ""
    if compared_version(local_version, version) == -1:
        ret['update'] = True
        cmd = f"wget -O CHANGELOG.md {artifact_url}/CHANGELOG.md && head -n 20 CHANGELOG.md"
        change_log_contents = shell_execute.execute_command_output(cmd)
        # First "## " section of the changelog: the heading line carries the
        # release date, the following non-empty lines are the change entries.
        change_log = change_log_contents.split('## ')[1].split('\n')
        date = change_log[0].split()[-1]
        for change in change_log[1:]:
            if change != '':
                content.append(change)
    else:
        ret['update'] = False
    ret['date'] = date
    ret['content'] = content
    return ret
# Fetch the appstore plugin's update/changelog information.
def get_appstore_update_list():
    """Return appstore update info relative to the installed plugin version.

    Returns:
        dict with 'local_version', 'target_version', 'update' (bool),
        'date', 'content' (changelog lines) and 'core_compare' (the
        AppStoreCore() result; only populated when an update is available).
    """
    release_url = get_release_url()
    local_path = '/usr/share/cockpit/appstore/appstore.json'
    local_version = "0"
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['Version']
    except:
        # Missing/unreadable plugin manifest: treat as the oldest version.
        local_version = "0.0.0"
    version_cmd = "wget -O appstore.json -N " + release_url + "/plugin/appstore/appstore.json && cat appstore.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    version = json.loads(latest)['Version']
    ret = {}
    ret['local_version'] = local_version
    ret['target_version'] = version
    content = []
    date = ""
    core_compare = ""
    if compared_version(local_version, version) == -1:
        ret['update'] = True
        cmd = "wget -O CHANGELOG.md " + release_url + "/plugin/appstore/CHANGELOG.md && cat CHANGELOG.md"
        change_log_contents = shell_execute.execute_command_output_all(cmd)['result']
        # First "## " section: heading carries the date, rest are entries.
        change_log = change_log_contents.split('## ')[1].split('\n')
        date = change_log[0].split()[-1]
        for change in change_log[1:]:
            if change != '':
                content.append(change)
        core_compare = AppStoreCore()
    else:
        ret['update'] = False
    ret['date'] = date
    ret['content'] = content
    ret['core_compare'] = core_compare
    return ret
def compared_version(ver1, ver2):
    """Compare two dotted version strings numerically, segment by segment.

    Returns:
        -1 when ver1 < ver2, 1 when ver1 > ver2, 0 when equal.
        When the shared prefix is equal, the longer version wins
        (e.g. "1.2.0" > "1.2").
    """
    parts1 = str(ver1).split(".")
    parts2 = str(ver2).split(".")
    # Walk the shared prefix; the first differing segment decides.
    for a, b in zip(parts1, parts2):
        left, right = int(a), int(b)
        if left < right:
            return -1
        if left > right:
            return 1
    # Shared prefix equal: the version with more segments is the higher one.
    if len(parts1) < len(parts2):
        return -1
    if len(parts1) > len(parts2):
        return 1
    return 0

View File

@ -1,45 +0,0 @@
#!/usr/bin/python3
import subprocess
from api.utils.log import myLogger
from api.exception.command_exception import CommandException
from api.utils import const
# This function runs a shell command inside this container.
# cmd_str e.g.: "ls -a"
# TODO: enforce the ~4000-character limit on the returned string
def execute_command_output(cmd_str):
    """Run *cmd_str* in a shell inside this container and return its output.

    Echoes both the command and its combined stdout/stderr to stdout for
    debugging, exactly like the original implementation.
    """
    print(cmd_str)
    output = subprocess.getoutput(cmd_str)
    print(output)
    return output
# This function runs a shell command on the host machine.
# cmd_str e.g.: "ls -a"
# The returned string is limited to 4000 chars
def execute_command_output_all(cmd_str):
    """Run *cmd_str* on the HOST machine via nsenter into PID 1's namespaces.

    Returns:
        dict {"code": "0", "result": <stdout>} on success.

    Raises:
        CommandException: when the command exits non-zero OR its stdout
            contains any of "Fail"/"fail"/"Error"/"error".
            NOTE(review): this substring heuristic misclassifies legitimate
            output that merely mentions the word "error" — confirm intended.
    """
    myLogger.info_logger("Start to execute cmd: " + cmd_str)
    # nsenter -t 1 enters the host's mount/uts/ipc/net/pid namespaces so the
    # command behaves as if run directly on the host.
    process = subprocess.run(f'nsenter -m -u -i -n -p -t 1 sh -c "{cmd_str}"', capture_output=True, bufsize=65536, check=False, text=True, shell=True)
    if process.returncode == 0 and 'Fail' not in process.stdout and 'fail' not in process.stdout and 'Error' not in process.stdout and 'error' not in process.stdout:
        return {"code": "0", "result": process.stdout}
    else:
        myLogger.info_logger("Failed to execute cmd, output failed result")
        myLogger.info_logger(process)
        raise CommandException(const.ERROR_SERVER_COMMAND, "Docker returns the original error", process.stderr)
# This function converts a container command into a host-machine command
def convert_command(cmd_str):
    """Wrap a container command so it would execute on the host via nsenter.

    An empty command is returned unchanged.
    """
    if cmd_str == "":
        return cmd_str
    return "nsenter -m -u -i -n -p -t 1 sh -c '" + cmd_str + "'"

View File

@ -1,7 +0,0 @@
# from fastapi import FastAPI
# from fastapi.routing import APIRouter
# from api.v1 import main as v1_router
# app = FastAPI()
# app.include_router(v1_router.router, prefix="/api/v1")

View File

@ -0,0 +1,22 @@
from fastapi import APIRouter, HTTPException, Query
from typing import List, Optional
from pydantic import BaseModel
from src.schemas.appInstall import appInstallPayload
# Shared router for the app-management endpoints; mounted in src/main.py.
router = APIRouter()

@router.get("/apps/")
def get_apps():
    """List all apps (placeholder implementation)."""
    return {"apps": "apps"}
@router.post("/apps/install", summary="Install App", description="Install an app on an endpoint", responses={400: {"description": "Invalid EndpointId"}, 500: {"description": "Internal Server Error"}})
def apps_install(app_install_payload: appInstallPayload, endpointId: int = Query(..., description="Endpoint ID to install app on")):
    """Install an app on the given endpoint (placeholder implementation).

    Fixes vs original:
    - endpointId was annotated ``str`` but compared with ``< 0``, which
      raises TypeError on every request; it is now an ``int`` (FastAPI
      coerces the query string, so callers are unaffected).
    - the HTTPException(400) was raised inside the ``try`` and swallowed by
      the broad ``except Exception``, turning every 400 into a 500; the
      validation now runs before the generic handler and HTTPExceptions
      are re-raised untouched.
    """
    if endpointId < 0:
        raise HTTPException(status_code=400, detail="Invalid EndpointId")
    try:
        app_name = app_install_payload.app_name
        return app_name
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail="Internal Server Error")

View File

@ -0,0 +1,12 @@
from fastapi import APIRouter, Query
from typing import List, Optional
# Shared router for the settings endpoints; mounted in src/main.py.
router = APIRouter()

@router.get("/settings", summary="Get settings", description="Get settings")
def get_settings():
    """Return the current settings (placeholder implementation)."""
    return {"settings": "settings"}
@router.put("/settings")
def update_settings():
    """Update the settings (placeholder implementation)."""
    return {"settings": "settings"}

View File

@ -1,13 +1,3 @@
"""
FileName: config.py
Author: Jing.zhao
Created: 2023-08-31
Description: This script demonstrates how to managing configuration using configparser.
Modified by:
Modified Date:
Modification:
"""
import configparser

View File

@ -1,13 +1,3 @@
"""
FileName: logger.py
Author: Jing.zhao
Created: 2023-08-30
Description: This script defines a custom logger that can create and manage two types of logs: 'access' and 'error' for the application.
Modified by:
Modified Date:
Modification:
"""
import os
import logging

View File

@ -0,0 +1,3 @@
class GiteaAPI:
    """Client for the Gitea REST API (stub; not yet implemented)."""

    def __init__(self) -> None:
        pass

View File

@ -1,7 +1,7 @@
import requests
from typing import List, Union
from app.core.config import ConfigManager
from src.core.config import ConfigManager
class NginxProxyManagerAPI:
"""

View File

@ -0,0 +1,3 @@
class PortainerAPI:
    """Client for the Portainer REST API (stub; not yet implemented).

    NOTE(review): PortainerManager constructs this class with a url
    argument, which this stub does not accept — confirm the intended
    constructor signature.
    """

    def __init__(self) -> None:
        pass

12
appmanage_new/src/main.py Normal file
View File

@ -0,0 +1,12 @@
from fastapi import FastAPI
from src.api.v1.routers import app as api_app
from src.api.v1.routers import settings as api_settings

# Application entry point; run with: uvicorn src.main:app --reload --port 8080
app = FastAPI(
    title="FastAPI Template",
    description="FastAPI Template 123",
    version="0.0.1"
)
# Mount the v1 routers; tags group the endpoints in the OpenAPI docs.
app.include_router(api_app.router, tags=["apps"])
app.include_router(api_settings.router, tags=["settings"])

View File

@ -0,0 +1,24 @@
from typing import Optional, List
from pydantic import BaseModel, Field, validator
class Edition(BaseModel):
    """Edition (release channel + version) of an app to install."""
    # Release channel; defaults to "community".
    dist: str = Field("community", description="The edition of the app", examples=["community", "enterprise"])
    # Version string, e.g. "1.0.0" or "latest".
    version: str = Field(..., description="The version of the app", examples=["1.0.0", "latest"])
class appInstall(BaseModel):
    """Request payload for installing an app.

    NOTE(review): the apps router imports ``appInstallPayload`` from this
    module while this class is named ``appInstall`` — confirm whether an
    alias or rename is intended.
    NOTE(review): some fields use ``example=`` while others use
    ``examples=`` — confirm which pydantic/FastAPI convention applies.
    """
    app_name: str = Field(..., description="The name of the app", examples=["wordpress", "mysql"])
    edition: Edition = Field(..., description="The edition of the app", example={"dist": "community", "version": "1.0.0"})
    app_id: str = Field(..., pattern="^[a-z][a-z0-9]{1,19}$", description="The custom identifier for the application. It must be a combination of 2 to 20 lowercase letters and numbers, and cannot start with a number.", example="wordpress")
    domain_names: Optional[List[str]] = Field(None, description="The domain names for the app, not exceeding 2, one wildcard domain and one custom domain.", example=["wordpress.example.com", "mysql.example.com"])
    default_domain: Optional[str] = Field(None, description="The default domain for the app, sourced from domain_names. If not set, the custom domain will be used automatically.", example="wordpress.example.com")

    @validator('domain_names', check_fields=False)
    def validate_domain_names(cls, v):
        """Reject payloads carrying more than two domain names."""
        if v and len(v) > 2:
            raise ValueError('domain_names should not exceed 2.')
        return v

    class Config:
        title = "App Installation"
        description = "App Installation Payload"

View File

@ -0,0 +1,23 @@
from fastapi import logger
from src.external.portainer_api import PortainerAPI
class PortainerManager:
    """Manages a Portainer instance through the PortainerAPI client."""

    def __init__(self, portainer_url, portainer_username, portainer_password):
        """
        Init Portainer Manager
        Args:
            portainer_url (str): The url of the portainer
            portainer_username (str): The username of the portainer
            portainer_password (str): The password of the portainer
        Raises:
            Exception: re-raised when the API client cannot be created or
                the token handshake fails.
        """
        self.portainer_url = portainer_url
        self.portainer_username = portainer_username
        self.portainer_password = portainer_password
        try:
            # NOTE(review): the PortainerAPI stub in src/external takes no
            # constructor arguments — confirm the intended signature.
            self.portainer = PortainerAPI(self.portainer_url)
            self._init_portainer_token()
        except Exception as e:
            logger.error(f"Init Portainer API Error:{e}")
            raise e

View File

@ -2,11 +2,11 @@
import time
import keyring
import json
from app.core.logger import logger
from app.external.nginx_proxy_manager_api import NginxProxyManagerAPI
from src.core.logger import logger
from src.external.nginx_proxy_manager_api import NginxProxyManagerAPI
class DomainManager:
class ProxyManager:
def __init__(self, app_name):
"""
Init Domain Manager
@ -70,25 +70,6 @@ class DomainManager:
logger.error(f"Set Nginx Proxy Manager's Token To Keyring Error:{e}")
return
def is_valid_domain(self, domain_names: list[str]):
# 验证domain_names这个列表中的域名格式是否合法如果不合法返回以列表格式返回不合法的域名如果合法继续验证其是否解析到本机如果没有解析到本机返回以列表格式返回没有解析到本机的域名
# 验证域名格式是否合法
invalid_domain_names = []
for domain_name in domain_names:
if not self.nginx.is_valid_domain(domain_name):
invalid_domain_names.append(domain_name)
if len(invalid_domain_names) > 0:
return False, invalid_domain_names
# 验证域名是否解析到本机
not_resolved_domain_names = []
for domain_name in domain_names:
if not self.nginx.is_resolved_domain(domain_name):
not_resolved_domain_names.append(domain_name)
if len(not_resolved_domain_names) > 0:
return False, not_resolved_domain_names
return True, None
def create_proxy_for_app(self, domain_names:list[str],forward_port:int,advanced_config:str="",forward_scheme:str="http"):
try:
self.nginx.create_proxy_host(domain_names=domain_names,forward_scheme=forward_scheme,forward_port=forward_port,advanced_config=advanced_config)

View File

@ -1,6 +1,6 @@
import requests
from app.core.config import ConfigManager
from src.core.config import ConfigManager
class PublicIPGetter:
"""