mirror of
https://github.com/Websoft9/websoft9.git
synced 2025-02-02 17:08:38 +08:00
add appmanage_new
This commit is contained in:
parent
97efc1301a
commit
dd8721e382
0
appmanage_new/.env
Normal file
0
appmanage_new/.env
Normal file
0
appmanage_new/README.md
Normal file
0
appmanage_new/README.md
Normal file
0
appmanage_new/app/api/v1/__init__.py
Normal file
0
appmanage_new/app/api/v1/__init__.py
Normal file
16
appmanage_new/app/api/v1/api.py
Normal file
16
appmanage_new/app/api/v1/api.py
Normal file
@ -0,0 +1,16 @@
|
||||
from fastapi import APIRouter, HTTPException

from typing import List

from schemas.user import UserCreate
from models.user import User as UserModel
from services.user_service import UserService
from db.database import SessionLocal

router = APIRouter()
user_service = UserService()


@router.get("/users/{user_type}", response_model=List[UserModel])
async def get_users(user_type: str):
    """Return every user of the given *user_type*.

    Raises:
        HTTPException: 404 when no users of that type exist.
    """
    users = user_service.get_users_by_type(user_type)
    if not users:
        # FIX: HTTPException was raised here but never imported — every
        # empty result crashed with NameError instead of returning 404.
        raise HTTPException(status_code=404, detail="Users not found")
    return users
|
0
appmanage_new/app/core/api_key.py
Normal file
0
appmanage_new/app/core/api_key.py
Normal file
34
appmanage_new/app/core/code.py
Normal file
34
appmanage_new/app/core/code.py
Normal file
@ -0,0 +1,34 @@
|
||||
# All constants for the app manager are defined in this single section.

# Error code definitions (machine-readable codes returned to API clients)
ERROR_CLIENT_PARAM_BLANK = "Client.Parameter.Blank.Error"
# NOTE(review): casing is inconsistent with the sibling constants (Format vs FORMAT)
ERROR_CLIENT_PARAM_Format = "Client.Parameter.Format.Error"
ERROR_CLIENT_PARAM_NOTEXIST = "Client.Parameter.Value.NotExist.Error"
ERROR_CLIENT_PARAM_REPEAT = "Client.Parameter.Value.Repeat.Error"
ERROR_CONFIG_NGINX = "Nginx.Configure.Error"
ERROR_SERVER_COMMAND = "Server.Container.Error"
ERROR_SERVER_SYSTEM = "Server.SystemError"
ERROR_SERVER_RESOURCE = "Server.ResourceError"
ERROR_SERVER_CONFIG_MISSING = "Server.Config.NotFound"

# Error message definitions
# NOTE(review): these duplicate the code strings above verbatim — confirm
# whether human-readable messages were intended here.
ERRORMESSAGE_CLIENT_PARAM_BLANK = "Client.Parameter.Blank.Error"
ERRORMESSAGE_CLIENT_PARAM_Format = "Client.Parameter.Format.Error"
ERRORMESSAGE_CLIENT_PARAM_NOTEXIST = "Client.Parameter.Value.NotExist.Error"
ERRORMESSAGE_CLIENT_PARAM_REPEAT = "Client.Parameter.Value.Repeat.Error"
ERRORMESSAGE_SERVER_COMMAND = "Server.Container.Error"
ERRORMESSAGE_SERVER_SYSTEM = "Server.SystemError"
ERRORMESSAGE_SERVER_RESOURCE = "Server.ResourceError"
ERRORMESSAGE_SERVER_VERSION_NOTSUPPORT = "Server.Version.NotSupport"
ERRORMESSAGE_SERVER_VERSION_NEEDUPGRADE = "Server.Version.NeedUpgradeCore"

# App lifecycle states (mirroring docker compose status values)
# App is being installed
APP_STATUS_INSTALLING = "installing"
# App is running
APP_STATUS_RUNNING = "running"
# App has stopped
APP_STATUS_EXITED = "exited"
# App keeps restarting
APP_STATUS_RESTARTING = "restarting"
# App is in an error state
APP_STATUS_FAILED = "failed"
|
4
appmanage_new/app/core/config.py
Normal file
4
appmanage_new/app/core/config.py
Normal file
@ -0,0 +1,4 @@
|
||||
# Admin endpoint of the bundled Nginx Proxy Manager container
NGINX_URL = "http://websoft9-nginxproxymanager:81"
# Previous artifact CDN, kept for reference:
# ARTIFACT_URL="https://artifact.azureedge.net/release/websoft9"
# Release artifact store (app templates, media)
ARTIFACT_URL = "https://w9artifact.blob.core.windows.net/release/websoft9"
# Development artifact store
ARTIFACT_URL_DEV = "https://w9artifact.blob.core.windows.net/dev/websoft9"
|
10
appmanage_new/app/core/exception.py
Normal file
10
appmanage_new/app/core/exception.py
Normal file
@ -0,0 +1,10 @@
|
||||
class CommandException(Exception):
    """Raised when a shell/container command executed by the manager fails.

    Attributes:
        code:    machine-readable error code (see the core.code constants).
        message: short human-readable description.
        detail:  raw command output or extra context for debugging.
    """

    def __init__(self, code, message, detail):
        # FIX: call Exception.__init__ so str(e), e.args and standard
        # logging of the exception carry the message (they were empty before).
        super().__init__(message)
        self.code = code
        self.message = message
        self.detail = detail


class MissingConfigException(CommandException):
    """Raised when a required configuration entry is absent."""

    pass
|
40
appmanage_new/app/core/log.py
Normal file
40
appmanage_new/app/core/log.py
Normal file
@ -0,0 +1,40 @@
|
||||
import logging
import os
from logging import handlers


class MyLogging():
    """Thin wrapper around the root logger writing logs/app_manage.log,
    rotated daily at midnight. Instantiating it reconfigures the *root*
    logger for the whole process."""

    # init logging
    def __init__(self):
        # the file of log — created relative to the current working directory
        logPath = 'logs/'
        if not os.path.exists(logPath):
            os.makedirs(logPath)
        logName = 'app_manage.log'
        logFile = logPath + logName
        formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
        # handler: rotate at midnight every day, write UTF-8
        time_rotating_file_handler = handlers.TimedRotatingFileHandler(filename=logFile, when="MIDNIGHT", interval=1, encoding='utf-8')
        time_rotating_file_handler.setLevel(logging.DEBUG)
        time_rotating_file_handler.setFormatter(formatter)
        # config — basicConfig is a no-op if the root logger was already
        # configured elsewhere; NOTE(review): confirm this module is the
        # first to configure logging.
        logging.basicConfig(
            level= logging.DEBUG,
            handlers= [time_rotating_file_handler],
            datefmt='%Y-%m-%d %H:%M:%S',
            format='%(asctime)s %(levelname)s: %(message)s'
        )

    def info_logger(self, content):
        # Log *content* at INFO level via the root logger.
        logging.info(content)

    def error_logger(self, content):
        # Log *content* at ERROR level.
        logging.error(content)

    def debug_logger(self, content):
        # Log *content* at DEBUG level.
        logging.debug(content)

    def warning_logger(self, content):
        # Log *content* at WARNING level.
        logging.warning(content)


# Module-level singleton; importing this module creates logs/ as a side effect.
myLogger = MyLogging()
|
327
appmanage_new/app/core/prerequisite.py
Normal file
327
appmanage_new/app/core/prerequisite.py
Normal file
@ -0,0 +1,327 @@
|
||||
import json, psutil
|
||||
import re
|
||||
|
||||
from api.utils.log import myLogger
|
||||
from api.utils import shell_execute, const
|
||||
from api.exception.command_exception import CommandException
|
||||
from api.service import manage
|
||||
|
||||
|
||||
# How do we know an already-running app is actually reachable — e.g. the web
# page loads, or mysql accepts client connections?
def if_app_access(app_name):
    # TODO: stub — always reports the app as reachable; a real health check
    # is not implemented yet.
    return True
|
||||
|
||||
|
||||
def if_app_exits(app_name):
    """Return True when a compose project named exactly *app_name* is listed
    by `docker compose ls -a` (or when the listing itself fails)."""
    listing = shell_execute.execute_command_output_all("docker compose ls -a")
    if int(listing["code"]) != 0:
        # Listing failed — err on the side of "exists" like the original.
        return True
    name_pattern = app_name + '$'
    return any(re.match(name_pattern, token) is not None
               for token in listing['result'].split())
|
||||
|
||||
|
||||
def if_app_running(app_name):
    """Return True when `docker compose ls -a` lists project *app_name*
    with a 'running' state; False otherwise or when the listing fails."""
    cmd = "docker compose ls -a"
    output = shell_execute.execute_command_output_all(cmd)
    if int(output["code"]) != 0:
        return False
    for line in output['result'].split("\n"):
        # BUG FIX: the original tested re.match('running', app), which anchors
        # at the start of the line; compose lines start with the project name,
        # so 'running' could never match and this function always returned
        # False. Also anchor the name with \s (not \s*) so 'app2' does not
        # match a search for 'app'.
        if re.match(app_name + r'\s', line) is not None and re.search(r'running', line) is not None:
            return True
    return False
|
||||
|
||||
|
||||
def check_appid_exist(app_id):
    """Return True when *app_id* appears in the list of installed apps."""
    myLogger.info_logger("Checking check_appid_exist ...")
    found = any(app_id == app.app_id for app in manage.get_my_app())
    myLogger.info_logger("Check complete.")
    return found
|
||||
|
||||
|
||||
def check_appid_include_rq(app_id):
    """Validate *app_id* (blank, format, existence); return (code, message),
    (None, "") when valid."""
    if app_id is None or app_id == "undefine":
        return const.ERROR_CLIENT_PARAM_BLANK, "AppID is null"
    if re.match('^[a-z0-9]+_[a-z0-9]+$', app_id) is None:
        return const.ERROR_CLIENT_PARAM_Format, "App_id format error"
    if not check_appid_exist(app_id):
        return const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist"
    return None, ""
|
||||
|
||||
|
||||
def check_app_id(app_id):
    """Validate *app_id* syntax only (blank / lowercase-alnum_lowercase-alnum);
    return (code, message), (None, "") when valid."""
    code, message = None, ""
    if app_id is None:
        code, message = const.ERROR_CLIENT_PARAM_BLANK, "AppID is null"
    elif re.match('^[a-z0-9]+_[a-z0-9]+$', app_id) is None:
        code, message = const.ERROR_CLIENT_PARAM_Format, "APP name can only be composed of numbers and lowercase letters"
    # The original logs the code (None on success) before returning.
    myLogger.info_logger(code)
    return code, message
|
||||
|
||||
|
||||
def check_vm_resource(app_name):
    """Check that host CPU, memory and disk satisfy the requirements declared
    in the app template's variables.json; return True when all pass."""
    myLogger.info_logger("Checking virtual memory resource ...")
    var_path = "/data/library/apps/" + app_name + "/variables.json"
    required = read_var(var_path, 'requirements')

    # CPU: count physical core ids from /proc/cpuinfo.
    host_cores = int(shell_execute.execute_command_output_all("cat /proc/cpuinfo | grep \'core id\'| wc -l")["result"])
    if host_cores < int(required['cpu']):
        myLogger.info_logger("Check complete: The number of CPU cores is insufficient!")
        return False

    # Memory: available GiB must exceed the requirement plus a 20% margin.
    free_mem_gib = float(psutil.virtual_memory().available) / 1024 / 1024 / 1024
    if free_mem_gib < int(required['memory']) * 1.2:
        myLogger.info_logger("Check complete: The total amount of memory is insufficient!")
        return False

    # Disk: rounded free GiB on / must exceed the requirement plus 2 GiB.
    free_disk_gib = float(psutil.disk_usage('/').free) / 1024 / 1024 / 1024
    if round(free_disk_gib) < int(required['disk']) + 2:
        myLogger.info_logger("Check complete: There are not enough disks left!")
        return False

    myLogger.info_logger("Check complete.")
    return True
|
||||
|
||||
|
||||
def check_app_websoft9(app_name):
    """Return True when *app_name* is in websoft9's supported app library
    (i.e. /data/library/apps/<app_name> exists)."""
    # websoft9's support applist
    myLogger.info_logger("Checking dir...")
    return check_directory("/data/library/apps/" + app_name)
|
||||
|
||||
|
||||
def check_directory(path):
    """Return True when `ls <path>` succeeds (path exists), False when the
    command raises CommandException."""
    try:
        shell_execute.execute_command_output_all("ls " + path)
    except CommandException:
        return False
    return True
|
||||
|
||||
|
||||
def check_app_compose(app_name, customer_name):
    """Copy the app template into the install dir, then rewrite its .env:
    assign a free host port to every APP_*_PORT and randomize POWER_PASSWORD.
    """
    myLogger.info_logger("Set port and random password ...")
    library_path = "/data/library/apps/" + app_name
    install_path = "/data/apps/" + customer_name
    # Default ports declared by the app template (name -> value).
    port_dic = read_env(library_path + '/.env', "APP_.*_PORT=")
    # 1. Check whether the ports in /data/apps/app_name/.env are occupied; if not, we are done (see get_start_port)
    # Busy ports are gathered from three sources:
    #   cmd1: host ports published by existing containers
    #   cmd2: ports the host is already listening on (netstat)
    #   cmd3: ports reserved in other apps' .env files
    cmd1 = "docker container inspect $(docker ps -aq) | grep HostPort | awk \'{print $2}\' | sort -u"
    cmd2 = "netstat -tunlp | grep \"LISTEN\" | awk '{print $4}' | awk -F \":\" '{print $NF}' | sort -u"
    cmd3 = "grep -r \"APP_.*_PORT=\" /data/apps/*/.env | awk -F \"=\" '{print $2}' | sort -u"
    s1 = shell_execute.execute_command_output_all(cmd1)['result'].replace('\"', '')
    s2 = shell_execute.execute_command_output_all(cmd2)['result']
    try:
        # cmd3 fails when no app has been installed yet — best-effort.
        s3 = ''
        s3 = shell_execute.execute_command_output_all(cmd3)['result']
    except:
        pass
    s = s1 + '\n' + s2 + '\n' + s3

    shell_execute.execute_command_output_all("cp -r " + library_path + " " + install_path)
    env_path = install_path + "/.env"
    get_map(env_path)
    # For each declared port, bump past busy ones and write it back.
    for port_name in port_dic:
        port_value = get_start_port(s, port_dic[port_name])
        modify_env(install_path + '/.env', port_name, port_value)

    # set random password
    power_password = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]
    if "POWER_PASSWORD" in power_password:
        try:
            # Remove any stale pwgen container from a previous run.
            shell_execute.execute_command_output_all("docker rm -f pwgen")
        except Exception:
            pass
        # Generate a 15-char password via the pwgen container; '!' is appended
        # to satisfy special-character policies.
        new_password = shell_execute.execute_command_output_all("docker run --name pwgen backplane/pwgen 15")[
            "result"].rstrip('\n') + "!"
        modify_env(install_path + '/.env', 'POWER_PASSWORD', new_password)
        shell_execute.execute_command_output_all("docker rm -f pwgen")
        env_path = install_path + "/.env"
        get_map(env_path)
    myLogger.info_logger("Port check complete")
    return
|
||||
|
||||
|
||||
def check_app_url(customer_app_name):
    """If the app's .env opts in via APP_URL_REPLACE=true, rewrite APP_URL to
    the host's public ip (with :APP_HTTP_PORT when the old value had a port)."""
    myLogger.info_logger("Checking app url...")
    # If the app's .env contains an HTTP_URL entry, it must be set as HTTP_URL=ip:port
    env_path = "/data/apps/" + customer_app_name + "/.env"
    env_map = get_map(env_path)
    if env_map.get("APP_URL_REPLACE") == "true":
        myLogger.info_logger(customer_app_name + "need to change app url...")
        app_url = list(read_env(env_path, "APP_URL=").values())[0]
        ip = "localhost"
        url = ""
        try:
            # Public ip was recorded at install time by the appmanage service.
            ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
            ip = ip_result["result"].rstrip('\n')
        except Exception:
            ip = "127.0.0.1"
        http_port = list(read_env(env_path, "APP_HTTP_PORT").values())[0]

        # Keep a port suffix only if the original APP_URL had one.
        if ":" in app_url:
            url = ip + ":" + http_port
        else:
            url = ip
        cmd = "sed -i 's/APP_URL=.*/APP_URL=" + url + "/g' /data/apps/" + customer_app_name + "/.env"
        shell_execute.execute_command_output_all(cmd)

    myLogger.info_logger("App url check complete")
    return
|
||||
|
||||
|
||||
def get_map(path):
    """Parse KEY=VALUE lines of the file at *path* (read via `cat`) into a
    dict; returns {} when the file cannot be read."""
    myLogger.info_logger("Read env_dic" + path)
    output = shell_execute.execute_command_output_all("cat " + path)
    env_dic = {}
    if int(output["code"]) == 0:
        ret = output["result"]
        for env in ret.split("\n"):
            if "=" in env:
                # FIX: split only on the first '=' so values that themselves
                # contain '=' (e.g. base64 secrets, URLs) are not truncated.
                key, _, value = env.partition("=")
                env_dic[key] = value
        # NOTE(review): this logs the full .env contents, which may include
        # POWER_PASSWORD — consider masking secrets before logging.
        myLogger.info_logger(env_dic)
    return env_dic
|
||||
|
||||
|
||||
def read_env(path, key):
    """Return {K: V} for lines of *path* whose start matches the regex *key*
    (read via `cat`); returns {} when the file cannot be read."""
    myLogger.info_logger("Read " + path)
    output = shell_execute.execute_command_output_all("cat " + path)
    env_dic = {}
    if int(output["code"]) == 0:
        for env in output["result"].split("\n"):
            if re.match(key, env) is not None:
                # FIX: partition on the first '=' — the original split("=")[1]
                # truncated values containing '=' and raised IndexError when a
                # matching line had no '=' at all.
                k, _, v = env.partition("=")
                env_dic[k] = v
    myLogger.info_logger("Read " + path + ": " + str(env_dic))
    return env_dic
|
||||
|
||||
|
||||
def modify_env(path, env_name, value):
    """Replace the first line of *path* starting with *env_name* by
    '<env_name>=<value>', using sed line-number addressing."""
    myLogger.info_logger("Modify " + path + "...")
    # `sed -n '/^NAME/=' file` prints the line numbers of matching lines.
    output = shell_execute.execute_command_output_all("sed -n \'/^" + env_name + "/=\' " + path)
    if int(output["code"]) == 0 and output["result"] != "":
        # Only the first matching line is rewritten.
        line_num = output["result"].split("\n")[0]
        s = env_name + "=" + value
        # `sed -i 'Nc TEXT'` replaces line N in place.
        output = shell_execute.execute_command_output_all("sed -i \'" + line_num + "c " + s + "\' " + path)
        if int(output["code"]) == 0:
            myLogger.info_logger("Modify " + path + ": Change " + env_name + " to " + value)
|
||||
|
||||
|
||||
def read_var(var_path, var_name):
    """Read the JSON file at *var_path* (via `cat`) and return the value of
    *var_name*; returns "" when the file or key is missing."""
    myLogger.info_logger("Read " + var_path)
    output = shell_execute.execute_command_output_all("cat " + var_path)
    if int(output["code"]) != 0:
        myLogger.warning_logger(var_path + " not found")
        return ""
    parsed = json.loads(output["result"])
    try:
        return parsed[var_name]
    except KeyError:
        myLogger.warning_logger("Read " + var_path + ": No key " + var_name)
        return ""
|
||||
|
||||
|
||||
def get_start_port(s, port):
    """Return the first free port at or above *port* (both strings).

    *s* is a whitespace/newline-separated dump of ports already in use.
    """
    # FIX: the original used s.find(use_port), a substring test, so port "80"
    # was wrongly considered busy when e.g. "8080" appeared in the dump.
    # Compare whole tokens instead.
    used_ports = set(s.split())
    use_port = port
    while use_port in used_ports:
        use_port = str(int(use_port) + 1)
    return use_port
|
||||
|
||||
def check_app(app_name, customer_name, app_version):
    """Validate an install request; return (code, message) — (None, "") when
    the install may proceed.

    NOTE(review): `docker.*`, `check_app_docker` and `check_app_rq` are not
    defined or imported in this module's visible code; sibling functions above
    define the same helpers locally — confirm the intended references.
    """
    message = ""
    code = None
    app_id = app_name + "_" + customer_name
    if app_name == None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_name is null"
    elif customer_name == None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name is null"
    elif len(customer_name) < 2:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "customer_name must be longer than 2 chars"
    elif app_version == None:
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    elif app_version == "undefined" or app_version == "":
        code = const.ERROR_CLIENT_PARAM_BLANK
        message = "app_version is null"
    elif not docker.check_app_websoft9(app_name):
        # App must exist in the websoft9 library.
        code = const.ERROR_CLIENT_PARAM_NOTEXIST
        message = "It is not support to install " + app_name
    elif re.match('^[a-z0-9]+$', customer_name) == None:
        code = const.ERROR_CLIENT_PARAM_Format
        message = "APP name can only be composed of numbers and lowercase letters"
    elif docker.check_directory("/data/apps/" + customer_name):
        # Install dir already present on disk.
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    elif not docker.check_vm_resource(app_name):
        code = const.ERROR_SERVER_RESOURCE
        message = "Insufficient system resources (cpu, memory, disk space)"
    elif check_app_docker(app_id):
        # Already present as a compose project.
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name
    elif check_app_rq(app_id):
        # Already queued/installing in RQ.
        code = const.ERROR_CLIENT_PARAM_REPEAT
        message = "Repeat installation: " + customer_name

    return code, message
|
||||
|
||||
|
||||
def app_exits_in_docker(app_id):
    """Return (compose-ls output line, found?) for *app_id* of the form
    '<app_name>_<customer_name>'.

    Matches the compose project whose config path contains /<customer>/ and,
    for official apps, cross-checks variables.json's 'name' against app_name.
    NOTE(review): `check_if_official_app` and `docker` are not defined in this
    module's visible code — confirm the imports.
    """
    customer_name = app_id.split('_')[1]
    app_name = app_id.split('_')[0]
    flag = False
    info = ""
    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
    try:
        output = shell_execute.execute_command_output_all(cmd)
        if int(output["code"]) == 0:
            info = output["result"]
            # Last column is the compose file path; strip the file name.
            app_path = info.split()[-1].rsplit('/', 1)[0]
            is_official = check_if_official_app(app_path + '/variables.json')
            if is_official:
                name = docker.read_var(app_path + '/variables.json', 'name')
                if name == app_name:
                    flag = True
            elif app_name == customer_name:
                # Unofficial apps are identified by app_name == customer_name.
                flag = True
            myLogger.info_logger("APP in docker")
    except CommandException as ce:
        # grep found nothing (non-zero exit) — app not deployed via compose.
        myLogger.info_logger("APP not in docker")

    return info, flag
|
||||
|
6
appmanage_new/app/core/rq.py
Normal file
6
appmanage_new/app/core/rq.py
Normal file
@ -0,0 +1,6 @@
|
||||
# Delete a failed task from the RQ failed-job registry.
def delete_app_failedjob(job_id):
    """Remove *job_id* from RQ's failed-job registry and delete the job data.

    NOTE(review): `myLogger`, `FailedJobRegistry` and the queue `q` are not
    imported in this module — as written this raises NameError; confirm the
    intended imports (rq.registry.FailedJobRegistry and the shared queue).
    """
    myLogger.info_logger("delete_app_failedjob")
    failed = FailedJobRegistry(queue=q)
    failed.remove(job_id, delete_job=True)
|
||||
|
12
appmanage_new/app/core/settings.conf
Normal file
12
appmanage_new/app/core/settings.conf
Normal file
@ -0,0 +1,12 @@
|
||||
#appstore_preview_update=false
|
||||
#domain=test.websoft9.com
|
||||
|
||||
#email=help@websoft9.com
|
||||
#ip=127.0.0.1
|
||||
#smtp_port=743
|
||||
#smtp_server=smtp.websoft9.com
|
||||
#smtp_tls/ssl=true
|
||||
#smtp_user=admin
|
||||
#smtp_password=password
|
||||
#install_path=/data
|
||||
#artifact_url=https://w9artifact.blob.core.windows.net/release/websoft9
|
84
appmanage_new/app/external/nginx_proxy_manager.py
vendored
Normal file
84
appmanage_new/app/external/nginx_proxy_manager.py
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
import requests


class NginxProxyManagerAPI:
    """Client for the Nginx Proxy Manager REST API.

    Args:
        base_url (str): The base URL of the Nginx Proxy Manager API.
        api_token (str): The API token to use for authorization.

    Attributes:
        base_url (str): The base URL of the Nginx Proxy Manager API.
        api_token (str): The API token to use for authorization.

    Methods:
        get_token(identity, scope, secret): Request a new access token.
        refresh_token(): Refresh the stored access token.
    """

    def __init__(self, base_url, api_token):
        """Initialize the NginxProxyManagerAPI instance.

        Args:
            base_url (str): The base URL of the Nginx Proxy Manager API.
            api_token (str): The API token to use for authorization.
        """
        self.base_url = base_url
        self.api_token = api_token

    def get_token(self, identity, scope, secret):
        """Request a new access token from Nginx Proxy Manager.

        Args:
            identity (str): user account (an email address).
            scope (str): token scope, normally "user".
            secret (str): user password.

        Returns:
            dict or None: {"expires": str, "token": str} on HTTP 200,
            otherwise None.
        """
        url = f"{self.base_url}/api/tokens"
        data = {
            "identity": identity,
            "scope": scope,
            "secret": secret
        }
        # FIX: the original passed headers=headers, but `headers` was never
        # defined in this method, so every call raised NameError. A token
        # request needs no Authorization header.
        response = requests.post(url, json=data)
        if response.status_code == 200:
            return response.json()
        return None

    def refresh_token(self):
        """Refresh your access token using the stored api_token.

        Returns:
            dict or None: {"expires": str, "token": str} on HTTP 200,
            otherwise None.
        """
        url = f"{self.base_url}/api/tokens"
        headers = {"Authorization": f"Bearer {self.api_token}"}
        response = requests.get(url, headers=headers)
        if response.status_code == 200:
            return response.json()
        return None
|
20
appmanage_new/app/schemas/applist.py
Normal file
20
appmanage_new/app/schemas/applist.py
Normal file
@ -0,0 +1,20 @@
|
||||
from pydantic import BaseModel
from api.model.config import Config
from api.model.status_reason import StatusReason


class App(BaseModel):
    """API model describing one installed (or installing) application."""

    # "<app_name>_<customer_name>" — unique key used across the API
    app_id: str
    # template name from the app library (e.g. "wordpress")
    app_name: str
    # name chosen by the user for this installation
    customer_name: str
    trade_mark: str
    # lifecycle state: installing / running / exited / restarting / failed
    status: str
    # populated only when status == "failed"
    status_reason: StatusReason = None
    # True when the app comes from the official websoft9 library
    official_app: bool
    app_version: str
    create_time: str
    # host path holding the app's persistent data
    volume_data : str
    # host path of the compose project directory
    config_path : str
    image_url: str
    # True when the app is served over https
    app_https: bool
    # True when APP_URL should be rewritten to the public ip
    app_replace_url: bool
    # access details (ports, urls, credentials); None unless running/exited
    config: Config = None
|
519
appmanage_new/app/services/app.py
Normal file
519
appmanage_new/app/services/app.py
Normal file
@ -0,0 +1,519 @@
|
||||
# Merge the two app lists into one.
def conbine_list(installing_list, installed_list):
    """Concatenate *installing_list* and *installed_list*, keeping only the
    first entry seen for each 'app_id' (installing entries win)."""
    merged = []
    seen_ids = set()
    for app in installing_list + installed_list:
        identifier = app['app_id']
        if identifier in seen_ids:
            continue
        seen_ids.add(identifier)
        merged.append(app)
    return merged
|
||||
|
||||
# Get info for all apps (or a single app when app_id is given).
def get_my_app(app_id):
    """Return one app dict when *app_id* is given (raising when unknown),
    otherwise the merged list of installing + installed apps.

    Raises:
        CommandException: ERROR_CLIENT_PARAM_NOTEXIST when app_id is unknown.
    """
    installed_list = get_apps_from_compose()
    installing_list = get_apps_from_queue()

    # Installing entries take precedence over installed ones with the same id.
    app_list = conbine_list(installing_list, installed_list)
    find = False
    ret = {}
    if app_id != None:
        for app in app_list:
            if app_id == app['app_id']:
                ret = app
                find = True
                break
        if not find:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "This App doesn't exist!", "")
    else:
        ret = app_list
    myLogger.info_logger("app list result ok")
    return ret
|
||||
|
||||
def get_apps_from_compose():
    """Build the list of installed apps from `docker compose ls -a --format
    json`, enriching each entry from its variables.json and .env files.

    Returns a list of App dicts (see schemas); internal w9* service projects
    are skipped.
    """
    myLogger.info_logger("Search all of apps ...")
    cmd = "docker compose ls -a --format json"
    output = shell_execute.execute_command_output_all(cmd)
    output_list = json.loads(output["result"])
    myLogger.info_logger(len(output_list))
    # Public ip recorded at install time; fall back to loopback.
    ip = "localhost"
    try:
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        ip = ip_result["result"].rstrip('\n')
    except Exception:
        ip = "127.0.0.1"

    app_list = []
    for app_info in output_list:
        # ConfigFiles is the compose file path; derive project dir and name.
        volume = app_info["ConfigFiles"]
        app_path = volume.rsplit('/', 1)[0]
        customer_name = volume.split('/')[-2]
        # Per-app fields, reset for each project.
        app_id = ""
        app_name = ""
        trade_mark = ""
        port = 0
        url = ""
        admin_url = ""
        image_url = ""
        user_name = ""
        password = ""
        official_app = False
        app_version = ""
        create_time = ""
        volume_data = ""
        config_path = app_path
        app_https = False
        app_replace_url = False
        default_domain = ""
        admin_path = ""
        admin_domain_url = ""
        # Skip websoft9's own infrastructure containers.
        if customer_name in ['w9appmanage', 'w9nginxproxymanager', 'w9redis', 'w9kopia',
                             'w9portainer'] or app_path == '/data/apps/w9services/' + customer_name:
            continue

        var_path = app_path + "/variables.json"
        official_app = check_if_official_app(var_path)

        # Normalize the compose status; e.g. "running(2)" -> "running".
        status_show = app_info["Status"]
        status = app_info["Status"].split("(")[0]
        if status == "running" or status == "exited" or status == "restarting":
            if "exited" in status_show and "running" in status_show:
                # Mixed states: treat as running unless the *main* container
                # (named after the customer) is the one that exited.
                if status == "exited":
                    cmd = "docker ps -a -f name=" + customer_name + " --format {{.Names}}#{{.Status}}|grep Exited"
                    result = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
                    container = result.split("#Exited")[0]
                    if container != customer_name:
                        status = "running"
            if "restarting" in status_show:
                # Fresh restarts count as "restarting"; long-lived restart
                # loops are reported as "failed".
                about_time = get_createtime(official_app, app_path, customer_name)
                if "seconds" in about_time:
                    status = "restarting"
                else:
                    status = "failed"
        elif status == "created":
            status = "failed"
        else:
            continue

        if official_app:
            app_name = docker.read_var(var_path, 'name')
            app_id = app_name + "_" + customer_name  # app_id
            # get trade_mark
            trade_mark = docker.read_var(var_path, 'trademark')
            image_url = get_Image_url(app_name)
            # get env info
            path = app_path + "/.env"
            env_map = docker.get_map(path)

            try:
                myLogger.info_logger("get domain for APP_URL")
                domain = env_map.get("APP_URL")
                # Placeholder or raw-ip urls do not count as custom domains.
                if "appname.example.com" in domain or ip in domain:
                    default_domain = ""
                else:
                    default_domain = domain
            except Exception:
                myLogger.info_logger("domain exception")
            try:
                app_version = env_map.get("APP_VERSION")
                volume_data = "/data/apps/" + customer_name + "/data"
                user_name = env_map.get("APP_USER", "")
                password = env_map.get("POWER_PASSWORD", "")
                admin_path = env_map.get("APP_ADMIN_PATH")
                if admin_path:
                    myLogger.info_logger(admin_path)
                    admin_path = admin_path.replace("\"", "")
                else:
                    admin_path = ""

                if default_domain != "" and admin_path != "":
                    admin_domain_url = "http://" + default_domain + admin_path
            except Exception:
                myLogger.info_logger("APP_USER POWER_PASSWORD exception")
            try:
                replace = env_map.get("APP_URL_REPLACE", "false")
                myLogger.info_logger("replace=" + replace)
                if replace == "true":
                    app_replace_url = True
                https = env_map.get("APP_HTTPS_ACCESS", "false")
                if https == "true":
                    app_https = True
            except Exception:
                myLogger.info_logger("APP_HTTPS_ACCESS exception")

            try:
                http_port = env_map.get("APP_HTTP_PORT", "0")
                if http_port:
                    port = int(http_port)
            except Exception:
                pass
            if port != 0:
                # Web app: build direct access urls from ip + http port.
                try:
                    if app_https:
                        easy_url = "https://" + ip + ":" + str(port)
                    else:
                        easy_url = "http://" + ip + ":" + str(port)
                    url = easy_url
                    admin_url = get_admin_url(customer_name, url)
                except Exception:
                    pass
            else:
                # No http port: fall back to the first APP_DB*_PORT entry.
                try:
                    db_port = list(docker.read_env(path, "APP_DB.*_PORT").values())[0]
                    port = int(db_port)
                except Exception:
                    pass
        else:
            # Unofficial app: identify it by its directory name only.
            app_name = customer_name
            app_id = customer_name + "_" + customer_name
        create_time = get_createtime(official_app, app_path, customer_name)
        # Access details only make sense for reachable/stopped apps.
        if status in ['running', 'exited']:
            config = Config(port=port, compose_file=volume, url=url, admin_url=admin_url,
                            admin_domain_url=admin_domain_url,
                            admin_path=admin_path, admin_username=user_name, admin_password=password,
                            default_domain=default_domain)
        else:
            config = None
        if status == "failed":
            status_reason = StatusReason(Code=const.ERROR_SERVER_SYSTEM, Message="system original error",
                                         Detail="unknown error")
        else:
            status_reason = None
        app = App(app_id=app_id, app_name=app_name, customer_name=customer_name, trade_mark=trade_mark,
                  app_version=app_version, create_time=create_time, volume_data=volume_data, config_path=config_path,
                  status=status, status_reason=status_reason, official_app=official_app, image_url=image_url,
                  app_https=app_https, app_replace_url=app_replace_url, config=config)

        app_list.append(app.dict())
    return app_list
|
||||
|
||||
# Install an app.
def install_app(app_name, customer_name, app_version):
    """Validate the request and enqueue the actual install on RQ.

    Returns {'ResponseData': {'app_id': ...}} and adds an 'Error' entry when
    validation fails (in which case nothing is enqueued).
    """
    myLogger.info_logger("Install app ...")
    ret = {}
    ret['ResponseData'] = {}
    app_id = app_name + "_" + customer_name
    ret['ResponseData']['app_id'] = app_id

    code, message = check_app(app_name, customer_name, app_version)
    if code == None:
        # job_id == app_id lets later code look up / cancel the install job.
        q.enqueue(install_app_delay, app_name, customer_name, app_version, job_id=app_id)
    else:
        ret['Error'] = get_error_info(code, message, "")

    return ret
|
||||
|
||||
def start_app(app_id):
    """Start a stopped app via `docker compose start`.

    Raises:
        CommandException: when app_id is not deployed in docker.
    """
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    compose_dir = info.split()[-1].rsplit('/', 1)[0]
    shell_execute.execute_command_output_all("docker compose -f " + compose_dir + "/docker-compose.yml start")
|
||||
|
||||
|
||||
def stop_app(app_id):
    """Stop a running app via `docker compose stop`.

    Raises:
        CommandException: when app_id is not deployed in docker.
    """
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    compose_dir = info.split()[-1].rsplit('/', 1)[0]
    shell_execute.execute_command_output_all("docker compose -f " + compose_dir + "/docker-compose.yml stop")
|
||||
|
||||
|
||||
def restart_app(app_id):
    """Restart an app via `docker compose restart`.

    Raises:
        CommandException: when app_id is malformed or not deployed in docker.
    """
    code, message = docker.check_app_id(app_id)
    if code is not None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    compose_dir = info.split()[-1].rsplit('/', 1)[0]
    shell_execute.execute_command_output_all("docker compose -f " + compose_dir + "/docker-compose.yml restart")
|
||||
|
||||
def uninstall_app(app_id):
    """Uninstall *app_id*: `compose down -v` (plus file deletion), or drop the
    failed RQ job when the app never reached docker; finally force-remove the
    install dir and the proxy configuration."""
    app_name = app_id.split('_')[0]
    customer_name = app_id.split('_')[1]
    app_path = ""
    info, code_exist = app_exits_in_docker(app_id)
    if code_exist:
        app_path = info.split()[-1].rsplit('/', 1)[0]
        cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
        lib_path = '/data/library/apps/' + app_name
        # Never delete the shared library template itself.
        if app_path != lib_path:
            cmd = cmd + " && sudo rm -rf " + app_path
        shell_execute.execute_command_output_all(cmd)
    else:
        if check_app_rq(app_id):
            delete_app_failedjob(app_id)
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "")
    # Force to delete docker compose leftovers (best-effort).
    try:
        cmd = " sudo rm -rf /data/apps/" + customer_name
        shell_execute.execute_command_output_all(cmd)
    except CommandException as ce:
        myLogger.info_logger("Delete app compose exception")
    # Delete proxy config when uninstalling the app.
    app_proxy_delete(app_id)
|
||||
|
||||
|
||||
# Cleanup after a failed installation.
def delete_app(app_id):
    """Best-effort removal of a failed install: compose down (+ files),
    force-remove containers, drop the failed RQ job, then delete
    /data/apps/<customer>. Errors are logged, not propagated."""
    try:
        app_name = app_id.split('_')[0]
        customer_name = app_id.split('_')[1]
        app_path = ""
        info, code_exist = app_exits_in_docker(app_id)
        if code_exist:
            app_path = info.split()[-1].rsplit('/', 1)[0]
            cmd = "docker compose -f " + app_path + "/docker-compose.yml down -v"
            lib_path = '/data/library/apps/' + app_name
            # Never delete the shared library template itself.
            if app_path != lib_path:
                cmd = cmd + " && sudo rm -rf " + app_path
            try:
                myLogger.info_logger("Intall fail, down app and delete files")
                shell_execute.execute_command_output_all(cmd)
            except Exception:
                myLogger.info_logger("Delete app compose exception")
            # Force-remove containers that failed and could not be removed via `docker compose down`.
            try:
                myLogger.info_logger("IF delete fail, force to delete containers")
                force_cmd = "docker rm -f $(docker ps -f name=^" + customer_name + " -aq)"
                shell_execute.execute_command_output_all(force_cmd)
            except Exception:
                myLogger.info_logger("force delete app compose exception")

        else:
            if check_app_rq(app_id):
                delete_app_failedjob(app_id)
            else:
                raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "AppID is not exist", "")
        cmd = " sudo rm -rf /data/apps/" + customer_name
        shell_execute.execute_command_output_all(cmd)
    except CommandException as ce:
        myLogger.info_logger("Delete app compose exception")
|
||||
|
||||
# Installation preparation.
def prepare_app(app_name, customer_name):
    """Copy the app template from the library into the customer's install dir."""
    src = "/data/library/apps/" + app_name
    dst = "/data/apps/" + customer_name
    shell_execute.execute_command_output_all("cp -r " + src + " " + dst)
|
||||
|
||||
|
||||
def install_app_delay(app_name, customer_name, app_version):
    """RQ worker entry point: install an app via docker compose.

    On any failure the partial install is deleted and an Exception is raised
    whose message packs code/message/detail separated by "##websoft9##" so the
    failure can be reconstructed later from the RQ job's exc_info.
    """
    myLogger.info_logger("-------RQ install start --------")
    job_id = app_name + "_" + customer_name
    try:
        # The install folder is not copied yet at this point; the resource
        # check reads the descriptor from /data/library, so it must use
        # app_name, not customer_name.
        resource_flag = docker.check_vm_resource(app_name)
        if resource_flag == True:
            myLogger.info_logger("job check ok, continue to install app")
            env_path = "/data/apps/" + customer_name + "/.env"
            # prepare_app(app_name, customer_name)
            docker.check_app_compose(app_name, customer_name)
            myLogger.info_logger("start JobID=" + job_id)
            docker.modify_env(env_path, 'APP_NAME', customer_name)
            docker.modify_env(env_path, "APP_VERSION", app_version)
            docker.check_app_url(customer_name)
            cmd = "cd /data/apps/" + customer_name + " && sudo docker compose pull && sudo docker compose up -d"
            output = shell_execute.execute_command_output_all(cmd)
            myLogger.info_logger("-------Install result--------")
            myLogger.info_logger(output["code"])
            myLogger.info_logger(output["result"])
            # Optional post-start hook; absence is logged, not fatal.
            try:
                shell_execute.execute_command_output_all("bash /data/apps/" + customer_name + "/src/after_up.sh")
            except Exception as e:
                myLogger.info_logger(str(e))
        else:
            error_info = "##websoft9##" + const.ERROR_SERVER_RESOURCE + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)" + "##websoft9##" + "Insufficient system resources (cpu, memory, disk space)"
            myLogger.info_logger(error_info)
            raise Exception(error_info)
    except CommandException as ce:
        # Shell/docker failure: roll back, then re-raise in packed form.
        myLogger.info_logger(customer_name + " install failed(docker)!")
        delete_app(job_id)
        error_info = "##websoft9##" + ce.code + "##websoft9##" + ce.message + "##websoft9##" + ce.detail
        myLogger.info_logger(error_info)
        raise Exception(error_info)
    except Exception as e:
        # Anything else: roll back and report as a system error.
        myLogger.info_logger(customer_name + " install failed(system)!")
        delete_app(job_id)
        error_info = "##websoft9##" + const.ERROR_SERVER_SYSTEM + "##websoft9##" + 'system original error' + "##websoft9##" + str(e)
        myLogger.info_logger(error_info)
        raise Exception(error_info)
|
||||
|
||||
def get_createtime(official_app, app_path, customer_name):
    """Return the 'RunningFor' age string of the app's first container.

    Official apps are looked up by customer_name directly; otherwise the first
    container name is read from the compose listing. Errors are logged and an
    empty string is returned.
    """
    data_time = ""
    try:
        if official_app:
            target = customer_name
        else:
            listing = shell_execute.execute_command_output_all(
                "cd " + app_path + " && docker compose ps -a --format json")
            target = json.loads(listing["result"])[0]["Name"]
        uptime_cmd = "docker ps -f name=" + target + " --format {{.RunningFor}} | head -n 1"
        data_time = shell_execute.execute_command_output_all(uptime_cmd)["result"].rstrip('\n')
    except Exception as e:
        myLogger.info_logger(str(e))
    myLogger.info_logger("get_createtime get success" + data_time)
    return data_time
|
||||
|
||||
def check_if_official_app(var_path):
    """Return True when var_path holds a well-formed official app descriptor.

    The descriptor must exist, declare non-empty name/trademark/requirements,
    and its requirements must contain the cpu/memory/disk keys.
    """
    if docker.check_directory(var_path):
        if docker.read_var(var_path, 'name') != "" and docker.read_var(var_path, 'trademark') != "" and docker.read_var(
                var_path, 'requirements') != "":
            requirements = docker.read_var(var_path, 'requirements')
            try:
                # Probe the mandatory resource keys; KeyError means malformed.
                cpu = requirements['cpu']
                mem = requirements['memory']
                disk = requirements['disk']
                return True
            except KeyError:
                return False
        else:
            return False
    # Bug fix: previously fell through and implicitly returned None when the
    # directory check failed; callers treat the result as a boolean.
    return False
|
||||
|
||||
# Whether the app is already installed (known to docker compose).
def check_app_docker(app_id):
    """Return True if the app's compose project is listed by docker.

    grep exits non-zero (raising CommandException) when no compose project
    path contains /<customer_name>/, which is treated as "not installed".
    """
    customer_name = app_id.split('_')[1]
    # Removed unused local `app_name` (was derived from app_id but never read).
    flag = False
    cmd = "docker compose ls -a | grep \'/" + customer_name + "/\'"
    try:
        shell_execute.execute_command_output_all(cmd)
        flag = True
        myLogger.info_logger("APP in docker")
    except CommandException as ce:
        myLogger.info_logger("APP not in docker")
    return flag
|
||||
|
||||
|
||||
def check_app_rq(app_id):
    """Return True when app_id is queued, running, or failed in RQ."""
    myLogger.info_logger("check_app_rq")
    started = StartedJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    run_job_ids = started.get_job_ids()
    failed_job_ids = failed.get_job_ids()
    queue_job_ids = q.job_ids
    myLogger.info_logger(queue_job_ids)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_job_ids)
    # Same membership test against each registry, in the original order.
    for id_list in (queue_job_ids, failed_job_ids, run_job_ids):
        if id_list and app_id in id_list:
            myLogger.info_logger("App in RQ")
            return True
    myLogger.info_logger("App not in RQ")
    return False
|
||||
|
||||
|
||||
def get_apps_from_queue():
    """Build App dicts for every job currently tracked by RQ.

    Returns a list containing an 'installing' entry for each started or queued
    job and a 'failed' entry (with code/message/detail unpacked from the job's
    exc_info) for each failed job.
    """
    myLogger.info_logger("get queque apps...")
    # Obtain each job registry for the queue.
    started = StartedJobRegistry(queue=q)
    finish = FinishedJobRegistry(queue=q)
    deferred = DeferredJobRegistry(queue=q)
    failed = FailedJobRegistry(queue=q)
    scheduled = ScheduledJobRegistry(queue=q)
    cancel = CanceledJobRegistry(queue=q)
    # Collect job-id lists per state (only run/queued/failed are used below;
    # the rest are fetched for logging).
    run_job_ids = started.get_job_ids()
    finish_job_ids = finish.get_job_ids()
    wait_job_ids = deferred.get_job_ids()
    failed_jobs = failed.get_job_ids()
    scheduled_jobs = scheduled.get_job_ids()
    cancel_jobs = cancel.get_job_ids()
    myLogger.info_logger(q.jobs)
    myLogger.info_logger(run_job_ids)
    myLogger.info_logger(failed_jobs)
    myLogger.info_logger(cancel_jobs)
    myLogger.info_logger(wait_job_ids)
    myLogger.info_logger(finish_job_ids)
    myLogger.info_logger(scheduled_jobs)
    installing_list = []
    for job_id in run_job_ids:
        app = get_rq_app(job_id, 'installing', "", "", "")
        installing_list.append(app)
    for job in q.jobs:
        app = get_rq_app(job.id, 'installing', "", "", "")
        installing_list.append(app)
    for job_id in failed_jobs:
        job = q.fetch_job(job_id)
        exc_info = job.exc_info
        # exc_info was packed by install_app_delay as
        # "##websoft9##<code>##websoft9##<message>##websoft9##<detail>".
        code = exc_info.split('##websoft9##')[1]
        message = exc_info.split('##websoft9##')[2]
        detail = exc_info.split('##websoft9##')[3]
        app = get_rq_app(job_id, 'failed', code, message, detail)
        installing_list.append(app)
    return installing_list
|
||||
|
||||
# Build app info for a job that only exists in RQ.
def get_rq_app(id, status, code, message, detail):
    """Return an App dict for an RQ job; id is "<app_name>_<customer_name>".

    While the job is still in RQ its install folder may not exist yet, so
    trade mark, version, create time, volumes and config are left blank.
    """
    app_name = id.split('_')[0]
    customer_name = id.split('_')[1]
    status_reason = None if status == "installing" else StatusReason(Code=code, Message=message, Detail=detail)
    app = App(app_id=id, app_name=app_name, customer_name=customer_name, trade_mark="",
              app_version="", create_time="", volume_data="", config_path="",
              status=status, status_reason=status_reason, official_app=True,
              image_url=get_Image_url(app_name),
              app_https=False, app_replace_url=False, config=None)
    return app.dict()
|
||||
|
||||
|
||||
def get_admin_url(customer_name, url):
    """Append the app's APP_ADMIN_PATH (from its .env) to url; '' when unset."""
    env_file = "/data/apps/" + customer_name + "/.env"
    admin_url = ""
    try:
        admin_path = list(docker.read_env(env_file, "APP_ADMIN_PATH").values())[0]
        # Strip surrounding quotes that may be present in the .env value.
        admin_url = url + admin_path.replace("\"", "")
    except IndexError:
        # No APP_ADMIN_PATH entry -> this app has no admin URL.
        pass
    return admin_url
|
||||
|
||||
def get_container_port(container_name):
    """Return the first published container-side port (e.g. '80') of a container."""
    # `docker port` lines look like "80/tcp -> [::]:32768"; grep keeps the
    # '::' lines and the leading token before '/' is the container port.
    listing = shell_execute.execute_command_output_all("docker port " + container_name + " |grep ::")["result"]
    myLogger.info_logger(listing)
    port = listing.split('/')[0]
    myLogger.info_logger(port)
    return port
|
# ==== appmanage_new/app/services/domain.py (new file, 444 lines) ====
|
||||
def app_domain_list(app_id):
    """Return {'domains': [...], 'default_domain': str} for an installed app.

    The default domain is the APP_URL value from the app's .env; it is ''
    when no domains are bound or APP_URL is absent.
    """
    code, message = docker.check_app_id(app_id)
    if code != None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    myLogger.info_logger("Check app_id ok[app_domain_list]")
    bound_domains = get_all_domains(app_id)
    myLogger.info_logger(bound_domains)
    ret = {'domains': bound_domains}
    default_domain = ""
    if bound_domains != None and len(bound_domains) > 0:
        customer_name = app_id.split('_')[1]
        env_text = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]
        if "APP_URL" in env_text:
            url_line = shell_execute.execute_command_output_all(
                "cat /data/apps/" + customer_name + "/.env |grep APP_URL=")["result"].rstrip('\n')
            default_domain = url_line.split('=')[1]
    ret['default_domain'] = default_domain
    myLogger.info_logger(ret)
    return ret
|
||||
|
||||
def app_proxy_delete(app_id):
    """Delete every nginx-proxy-manager host that forwards to this app's container."""
    customer_name = app_id.split('_')[1]
    list_headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    response = requests.get(const.NGINX_URL + "/api/nginx/proxy-hosts", headers=list_headers)
    for proxy in response.json():
        if customer_name == proxy["forward_host"]:
            # Refresh the token per deletion, matching the original behavior.
            delete_headers = {
                'Authorization': get_token(),
                'Content-Type': 'application/json'
            }
            requests.delete(const.NGINX_URL + "/api/nginx/proxy-hosts/" + str(proxy["id"]),
                            headers=delete_headers)
|
||||
|
||||
|
||||
def app_domain_delete(app_id, domain):
    """Unbind `domain` from an app in nginx-proxy-manager.

    If it was the proxy host's last domain the whole host is deleted and the
    app's APP_URL falls back to the public IP; otherwise the host is updated
    and, when the deleted domain was the default, the first remaining domain
    becomes the new default. Raises CommandException on invalid app/domain or
    nginx errors.
    """
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok[app_domain_delete]")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    if domain is None or domain == "undefined":
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
    old_all_domains = get_all_domains(app_id)
    if domain not in old_all_domains:
        myLogger.info_logger("delete domain is not binded")
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is not bind.", "")
    myLogger.info_logger("Start to delete " + domain)
    proxy = get_proxy_domain(app_id, domain)
    if proxy != None:
        myLogger.info_logger(proxy)
        myLogger.info_logger("before update")
        domains_old = proxy["domain_names"]
        myLogger.info_logger(domains_old)
        # Mutates the list from the proxy record in place.
        domains_old.remove(domain)
        myLogger.info_logger("after update")
        myLogger.info_logger(domains_old)
        if len(domains_old) == 0:
            # Last domain removed: delete the whole proxy host.
            proxy_id = proxy["id"]
            token = get_token()
            url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            response = requests.delete(url, headers=headers)
            try:
                if response.json().get("error"):
                    raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            except Exception:
                # NOTE(review): this also swallows the CommandException raised
                # just above, so nginx errors here are only logged — confirm
                # whether that is intended.
                myLogger.info_logger(response.json())
            # No domains left: reset APP_URL to the server's public IP.
            set_domain("", app_id)
        else:
            # Update the proxy host with the remaining domains.
            proxy_id = proxy["id"]
            token = get_token()
            url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
            headers = {
                'Authorization': token,
                'Content-Type': 'application/json'
            }
            port = get_container_port(app_id.split('_')[1])
            host = app_id.split('_')[1]
            data = {
                "domain_names": domains_old,
                "forward_scheme": "http",
                "forward_host": host,
                "forward_port": port,
                "access_list_id": "0",
                "certificate_id": 0,
                "meta": {
                    "letsencrypt_agree": False,
                    "dns_challenge": False
                },
                "advanced_config": "",
                "locations": [],
                "block_exploits": False,
                "caching_enabled": False,
                "allow_websocket_upgrade": False,
                "http2_support": False,
                "hsts_enabled": False,
                "hsts_subdomains": False,
                "ssl_forced": False
            }
            response = requests.put(url, data=json.dumps(data), headers=headers)
            if response.json().get("error"):
                raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
            domain_set = app_domain_list(app_id)
            default_domain = domain_set['default_domain']
            # If the deleted domain was the default, promote the first
            # remaining domain to default.
            if default_domain == domain:
                set_domain(domains_old[0], app_id)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Delete domain is not bind", "")
|
||||
|
||||
def app_domain_update(app_id, domain_old, domain_new):
    """Replace `domain_old` with `domain_new` on the app's proxy host.

    Both domains are validated first. If the replaced domain was the app's
    default (APP_URL), the new domain becomes the default. Raises
    CommandException on invalid input, unknown app, or nginx errors.
    """
    myLogger.info_logger("app_domain_update")
    domain_list = []
    domain_list.append(domain_old)
    domain_list.append(domain_new)
    check_domains(domain_list)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    proxy = get_proxy_domain(app_id, domain_old)
    if proxy != None:
        # Swap the domain in place inside the proxy record's domain list.
        domains_old = proxy["domain_names"]
        index = domains_old.index(domain_old)
        domains_old[index] = domain_new
        proxy_id = proxy["id"]
        token = get_token()
        url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domains_old,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        domain_set = app_domain_list(app_id)
        default_domain = domain_set['default_domain']
        myLogger.info_logger("default_domain=" + default_domain + ",domain_old=" + domain_old)
        # If the edited domain was the default domain, keep the new one as default.
        if default_domain == domain_old:
            set_domain(domain_new, app_id)
    else:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "edit domain is not exist", "")
|
||||
|
||||
def app_domain_add(app_id, domain):
    """Bind a new domain to an app in nginx-proxy-manager and return it.

    Appends to the app's existing proxy host when one exists; otherwise
    creates a new proxy host and makes the domain the app's default
    (APP_URL). Raises CommandException on invalid input or nginx errors.
    """
    temp_domains = []
    temp_domains.append(domain)
    check_domains(temp_domains)
    code, message = docker.check_app_id(app_id)
    if code == None:
        info, flag = app_exits_in_docker(app_id)
        if flag:
            myLogger.info_logger("Check app_id ok")
        else:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    else:
        raise CommandException(code, message, "")
    old_domains = get_all_domains(app_id)
    if domain in old_domains:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain is in use", "")
    proxy = get_proxy(app_id)
    if proxy != None:
        # Existing proxy host: append the domain and PUT the updated record.
        domains_old = proxy["domain_names"]
        domain_list = domains_old
        domain_list.append(domain)
        proxy_id = proxy["id"]
        token = get_token()
        url = const.NGINX_URL+"/api/nginx/proxy-hosts/" + str(proxy_id)
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": domain_list,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.put(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
    else:
        # No proxy host yet: create one for this app.
        token = get_token()
        url = const.NGINX_URL+"/api/nginx/proxy-hosts"
        headers = {
            'Authorization': token,
            'Content-Type': 'application/json'
        }
        port = get_container_port(app_id.split('_')[1])
        host = app_id.split('_')[1]
        data = {
            "domain_names": temp_domains,
            "forward_scheme": "http",
            "forward_host": host,
            "forward_port": port,
            "access_list_id": "0",
            "certificate_id": 0,
            "meta": {
                "letsencrypt_agree": False,
                "dns_challenge": False
            },
            "advanced_config": "",
            "locations": [],
            "block_exploits": False,
            "caching_enabled": False,
            "allow_websocket_upgrade": False,
            "http2_support": False,
            "hsts_enabled": False,
            "hsts_subdomains": False,
            "ssl_forced": False
        }
        response = requests.post(url, data=json.dumps(data), headers=headers)
        if response.json().get("error"):
            raise CommandException(const.ERROR_CONFIG_NGINX, response.json().get("error").get("message"), "")
        # First bound domain becomes the app's default (APP_URL).
        # NOTE(review): source indentation was lost in extraction; this call
        # is placed inside the new-proxy branch — confirm against the repo.
        set_domain(domain, app_id)
    return domain
|
||||
|
||||
def check_domains(domains):
    """Validate a list of domains: non-empty, well-formed, pointing at this server.

    Raises CommandException on the first domain that fails a check.
    """
    myLogger.info_logger(domains)
    if domains is None or len(domains) == 0:
        raise CommandException(const.ERROR_CLIENT_PARAM_BLANK, "Domains is blank", "")
    for candidate in domains:
        if not is_valid_domain(candidate):
            raise CommandException(const.ERROR_CLIENT_PARAM_Format, "Domains format error", "")
        if check_real_domain(candidate) == False:
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "Domain and server not match", "")
|
||||
|
||||
|
||||
def is_valid_domain(domain):
    """A bindable domain must be a bare host name, not a URL with a scheme."""
    return not domain.startswith("http")
|
||||
|
||||
def check_real_domain(domain):
    """Return True when `domain` resolves (via ping) to this server's recorded public IP.

    Any shell failure (e.g. unresolvable host) makes the check fail.
    """
    domain_real = True
    try:
        # Fix: use a raw string for the grep pattern — '\.' in a plain Python
        # string is an invalid escape sequence (DeprecationWarning, and a
        # SyntaxError in future versions).
        cmd = "ping -c 1 " + domain + r" | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | uniq"
        domain_ip = shell_execute.execute_command_output_all(cmd)["result"].rstrip('\n')
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        ip_save = ip_result["result"].rstrip('\n')
        if domain_ip == ip_save:
            myLogger.info_logger("Domain check ok!")
        else:
            domain_real = False
    except CommandException as ce:
        domain_real = False
    return domain_real
|
||||
|
||||
|
||||
def get_proxy_domain(app_id, domain):
    """Return the proxy host that forwards to this app AND lists `domain`, else None."""
    customer_name = app_id.split('_')[1]
    headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    response = requests.get(const.NGINX_URL + "/api/nginx/proxy-hosts", headers=headers)
    myLogger.info_logger(response.json())
    proxy_host = None
    for proxy in response.json():
        if customer_name == proxy["forward_host"]:
            myLogger.info_logger("-------------------")
            if domain in proxy["domain_names"]:
                myLogger.info_logger("find the domain proxy")
                proxy_host = proxy
                break
    return proxy_host
|
||||
|
||||
|
||||
def get_all_domains(app_id):
    """Collect every domain bound to this app across all nginx proxy hosts."""
    customer_name = app_id.split('_')[1]
    headers = {
        'Authorization': get_token(),
        'Content-Type': 'application/json'
    }
    response = requests.get(const.NGINX_URL + "/api/nginx/proxy-hosts", headers=headers)
    domains = []
    for proxy in response.json():
        if customer_name == proxy["forward_host"]:
            domains.extend(proxy["domain_names"])
    return domains
|
||||
|
||||
|
||||
def app_domain_set(domain, app_id):
    """Validate `domain` and make it the app's default (APP_URL) domain."""
    check_domains([domain])
    code, message = docker.check_app_id(app_id)
    if code != None:
        raise CommandException(code, message, "")
    info, flag = app_exits_in_docker(app_id)
    if not flag:
        raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "APP is not exist", "")
    myLogger.info_logger("Check app_id ok")
    set_domain(domain, app_id)
|
||||
|
||||
|
||||
def set_domain(domain, app_id):
    """Write `domain` as APP_URL in the app's .env (public IP when domain is '').

    A non-empty domain must already be bound to the app. When the .env already
    has APP_URL it is replaced in place and, if APP_URL_REPLACE=true, the
    compose project is brought up again to apply it; otherwise APP_URL is
    inserted after the APP_NETWORK line.
    """
    myLogger.info_logger("set_domain start")
    bound = get_all_domains(app_id)
    if domain != "":
        if domain not in bound:
            message = domain + " is not in use"
            raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, message, "")
    customer_name = app_id.split('_')[1]
    app_url = shell_execute.execute_command_output_all("cat /data/apps/" + customer_name + "/.env")["result"]
    if domain == "":
        # Empty domain means "fall back to the server's public IP".
        ip_result = shell_execute.execute_command_output_all("cat /data/apps/w9services/w9appmanage/public_ip")
        domain = ip_result["result"].rstrip('\n')
    if "APP_URL" in app_url:
        myLogger.info_logger("APP_URL is exist")
        # The original empty/non-empty domain branches ran identical commands
        # after defaulting; they are merged here.
        cmd = "sed -i 's/APP_URL=.*/APP_URL=" + domain + "/g' /data/apps/" + customer_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
        if "APP_URL_REPLACE=true" in app_url:
            myLogger.info_logger("need up")
            shell_execute.execute_command_output_all("cd /data/apps/" + customer_name + " && docker compose up -d")
    else:
        myLogger.info_logger("APP_URL is not exist")
        cmd = "sed -i '/APP_NETWORK/a APP_URL=" + domain + "' /data/apps/" + customer_name + "/.env"
        shell_execute.execute_command_output_all(cmd)
    myLogger.info_logger("set_domain success")
|
||||
|
# ==== appmanage_new/app/services/update.py (new file, 177 lines) ====
|
||||
def get_release_url():
    """Pick the artifact base URL: dev channel when the 'preview' flag is enabled."""
    preview = db.AppSearchPreview().get("preview")
    myLogger.info_logger(preview)
    # Anything other than the literal "false" selects the dev channel.
    return const.ARTIFACT_URL if preview == "false" else const.ARTIFACT_URL_DEV
|
||||
|
||||
def appstore_update():
    """Replace the cockpit appstore plugin and the app library with the latest release.

    This is an unconditional upgrade — no version comparison is performed here.
    """
    myLogger.info_logger("appstore update start...")
    release_url = get_release_url()
    # Refresh the cockpit appstore plugin.
    download_url = release_url + "/plugin/appstore/appstore-latest.zip"
    shell_execute.execute_command_output_all(
        "cd /opt && rm -rf /opt/appstore* && wget -q " + download_url + " && unzip -q appstore-latest.zip ")
    shell_execute.execute_command_output_all("rm -rf /usr/share/cockpit/appstore && cp -r /opt/appstore /usr/share/cockpit")
    shell_execute.execute_command_output_all("rm -rf /opt/appstore*")
    # Refresh the app template library.
    library_url = release_url + "/plugin/library/library-latest.zip"
    shell_execute.execute_command_output_all(
        "cd /opt && rm -rf /opt/library* && wget -q " + library_url + " && unzip -q library-latest.zip ")
    shell_execute.execute_command_output_all("rm -rf /data/library && cp -r /opt/library /data")
    shell_execute.execute_command_output_all("rm -rf /opt/library*")
    myLogger.info_logger("auto update success...")
|
||||
|
||||
def AppStoreUpdate():
    """Upgrade the appstore plugin when the published version is newer.

    Raises CommandException when the installed websoft9 core is outside the
    supported range declared by the release's appstore.json.
    """
    core_support = AppStoreCore()
    release_url = get_release_url()
    if core_support == "-1":
        raise CommandException(const.ERRORMESSAGE_SERVER_VERSION_NEEDUPGRADE, "You must upgrade websoft9 core", "You must upgrade websoft9 core")
    elif core_support == "1":
        raise CommandException(const.ERRORMESSAGE_SERVER_VERSION_NOTSUPPORT, "core not support,can not upgrade", "core not support,can not upgrade")
    local_path = '/usr/share/cockpit/appstore/appstore.json'
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['Version']
    except Exception:
        # Missing/unreadable local descriptor: treat as oldest possible version.
        local_version = "0.0.0"
    version_cmd = "wget -O appstore.json " + release_url + "/plugin/appstore/appstore.json && cat appstore.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    version = json.loads(latest)['Version']
    # Bug fix: plain `local_version < version` compares strings
    # lexicographically ("0.9.0" > "0.10.0"); compare numerically per segment.
    if compared_version(local_version, version) == -1:
        appstore_update()
    else:
        myLogger.info_logger("You click update appstore, but not need to update")
|
||||
|
||||
|
||||
|
||||
def AppPreviewUpdate(preview):
    """Set or read the appstore 'preview' flag.

    "true"/"True" and "false"/"False" persist the flag and return its
    normalized value; blank/undefined input returns the current setting;
    anything else raises CommandException.
    """
    myLogger.info_logger("AppPreviewUpdate")
    if preview in ("true", "True"):
        db.AppUpdatePreview(preview)
        return "true"
    if preview in ("false", "False"):
        db.AppUpdatePreview(preview)
        return "false"
    if preview == None or preview == "" or preview == "undefine":
        # Blank input means "query the current setting".
        return db.AppSearchPreview().get("preview")
    raise CommandException(const.ERROR_CLIENT_PARAM_NOTEXIST, "preview is true,false,blank", "preview is true,false,blank")
|
||||
|
||||
# Check whether the installed core VERSION supports the appstore update.
def AppStoreCore():
    """Compare the installed websoft9 core version against the release's range.

    Returns "0" when supported, "-1" when the core is too old (must upgrade
    core first), "1" when the core is newer than the release supports.
    """
    release_url = get_release_url()
    version_cmd = "wget -O appstore.json " + release_url + "/plugin/appstore/appstore.json && cat appstore.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    most_version = json.loads(latest)['Requires at most']
    least_version = json.loads(latest)['Requires at least']
    now = shell_execute.execute_command_output_all("cat /data/apps/websoft9/version.json")['result']
    now_version = json.loads(now)['VERSION']
    version_str = "now_version:" + now_version + " least_version:" + least_version + " most_version:" + most_version
    myLogger.info_logger(version_str)
    # Bug fix: previously compared version strings lexicographically, which
    # misorders e.g. "0.9.0" vs "0.10.0"; compare numerically per segment.
    if compared_version(now_version, least_version) == -1:
        return "-1"
    if compared_version(now_version, most_version) == 1:
        return "1"
    return "0"
|
||||
|
||||
# Fetch the websoft9 core update log.
def get_update_list(url: str=None):
    """Compare the installed core version with the published release.

    Returns a dict with local_version, target_version, an `update` flag, and
    — when an update is available — the date and changelog lines of the
    newest CHANGELOG.md entry. `url` overrides the default artifact base URL.
    """
    local_path = '/data/apps/websoft9/version.json'
    artifact_url = const.ARTIFACT_URL
    if url:
        artifact_url = url
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['VERSION']
    except:
        # Missing/unreadable local version file: treat as oldest version.
        local_version = "0.0.0"
    version_cmd = f"wget -O version.json {artifact_url}/version.json && cat version.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    version = json.loads(latest)['VERSION']
    ret = {}
    ret['local_version'] = local_version
    ret['target_version'] = version
    content = []
    date = ""
    if compared_version(local_version, version) == -1:
        ret['update'] = True
        cmd = f"wget -O CHANGELOG.md {artifact_url}/CHANGELOG.md && head -n 20 CHANGELOG.md"
        change_log_contents = shell_execute.execute_command_output(cmd)
        # The newest entry is the first "## " section; its heading's last
        # token is the release date, the remaining non-empty lines the notes.
        change_log = change_log_contents.split('## ')[1].split('\n')
        date = change_log[0].split()[-1]
        for change in change_log[1:]:
            if change != '':
                content.append(change)
    else:
        ret['update'] = False
    ret['date'] = date
    ret['content'] = content
    return ret
|
||||
|
||||
# Fetch the appstore update log.
def get_appstore_update_list():
    """Compare the installed appstore plugin version with the published release.

    Returns a dict with local_version, target_version, an `update` flag, and
    — when an update is available — the newest changelog entry's date and
    lines plus `core_compare` (AppStoreCore result) indicating whether the
    installed core supports the update.
    """
    release_url = get_release_url()
    local_path = '/usr/share/cockpit/appstore/appstore.json'
    local_version = "0"
    try:
        op = shell_execute.execute_command_output_all("cat " + local_path)['result']
        local_version = json.loads(op)['Version']
    except:
        # Missing/unreadable local descriptor: treat as oldest version.
        local_version = "0.0.0"
    version_cmd = "wget -O appstore.json -N " + release_url + "/plugin/appstore/appstore.json && cat appstore.json"
    latest = shell_execute.execute_command_output_all(version_cmd)['result']
    version = json.loads(latest)['Version']
    ret = {}
    ret['local_version'] = local_version
    ret['target_version'] = version
    content = []
    date = ""
    core_compare = ""
    if compared_version(local_version, version) == -1:
        ret['update'] = True
        cmd = "wget -O CHANGELOG.md " + release_url + "/plugin/appstore/CHANGELOG.md && cat CHANGELOG.md"
        change_log_contents = shell_execute.execute_command_output_all(cmd)['result']
        # Newest entry is the first "## " section: heading's last token is the
        # date, remaining non-empty lines are the notes.
        change_log = change_log_contents.split('## ')[1].split('\n')
        date = change_log[0].split()[-1]
        for change in change_log[1:]:
            if change != '':
                content.append(change)
        core_compare = AppStoreCore()
    else:
        ret['update'] = False
    ret['date'] = date
    ret['content'] = content
    ret['core_compare'] = core_compare
    return ret
|
||||
|
||||
|
||||
def compared_version(ver1, ver2):
    """Compare two dotted numeric version strings.

    Missing trailing components count as zero, so "1.2" equals "1.2.0".
    (Fix: the original treated the string with more components as newer,
    reporting "1.2" < "1.2.0" even though they denote the same version.)
    Components are expected to be integers, e.g. "23.5.1".

    Returns:
        -1 if ver1 < ver2, 0 if they are equal, 1 if ver1 > ver2.
    """
    parts1 = [int(part) for part in str(ver1).split(".")]
    parts2 = [int(part) for part in str(ver2).split(".")]
    # Pad the shorter version with zeros so both compare component-wise.
    width = max(len(parts1), len(parts2))
    parts1 += [0] * (width - len(parts1))
    parts2 += [0] * (width - len(parts2))
    for left, right in zip(parts1, parts2):
        if left < right:
            return -1
        if left > right:
            return 1
    return 0
|
41
appmanage_new/app/utils/getIP.py
Normal file
41
appmanage_new/app/utils/getIP.py
Normal file
@ -0,0 +1,41 @@
|
||||
#!/bin/bash
# Print this host's public IPv4 address by querying a list of echo services.
# NOTE(review): this is a bash script even though the file is named getIP.py —
# confirm how it is invoked before renaming.

url_list=(
    api.ipify.org
    bot.whatismyipaddress.com
    icanhazip.com
    ifconfig.co
    ident.me
    ifconfig.me
    ipecho.net/plain
    ipinfo.io/ip
    ip.sb
    whatismyip.akamai.com
    inet-ip.info
)
# Fix: icanhazip.com was listed twice; the duplicate is removed.

# Query one service; short timeouts keep unreachable services from stalling us.
curl_ip(){
    curl --connect-timeout 1 -m 2 "$1" 2>/dev/null
    return $?
}

# Troubleshooting aid: print every service's raw answer.
debug(){
    for x in "${url_list[@]}"
    do
        curl_ip "$x"
    done
}

# Print the first answer that looks like a dotted-quad IPv4 address, then stop.
print_ip(){
    for n in "${url_list[@]}"
    do
        public_ip=$(curl_ip "$n")
        # Count dot-separated fields; a plausible IPv4 address has exactly 4.
        check_ip=$(echo "$public_ip" | awk -F"." '{print NF}')
        if [ -n "$public_ip" ] && [ "$check_ip" -eq 4 ]; then
            echo "$public_ip"
            exit 0
        fi
    done
}
#debug
print_ip
|
6
appmanage_new/app/utils/helper.py
Normal file
6
appmanage_new/app/utils/helper.py
Normal file
@ -0,0 +1,6 @@
|
||||
class Singleton(type):
    """Metaclass ensuring at most one instance of each class using it.

    The first call to the class constructs and caches the instance; every
    later call returns that same cached object.
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
|
46
appmanage_new/app/utils/runshell.py
Normal file
46
appmanage_new/app/utils/runshell.py
Normal file
@ -0,0 +1,46 @@
|
||||
#!/usr/bin/python3
|
||||
import subprocess
|
||||
|
||||
from api.utils.log import myLogger
|
||||
from api.exception.command_exception import CommandException
|
||||
from api.utils import const
|
||||
|
||||
|
||||
# This function runs a shell command inside the container and returns its
# combined stdout/stderr text.
# cmd_str e.g: "ls -a"
# return string limit: 4000 chars? to do
def execute_command_output(cmd_str):
    print(cmd_str)
    captured = subprocess.getoutput(cmd_str)
    print(captured)
    return captured
|
||||
|
||||
|
||||
# This function runs a shell command on the host machine by entering PID 1's
# namespaces with nsenter.
# cmd_str e.g: "ls -a"
# return string limit: 4000 chars
def execute_command_output_all(cmd_str):

    myLogger.info_logger("Start to execute cmd: " + cmd_str)

    process = subprocess.run(
        f'nsenter -m -u -i -n -p -t 1 sh -c "{cmd_str}"',
        capture_output=True,
        bufsize=65536,
        check=False,
        text=True,
        shell=True,
    )

    # The exit code alone is not trusted: any of these keywords appearing in
    # stdout is also treated as a failure.
    failure_markers = ('Fail', 'fail', 'Error', 'error')
    succeeded = process.returncode == 0 and not any(
        marker in process.stdout for marker in failure_markers
    )

    if succeeded:
        return {"code": "0", "result": process.stdout}

    myLogger.info_logger("Failed to execute cmd, output failed result")
    myLogger.info_logger(process)
    raise CommandException(const.ERROR_SERVER_COMMAND, "Docker returns the original error", process.stderr)
|
||||
|
||||
|
||||
|
||||
# This function converts a container-side command into the equivalent
# host-machine command (wrapped in nsenter targeting PID 1's namespaces).
def convert_command(cmd_str):
    # An empty command needs no wrapping.
    if cmd_str == "":
        return cmd_str
    return "nsenter -m -u -i -n -p -t 1 sh -c " + "'" + cmd_str + "'"
|
68
appmanage_new/app/utils/settings_file.py
Normal file
68
appmanage_new/app/utils/settings_file.py
Normal file
@ -0,0 +1,68 @@
|
||||
from api.utils.log import myLogger
|
||||
from api.utils.helper import Singleton
|
||||
|
||||
|
||||
# This class adds/modifies/lists/deletes items in an "item=value" (key/value)
# style settings file.

class SettingsFile(object, metaclass=Singleton):
    """Key=value settings-file accessor, one shared instance per process.

    Fix: the original declared ``__metaclass__ = Singleton``, which is
    Python 2 syntax and has no effect on Python 3 — the metaclass is now
    applied via the ``metaclass`` keyword so the singleton actually works.
    """

    def __init__(self, path):
        # In-memory cache of the parsed settings.
        self._config = {}
        # Path of the backing settings file.
        self.config_file = path

    def build_config(self):
        """(Re)load ``self.config_file`` into the cache and return it.

        Comment lines (starting with ``#``), blank lines and lines without a
        key before ``=`` are skipped; malformed lines are logged and ignored.
        """
        try:
            with open(self.config_file, 'r') as f:
                data = f.readlines()
        except Exception as e:
            # A missing/unreadable file is not fatal: log it and start empty.
            # (Fix: the bound exception was previously ignored.)
            myLogger.error_logger(e)
            data = []
        for line in data:
            if line.startswith('#'):
                continue
            line = line.replace('\n', '').replace('\r\n', '')
            if not line:
                continue
            # Fix: split on the FIRST '=' only, so values may themselves
            # contain '=' (the old split() rejected such lines), and reject
            # lines with an empty key (the old code silently accepted them).
            key, sep, value = line.partition('=')
            if not sep or not key:
                myLogger.error_logger(f'invalid format {line}')
                continue
            if self._config.get(key) != value:
                self._config[key] = value
        return self._config

    def init_config_from_file(self, config_file: str = None):
        """Load settings, optionally switching to a different file first."""
        if config_file:
            self.config_file = config_file
        self.build_config()

    def update_setting(self, key: str, value: str):
        """Set ``key`` to ``value`` in the cache and persist immediately."""
        self._config[key] = value
        self.flush_config()

    def get_setting(self, key: str, default=None):
        """Return the cached value for ``key``, or ``default`` if absent."""
        return self._config.get(key, default)

    def list_all_settings(self) -> dict:
        """Reload from disk and return every setting."""
        self.build_config()
        return self._config

    def delete_setting(self, key: str, value: str):
        """Delete ``key`` only when its cached value equals ``value``, then persist."""
        if self._config.get(key) == value:
            del self._config[key]
            self.flush_config()

    def flush_config(self):
        """Write the cached settings back to ``self.config_file``."""
        try:
            with open(self.config_file, 'w') as f:
                for key, value in self._config.items():
                    f.write(f'{key}={value}\n')
        except Exception as e:
            myLogger.error_logger(e)
|
||||
|
||||
|
||||
# This class is add/modify/cat/delete content from file
|
||||
# src: path | URL
|
42
appmanage_new/docs/MAINTAINERS.md
Normal file
42
appmanage_new/docs/MAINTAINERS.md
Normal file
@ -0,0 +1,42 @@
|
||||
# Documentation for core maintainers
|
||||
|
||||
This documentation is from [jenkins MAINTAINERS](https://github.com/jenkinsci/jenkins/blob/master/docs/MAINTAINERS.adoc), which has a paradigm of rigorous open-source project maintenance
|
||||
|
||||
## Scope
|
||||
|
||||
This document applies to the following components:
|
||||
|
||||
- Websoft9 core
|
||||
- Websoft9 core plugins
|
||||
- docker-library
|
||||
|
||||
## Roles
|
||||
|
||||
| Role/job | submit pr | review pr | assign pr | merge pr | close pr | create issue | manage issue | release |
|
||||
| ------------ | --------- | --------- | --------- | -------- | -------- | ------------ | ------------ | ------- |
|
||||
| Contributor | √ | | | | | √ | | |
|
||||
| Issue Team | √ | | | | | √ | √ | |
|
||||
| PR Reviewer | √ | √ | | | | √ | | |
|
||||
| Release Team | √ | | | | | √ | | √ |
|
||||
| Maintainer | √ | √ | √ | √ | √ | √ | | |
|
||||
| PR Assignee | | | | √ | | √ | | |
|
||||
|
||||
|
||||
* **Contributor**: submit pull requests to the Websoft9 core and review changes submitted by others. There are no special preconditions to do so. Anyone is welcome to contribute.
|
||||
* **Issue Triage Team Member**: review the incoming issues: bug reports, requests for enhancement, etc. Special permissions are not required to take this role or to contribute.
|
||||
* **Core Pull Request Reviewer**: A team for contributors who are willing to regularly review pull requests and eventually become core maintainers.
|
||||
* **Core Maintainer**: Get permissions in the repository, and hence they are able to merge pull requests. Their responsibility is to perform pull request reviews on a regular basis and to bring pull requests to closure, either by merging ready pull requests towards weekly releases (master branch) or by closing pull requests that are not ready for merge because of submitter inaction after an extended period of time.
|
||||
* **Pull Request Assignee**: Core maintainers make a commitment to bringing a pull request to closure by becoming an Assignee. They are also responsible to monitor the weekly release status and to perform triage of critical issues.
|
||||
* **Release Team Member**: Responsible for Websoft9 weekly and LTS releases
|
||||
|
||||
## Pull request review process
|
||||
|
||||
## Pull request Merge process
|
||||
|
||||
## Issue triage
|
||||
|
||||
## Release process
|
||||
|
||||
## Tools
|
||||
|
||||
## Communication
|
24
appmanage_new/docs/architecture.md
Normal file
24
appmanage_new/docs/architecture.md
Normal file
@ -0,0 +1,24 @@
|
||||
## Architecture
|
||||
|
||||
Websoft9 has a very simple [architecture](https://www.canva.cn/design/DAFpI9loqzQ/hI_2vrtfoK7zJwauhJzipQ/view?utm_content=DAFpI9loqzQ&utm_campaign=designshare&utm_medium=link&utm_source=publishsharelink) which uses [Redhat Cockpit](https://cockpit-project.org/) as the web framework and [Docker](https://www.docker.com/) for running [applications](https://github.com/Websoft9/docker-library).
|
||||
|
||||
The benefit of this architecture is that you don't have to learn new technology stacks or worry about this project becoming unmaintained.
|
||||
|
||||
![Alt text](image/archi.png)
|
||||
|
||||
|
||||
What we do is integrating below stacks's API or interfaces to Cockpit console by [Cockpit packages (Also known as plugin)](https://cockpit-project.org/guide/latest/packages.html) :
|
||||
|
||||
- [Nginx Proxy Manager](https://nginxproxymanager.com/): A web-based Nginx management
|
||||
- [Portainer](https://www.portainer.io/): Powerful container management for DevSecOps
|
||||
- [Duplicati](https://www.duplicati.com/): Backup software to store encrypted backups online
|
||||
- [Redis](https://redis.io/): The open source, in-memory data store
|
||||
- [Appmanage](https://github.com/Websoft9/websoft9/tree/main/appmanage): API for create and manage docker compose based application powered by Websoft9
|
||||
- [websoft9-plugins](https://github.com/websoft9?q=plugin&type=all&language=&sort=): Cockpit packages powered by Websoft9
|
||||
|
||||
As Websoft9 is a complete product, we also offer:
|
||||
|
||||
* API
|
||||
* CLI
|
||||
|
||||
And Websoft9 is more attractive to users is [200+ application templates](https://github.com/Websoft9/docker-library).
|
24
appmanage_new/docs/developer.md
Normal file
24
appmanage_new/docs/developer.md
Normal file
@ -0,0 +1,24 @@
|
||||
# Developer Guide
|
||||
|
||||
|
||||
|
||||
|
||||
## Release
|
||||
|
||||
|
||||
|
||||
#### 制品库自动化
|
||||
|
||||
- 插件制品管理:开发人员开发测试完成后,修改插件版本,触发 Action 构建 Github packages 制品
|
||||
- docker-libaray 库制品管理:开发人员测试完成后,修改 library 版本,触发 Action 构建 Github packages 制品
|
||||
- websoft9 制品管理:开发人员修改 appmanage 源码或微服务 docker-compose 测试完成后,修改 微服务 版本,触发 Action 构建 Dockerhub 镜像制品以及后台微服务 Github packages 制品
|
||||
|
||||
> Portainer,redis,nginxproxymanager 使用外部 dockerhub 镜像
|
||||
|
||||
### 自动化测试
|
||||
|
||||
当各个制品更新后,项目管理者修改 version_test.json 对应的组件的版本,构建 Action 触发自动化系统测试。
|
||||
自动化测试失败,通知各开发人员,删除制品,修改后重新生成制品。
|
||||
自动化测试成功,同步 version_test.json 到 version.json, 新制品正式发布。
|
||||
|
||||
|
BIN
appmanage_new/docs/image/archi.png
Normal file
BIN
appmanage_new/docs/image/archi.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 73 KiB |
151
appmanage_new/docs/notes/PRD.md
Normal file
151
appmanage_new/docs/notes/PRD.md
Normal file
@ -0,0 +1,151 @@
|
||||
# 需求
|
||||
|
||||
从两个主线理解 stackhub 的需求:
|
||||
|
||||
- 应用生命周期管理:寻找、安装、发布、停止、卸载、升级等软件全生命周期。
|
||||
- 基础设施运维管理:安全、存储、文件、容器、监控等系统管理
|
||||
|
||||
## 应用生命周期
|
||||
|
||||
### 业务需求
|
||||
|
||||
#### 寻找
|
||||
|
||||
用户可以通过两个入口寻找应用:
|
||||
|
||||
- 应用商店:采用一级分类的方式展现应用,并支持**筛选+搜索**的方式以便于用户检索
|
||||
- Docker 镜像仓库:检索 Docker 镜像仓库,找到对应的应用
|
||||
|
||||
#### 安装
|
||||
|
||||
- 用户自主安装应用,后端按顺序依次启动目标应用
|
||||
- 启动应用之前先进行资源约束判断,不符合条件的目标应用不予安装
|
||||
- 与安装有关的状态:安装中、运行中、安装失败、反复重启、已停止
|
||||
|
||||
#### 发布
|
||||
|
||||
- 以域名或端口的方式,将运行中的应用发布出去,供外部用户访问。
|
||||
- 自助设置 HTTPS,上传或更新证书
|
||||
|
||||
#### 停止
|
||||
|
||||
将应用服务停止
|
||||
|
||||
#### 卸载
|
||||
|
||||
卸载应用并删除数据
|
||||
|
||||
#### 升级
|
||||
|
||||
升级应用,如果升级失败会自动回滚到升级之前的状态
|
||||
|
||||
#### 恢复
|
||||
|
||||
在已有的完整备份的基础,恢复应用。
|
||||
|
||||
可能存在两种情况:
|
||||
|
||||
- 覆盖现有应用
|
||||
- 恢复成一个新的应用
|
||||
|
||||
#### 克隆
|
||||
|
||||
克隆一个已经存在的应用,命名为新应用
|
||||
|
||||
### 技术需求
|
||||
|
||||
#### 模板编排
|
||||
|
||||
应用的底层编排 100% 以 Docker Compose 语法作为编排语言
|
||||
|
||||
#### 多语言
|
||||
|
||||
- 前端支持 i18n
|
||||
- 后端接口支持英文
|
||||
|
||||
#### 用户管理
|
||||
|
||||
- 支持多个用户,用户角色分为普通用户和管理员用户
|
||||
- 普通用户可以创建和管理自己的应用,不可以删除他人的应用
|
||||
|
||||
#### UI 自适应
|
||||
|
||||
UI 自适应各种屏幕尺寸
|
||||
|
||||
#### 2FA
|
||||
|
||||
引入一种双重登录策略
|
||||
|
||||
#### 商店基础设置
|
||||
|
||||
- 商店 Logo 可自定义
|
||||
- 语言、时区可选
|
||||
- 绑定域名
|
||||
- SMTP 信息填写
|
||||
|
||||
#### 通知
|
||||
|
||||
- SMTP 邮件通知
|
||||
|
||||
#### 商店更新
|
||||
|
||||
商店支持在线更新提示和在线更新
|
||||
|
||||
#### API
|
||||
|
||||
支持生成 API Tokens
|
||||
|
||||
#### CLI
|
||||
|
||||
基于 API 的 CLI
|
||||
|
||||
#### 仓库管理
|
||||
|
||||
默认以 DockerHub 作为镜像仓库,支持自建仓库并同步 DockerHub 镜像
|
||||
|
||||
#### 安装程序
|
||||
|
||||
一键自动化安装程序,类似:
|
||||
|
||||
```
|
||||
curl https://websoft9.github.io/stackhub/install/install.sh | bash
|
||||
```
|
||||
|
||||
主要步骤包括:
|
||||
|
||||
1. Check 硬件、操作系统、cpu 架构
|
||||
2. 安装依赖包
|
||||
3. 安装 docker
|
||||
4. 下载各源码包
|
||||
5. 启动个源码对应服务
|
||||
|
||||
## 基础设施运维
|
||||
|
||||
### SSH 终端
|
||||
|
||||
Web-Based SSH 终端
|
||||
|
||||
### 文件管理器
|
||||
|
||||
Web-Based 文件管理器
|
||||
|
||||
### 存储管理
|
||||
|
||||
- 支持接入第三方对象存储
|
||||
|
||||
### 备份
|
||||
|
||||
备份完整的应用数据:
|
||||
|
||||
- 自定义备份时间区间
|
||||
- 自动备份可取消
|
||||
- 备份可以管理:删除、下载等
|
||||
|
||||
### 容器管理
|
||||
|
||||
可视化的容器管理,包括:拉镜像、创建/删除/停止容器、SSH 进入容器、向容器上传文件等
|
||||
|
||||
### 系统监控
|
||||
|
||||
- 监控容器的 CPU,内存和存储消耗情况
|
||||
- 监控系统的 CPU,内存和存储消耗情况
|
57
appmanage_new/docs/notes/research.md
Normal file
57
appmanage_new/docs/notes/research.md
Normal file
@ -0,0 +1,57 @@
|
||||
# 概述
|
||||
|
||||
## 需求草稿
|
||||
|
||||
| | Cloudron | [casaos](https://www.casaos.io/) | umbrel | runtipi |
|
||||
| -------------- | -------- | -------------------------------------------------------- | ------------ | ------- |
|
||||
| 应用编排 | | 单一镜像 | | 多镜像,compose 编排 |
|
||||
| 市场应用来源 | | 官方+社区 | 官方+社区 | |
|
||||
| 一键安装程度 | | 不需任何配置 | 不需任何配置 | |
|
||||
| 应用访问方式 | | 端口 | 端口 | |
|
||||
| 自定义安装应用 | | Y | N | N |
|
||||
| Web 管理容器 | | Y | N | |
|
||||
| 默认镜像仓库 | | DockerHub | | |
|
||||
| 自适应 | | Y | Y | |
|
||||
| 多语言 | | Y | N | |
|
||||
| 用户管理 | | 单一用户 | 单一用户 | |
|
||||
| 自带应用 | | 文件,服务器终端,容器终端,监控,日志 | 监控,日志 | |
|
||||
| 应用管理 | | 完整容器参数设置,克隆,绑定域名?备份?证书? | 无 | |
|
||||
| 应用更新 | | N | | |
|
||||
| 后端语言 | | Go | | |
|
||||
| API | | HTTP API | | |
|
||||
| 前端 | | vue.js | | |
|
||||
| CLI | | Y | | |
|
||||
| HTTP 服务器 | | 无,端口访问应用 | | traefik |
|
||||
| 公共数据库 | | 无 | | |
|
||||
| 开发文档 | | [wiki](https://wiki.casaos.io/en/contribute/development) | | |
|
||||
| 2FA | | N | Y | |
|
||||
| 安装方式 | | 服务器安装 | 容器安装 | |
|
||||
| 商店更新 | | N | Y | Y |
|
||||
| 商店绑定域名 | Y | N | N | |
|
||||
| DNS服务 | Y | N | | |
|
||||
|
||||
* 应用自动分配4级域名后,如何再 CNAME 二级域名?
|
||||
|
||||
### casaos 架构分析
|
||||
|
||||
#### 安装脚本
|
||||
|
||||
1. Check硬件、操作系统、cpu架构
|
||||
2. 安装依赖包
|
||||
3. 安装docker
|
||||
4. 下载各源码包
|
||||
5. 启动个源码对应服务
|
||||
|
||||
#### 源码解析
|
||||
|
||||
| 运行时项目 | 对应项目源码 | 说明 |
|
||||
| -------------- | -------- | -------------------------------------------------------- |
|
||||
| casaos | CasaOS | 每隔5秒通过websocekt推送内存/CPU/网络等系统信息;提供ssh登录操作的http接口;提供"sys", "port", "file", "folder", "batch", "image", "samba", "notify"这些http接口的访问|
|
||||
| casaos-message-bus | CasaOS-MessageBus | 类似一个MQ提供消息的发布/订阅 |
|
||||
| casaos-local-storage | CasaOS-LocalStorage | 每隔5S统计磁盘/USB信息,提供监控信息;提供http接口访问disk/usb/storage信息 |
|
||||
| casaos-user-service | CasaOS-UserService | 通过http server提供用户管理的接口 |
|
||||
| casaos-app-management | CasaOS-AppManagement | 使用CasaOS-AppStore中App的元数据;提供所有appList的分类/列表/详细信息;通过docker来管理app,提供安装/启动/关闭/重启/日志查看等相关接口;docker-compose管理(V2);|
|
||||
| casaos-gateway | CasaOS-Gateway | 提供Gateway自身管理接口,比如切换Gateway的port的接口,查看所有路由的接口;提供CasaOS-UI的静态资源访问服务;根据请求的PATH将请求代理转发至其它模块 |
|
||||
| casaos-cli | CasaOS-CLI | 通过命令行的方式来调用CasaOS-Gateway的接口,该模块未完全实现,实现了部分命令 |
|
||||
| linux-all-casaos | CasaOS-UI | VUE2,CasaOS的Web源码,编译后的html/js/image/css等由CasaOS-Gateway提供访问入口,所有API接口指向CasaOS-Gateway |
|
||||
| - | CasaOS-Common | Common structs and functions for CasaOS |
|
37
appmanage_new/docs/notes/软件工厂.md
Normal file
37
appmanage_new/docs/notes/软件工厂.md
Normal file
@ -0,0 +1,37 @@
|
||||
# 软件工厂
|
||||
|
||||
由 Websoft9 自主研发的面向高校的【软件工厂】解决方案,学生和老师可以自由使用镜像库用于教学。
|
||||
|
||||
## 商业需求
|
||||
|
||||
高校老师和学生在教学中需要使用大量的开源软件作为教学的载体,以及通过使用开源软件学习实战的经验,打开商业化软件领域的大门。
|
||||
目前,老师和学生受制于眼界以及技术原因,无法很方便的搭建和使用各种开源软件,大大的制约了教学的发展。
|
||||
|
||||
我们目前的方案只需要加以【盒子化】即可满足用户的需要。
|
||||
|
||||
## 业务模式
|
||||
|
||||
对我们既有的方案进行盒子化之后,通过如下方式盈利:
|
||||
|
||||
- 售卖软件解决方案以及技术支持
|
||||
- 云资源分成
|
||||
- 镜像按小时付费
|
||||
- 知识库付费
|
||||
- 课程合作付费
|
||||
|
||||
## 功能需求
|
||||
|
||||
盒子化的解决方案包括:
|
||||
|
||||
### 业务功能
|
||||
|
||||
- 可以一键使用的软件库(提供 300+场景方案)
|
||||
- 可以在线使用的工具库(基于 Web 的工具库,学生在上课中无需安装大量的客户端工具即可完成任务)
|
||||
- 可以管理教学过程的慕课系统
|
||||
|
||||
### 系统功能
|
||||
|
||||
- 账号管理
|
||||
- 日志管理
|
||||
- 安全管理
|
||||
- 资源消耗管理
|
8
appmanage_new/docs/plugin-developer.md
Normal file
8
appmanage_new/docs/plugin-developer.md
Normal file
@ -0,0 +1,8 @@
|
||||
# Developer Guide
|
||||
|
||||
## Multiple languages
|
||||
|
||||
You should know the points below if you want to translate:
|
||||
|
||||
- Every plugin's po.zh_CN.js can be used for other Cockpit plugin
|
||||
- po.zh_CN.js.gz at base1 is the system language file
|
29
appmanage_new/docs/recruit.md
Normal file
29
appmanage_new/docs/recruit.md
Normal file
@ -0,0 +1,29 @@
|
||||
# recruit
|
||||
|
||||
In order to optimize the app management architecture and code specifications, and perform daily maintenance on new features and bugs, Websoft9 recruits a senior Python development expert.
|
||||
|
||||
## Requirements
|
||||
|
||||
1. Proficient in Python and have architectural experience in Python web projects
|
||||
|
||||
2. Have experience in developing distributed (caching, message middleware)
|
||||
|
||||
3. Familiar with Docker and other container technologies
|
||||
|
||||
4. Love coding and willing to continuously optimize code at work
|
||||
|
||||
5. Strong document reading and understanding skills as well as document writing experience
|
||||
|
||||
## Job Description
|
||||
|
||||
1. Complete additional features and modify bugs for existing projects
|
||||
|
||||
2. Provide reasons and solutions for optimizing the project architecture and API methods
|
||||
|
||||
## Work form
|
||||
|
||||
Remote, must complete 40 hours of work per month
|
||||
|
||||
## Remuneration and payment
|
||||
|
||||
Pay 4000 yuan before the 10th of each month
|
18
appmanage_new/docs/user.md
Normal file
18
appmanage_new/docs/user.md
Normal file
@ -0,0 +1,18 @@
|
||||
# User Guide
|
||||
|
||||
## FAQ
|
||||
|
||||
#### user can not sudo?
|
||||
|
||||
```
|
||||
# add user to sudo/admin group (select one command)
|
||||
usermod -aG wheel username
|
||||
usermod -aG sudo username
|
||||
|
||||
# sudo not need to input password
|
||||
|
||||
```
|
||||
|
||||
#### Can not login when I reinstall my Instance?
|
||||
|
||||
Need to clear all cookie at you browser
|
7
appmanage_new/main.py
Normal file
7
appmanage_new/main.py
Normal file
@ -0,0 +1,7 @@
|
||||
from fastapi import FastAPI

from api.v1 import main as v1_router

# Application entry point: expose the v1 API under the /api/v1 prefix.
# Fix: removed the unused `from fastapi.routing import APIRouter` import
# (the 7-line module never referenced APIRouter).
app = FastAPI()

app.include_router(v1_router.router, prefix="/api/v1")
|
10
appmanage_new/requirements.txt
Normal file
10
appmanage_new/requirements.txt
Normal file
@ -0,0 +1,10 @@
|
||||
fastapi==0.98.0
|
||||
uvicorn
|
||||
rq
|
||||
apscheduler
|
||||
docker
|
||||
psutil
|
||||
gunicorn
|
||||
python-dotenv
|
||||
sqlalchemy
|
||||
databases[sqlite]
|
0
appmanage_new/tests/README.md
Normal file
0
appmanage_new/tests/README.md
Normal file
Loading…
Reference in New Issue
Block a user