# openxlab / new.py
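# Deployment script for an OpenXLab app-center instance: it installs AUTOMATIC1111's
# stable-diffusion-webui under /home/xlab-app-center, pulls extensions and models from
# Chinese mirrors (gitcode, kkgithub, hf-mirror, openi), logs GPU stats to Weights & Biases,
# and can expose a JupyterLab session through an ngrok tunnel.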
# Configuration
import os
install_path = '/home/xlab-app-center'
webui_repo = 'AUTOMATIC1111/stable-diffusion-webui --branch v1.9.4'
rename_repo = 'stable-diffusion-webui'
git_url = 'gitcode.com'
webui_port = 7860
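# The environment variables below override the default GitHub sources that the webui's
# launcher reads at startup (these names match the *_REPO / *_PACKAGE variables used by
# recent AUTOMATIC1111 releases), so dependency checkouts go through gitcode/kkgithub
# mirrors; PIP_INDEX_URL likewise points pip at the Aliyun mirror.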
package_envs = [
{"env": "STABLE_DIFFUSION_XL_REPO", "url": os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://gitcode.net/overbill1683/generative-models")},
{"env": "K_DIFFUSION_REPO", "url": os.environ.get('K_DIFFUSION_REPO', "https://gitcode.net/overbill1683/k-diffusion")},
{"env": "CODEFORMER_REPO", "url": os.environ.get('CODEFORMER_REPO', "https://gitcode.net/overbill1683/CodeFormer")},
{"env": "BLIP_REPO", "url": os.environ.get('BLIP_REPO', "https://gitcode.net/overbill1683/BLIP")},
{"env": "CLIP_PACKAGE", "url": os.environ.get('CLIP_REPO', "https://kkgithub.com/openai/CLIP")},
{"env": "ASSETS_REPO", "url": os.environ.get('CLIP_REPO', "https://kkgithub.com/AUTOMATIC1111/stable-diffusion-webui-assets.git")},
]
os.environ["PIP_INDEX_URL"] = "https://mirrors.aliyun.com/pypi/simple/"
for i in package_envs:
os.environ[i["env"]] = i["url"]
api_auth = 'Echoflare:Tt25faj8'
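# aria2c download command: resume partial downloads (-c), up to 16 connections per server
# (-x 16), split each file into 16 segments (-s 16) with a 1 MiB minimum piece size (-k 1M),
# and only log errors to the console.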
download_tool = 'aria2c --console-log-level=error -c -x 16 -s 16 -k 1M'
webui_args = [
'--api',
'--xformers',
'--no-hashing',
'--disable-nan-check',
'--disable-console-progressbars',
'--enable-console-prompts',
#'--no-gradio-queue',
'--no-half-vae',
"--skip-torch-cuda-test",
'--lowram',
"--allow-code"
# f'--api-auth={api_auth}',
# '--freeze-settings',
]
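# For reference, run_webui() below joins these flags into the launch command, roughly:
#   python launch.py --api --xformers --no-hashing --disable-nan-check \
#     --disable-console-progressbars --enable-console-prompts --no-half-vae \
#     --skip-torch-cuda-test --lowram --allow-code --port 7860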
extensions = [
'https://gitcode.com/zanllp/sd-webui-infinite-image-browsing',
    'https://gitcode.com/dtlnor/stable-diffusion-webui-localization-zh_CN', # Simplified Chinese localization
    'https://gitcode.com/DominikDoom/a1111-sd-webui-tagcomplete', # prompt tag autocompletion
'https://gitcode.com/Mikubill/sd-webui-controlnet', # ControlNet
"https://gitcode.net/overbill1683/stable-diffusion-webui-localization-zh_Hans",
#"https://gitcode.net/ranting8323/multidiffusion-upscaler-for-automatic1111",
"https://gitcode.net/ranting8323/adetailer",
"https://gitcode.net/ranting8323/sd-webui-inpaint-anything",
"https://gitcode.net/ranting8323/a1111-sd-webui-tagcomplete",
"https://openi.pcl.ac.cn/2575044704/sd-extension-system-info",
"https://openi.pcl.ac.cn/2575044704/batchlinks-webui",
'https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-localization-zh_CN',
'https://openi.pcl.ac.cn/2575044704/sd-webui-lora-block-weight',
'https://openi.pcl.ac.cn/2575044704/sd-skin-extension',
"https://kkgithub.com/thygate/stable-diffusion-webui-depthmap-script.git",
#"https://gitcode.net/ranting8323/sd-webui-controlnet",
#"https://kkgithub.com/SignalFlagZ/sd-webui-civbrowser.git",
#"https://kkgithub.com/continue-revolution/sd-webui-animatediff.git",
#"https://kkkkgithub.com/aigc-apps/sd-webui-EasyPhoto.git",
"https://kkgithub.com/Iyashinouta/sd-model-downloader.git",
"https://kkgithub.com/fkunn1326/openpose-editor.git",
"https://kkgithub.com/zero01101/openOutpaint-webUI-extension.git",
"https://kkgithub.com/LonicaMewinsky/gif2gif.git",
#"https://kkgithub.com/modelscope/facechain.git",
"https://openi.pcl.ac.cn/2575044704/sd-webui-agent-scheduler",
"https://openi.pcl.ac.cn/2575044704/sd-webui-depth-lib",
"https://openi.pcl.ac.cn/Echoflare/letest"
]
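# Model list entries are either plain URLs (saved under their basename) or "filename@url"
# pairs, which download_files() below splits so the file is saved under the chosen filename
# (useful for signed Civitai links).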
sd_models = [
#"kohakuXLDelta_rev1.safetensors@https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/478208/kohakuXlDeltaRev1.OzRM.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22kohakuXLDelta_rev1.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20240323/us-east-1/s3/aws4_request&X-Amz-Date=20240323T165433Z&X-Amz-SignedHeaders=host&X-Amz-Signature=7a14ba6161f355f0dd93229089f503fbbe94fbe994d42ba1e7efc1566ba584a6",
]
lora_models = [
"https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/ba.safetensors",
"https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/racaco2.safetensors",
]
vae_models = [
"https://hf-mirror.com/datasets/VASVASVAS/vae/resolve/main/pastel-waifu-diffusion.vae.pt"
]
ControlNet = True
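# fp16 ControlNet v1.1 checkpoints (plus a QR-code control model); fetched only when
# ControlNet is True, into extensions/sd-webui-controlnet/models (see run_webui below).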
controlnet_models = [
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_canny_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_lineart_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_softedge_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors',
'https://hf-mirror.com/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11u_sd15_tile_fp16.safetensors',
'https://hf-mirror.com/DionTimmer/controlnet_qrcode-control_v1p_sd15/resolve/main/control_v1p_sd15_qrcode.safetensors',
]
embedding_models = [
"unaestheticXL_Alb2.safetensors@https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/61235/unaestheticxlAlb2.7VDr.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22unaestheticXL_Alb2.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20240324/us-east-1/s3/aws4_request&X-Amz-Date=20240324T042909Z&X-Amz-SignedHeaders=host&X-Amz-Signature=9d4dd419dd2e579ca1ae88fe3315f4245edc5dbfb82d38e6aaa0cd383b8889c0",
]
hypernetwork_models = []
esrgan_models = []
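# Custom shell commands run right before launch: they replace the stock config.json,
# ui-config.json, and modules/ui_settings.py with prebuilt copies hosted on hf-mirror.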
custom_commands = [
f'rm -rf {install_path}/{rename_repo}/config.json',
f'rm -rf {install_path}/{rename_repo}/ui-config.json',
f'rm -rf {install_path}/{rename_repo}/modules/ui_settings.py',
f"{download_tool} https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/config-pub.json -d {install_path}/{rename_repo} -o config.json --allow-overwrite=true",
f"{download_tool} https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub3.json -d {install_path}/{rename_repo} -o ui-config.json --allow-overwrite=true",
f"{download_tool} https://hf-mirror.com/datasets/Mira-LeafTown/sd-webui-openxlab/raw/main/ui_settings.py -d {install_path}/{rename_repo}/modules -o ui_settings.py --allow-overwrite=true",
]
# WebUI deployment
def monitor_gpu():
    """Log GPU temperature and utilization to Weights & Biases once a minute."""
    import os
    import time
    import wandb
    import pynvml as nvidia_smi
    # Log in to WandB
    os.system('wandb login 5c00964de1bb95ec1ab24869d4c523c59e0fb8e3')
    # Initialize NVML
    nvidia_smi.nvmlInit()
    # Initialize the WandB project
    wandb.init(project="gpu-temperature-monitor")
    while True:
        try:
            # Get a handle to the GPU
            handle = nvidia_smi.nvmlDeviceGetHandleByIndex(0)  # 0 = the first GPU
            # Read the GPU temperature
            gpu_temperature = nvidia_smi.nvmlDeviceGetTemperature(handle, nvidia_smi.NVML_TEMPERATURE_GPU)
            # Read the GPU utilization
            utilization = nvidia_smi.nvmlDeviceGetUtilizationRates(handle)
            gpu_usage = utilization.gpu
            # Log GPU temperature and utilization to WandB
            wandb.log({"GPU 温度": gpu_temperature, "GPU 使用率": gpu_usage})
        except Exception as e:
            print(f"Error: {e}")
        time.sleep(60)
def download_extensions(extensions):
os.chdir(f'{install_path}/{rename_repo}/extensions')
for extension in extensions:
os.system(f'git clone {extension}')
def model_download(models, type_w):
for model in models:
download_files(model, type_w)
def remove_restart():
os.chdir("/home/xlab-app-center/stable-diffusion-webui/html")
os.system("rm ./footer.html && wget -O footer.html https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/footer.html")
#os.chdir("/home/xlab-app-center/stable-diffusion-webui/modules")
#os.system("rm ./ui_settings.py && wget -O ui_settings.py https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui_settings.py")
def download_files(url, source):
    """Download one model file with aria2c into <webui>/<source>, honoring 'filename@url' entries."""
    import re
    if '@' in url and (not url.startswith('http://') and not url.startswith('https://')):
        # "filename@url" entry: download the URL and save it under the given filename
        parts = url.split('@', 1)
        name = parts[0]
        url = parts[1]
        rename = f"-o '{name}'"
        if 'huggingface.co' in url:
            url = url.replace("huggingface.co", "hf-mirror.com")
    else:
        if any(host in url for host in ('huggingface.co', 'hf-mirror.com', 'huggingface.sukaka.top')):
            url = url.replace("huggingface.co", "hf-mirror.com")
            match_name = re.search(r'/([^/?]+)(?:\?download=true)?$', url)
            if match_name:
                rename = f"-o '{match_name.group(1)}'"
            else:
                rename = ''
        else:
            rename = ''
    source_dir = f'{install_path}/{rename_repo}/{source}'
    os.makedirs(source_dir, exist_ok=True)
    os.chdir(source_dir)
    os.system(f"{download_tool} '{url}' {rename}")
def run_webui():
os.system("pip install nvidia-ml-py3 wandb")
    # Create and start the GPU monitoring thread
monitor_thread = threading.Thread(target=monitor_gpu)
monitor_thread.start()
os.chdir(install_path)
if not os.path.exists(f'{install_path}/{rename_repo}'):
os.system(f"git clone https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-v1.8.0 {install_path}/{rename_repo}")
remove_restart()
os.system(f"aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/kaggle/input/museum/131-half.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o [萌二次元]131-half.safetensors")
if not os.path.exists(f'{install_path}/{rename_repo}'):
        print(f'Error while cloning https://{git_url}/{webui_repo}')
run_webui()
download_extensions(extensions)
model_download(sd_models, 'models/Stable-diffusion')
model_download(lora_models, 'models/Lora')
model_download(vae_models, 'models/VAE')
if ControlNet:
model_download(controlnet_models, 'extensions/sd-webui-controlnet/models')
model_download(hypernetwork_models, 'models/hypernetworks')
model_download(embedding_models, 'embeddings')
model_download(esrgan_models, 'models/ESRGAN')
os.chdir(f"{install_path}/{rename_repo}")
package_envs = [
{"env": "STABLE_DIFFUSION_REPO", "url": os.environ.get('STABLE_DIFFUSION_REPO', "https://gitcode.net/overbill1683/stablediffusion")},
{"env": "STABLE_DIFFUSION_XL_REPO", "url": os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://gitcode.net/overbill1683/generative-models")},
{"env": "K_DIFFUSION_REPO", "url": os.environ.get('K_DIFFUSION_REPO', "https://gitcode.net/overbill1683/k-diffusion")},
{"env": "CODEFORMER_REPO", "url": os.environ.get('CODEFORMER_REPO', "https://gitcode.net/overbill1683/CodeFormer")},
{"env": "BLIP_REPO", "url": os.environ.get('BLIP_REPO', "https://gitcode.net/overbill1683/BLIP")},
]
os.environ["PIP_INDEX_URL"] = "https://mirrors.aliyun.com/pypi/simple/"
for i in package_envs:
os.environ[i["env"]] = i["url"]
os.chdir(install_path)
for custom_command in custom_commands:
os.system(custom_command)
os.chdir(f"{install_path}/{rename_repo}")
os.system(f"python launch.py {' '.join(webui_args)} --port {webui_port}")
# Instance keep-alive
import time
def session_saver():
    """Keep the GPU busy with periodic CuPy matrix multiplications."""
    try:
        import cupy as cp
    except ImportError:
        print("cupy is not installed, installing...")
        try:
            import pip
            pip.main(['install', 'cupy'])
            import cupy as cp
        except ImportError:
            print("Could not install cupy; make sure pip is installed correctly.")
            return
    while True:
        for _ in range(1):
            matrix_a = cp.random.rand(2000, 2000)
            matrix_b = cp.random.rand(2000, 2000)
            result = cp.dot(matrix_a, matrix_b)
            print("Instance keep-alive:", result)
            del matrix_a, matrix_b, result
            cp.cuda.Stream.null.synchronize()
        time.sleep(600)
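# session_saver is optional (its thread start is commented out in run() below): it keeps the
# GPU allocation active by multiplying two 2000x2000 CuPy matrices every 10 minutes.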
# Startup
import threading
import subprocess
def setup_and_run_services():
packages = ["pyngrok", "jupyterlab"]
ngrok_token = "*"
http_port = "8083"
    # Install the Python packages
subprocess.Popen(["pip", "install"] + packages, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    # Wait for the package installation to finish
time.sleep(20)
    # Start the ngrok tunnel process
ngrok_command = ["ngrok", "http", http_port, "--authtoken=" + ngrok_token]
ngrok_process = subprocess.Popen(ngrok_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Start the jupyter-lab process
jupyter_command = [
"jupyter-lab",
"--no-browser",
"--ip=0.0.0.0",
"--allow-root",
"--notebook-dir=/",
"--port=" + http_port,
"--LabApp.allow_origin=*",
"--LabApp.token=",
"--LabApp.base_url="
]
jupyter_process = subprocess.Popen(jupyter_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Wait for both processes to exit
ngrok_out, ngrok_err = ngrok_process.communicate()
jupyter_out, jupyter_err = jupyter_process.communicate()
    # Print stdout and stderr from both processes
print("Ngrok Output:", ngrok_out.decode())
print("Ngrok Error:", ngrok_err.decode())
print("Jupyter Output:", jupyter_out.decode())
print("Jupyter Error:", jupyter_err.decode())
def run_in_background():
"""在后台线程中运行服务设置函数"""
thread = threading.Thread(target=setup_and_run_services)
thread.start()
return thread
def run():
background_thread = run_in_background()
print("jupyterlab服务正在后台运行...")
webui = threading.Thread(target=run_webui)
#saver = threading.Thread(target=session_saver)
webui.start()
#saver.start()
time.sleep(99999999)