Upload face004/logs/GPU-使用率-温度检测20240529141058/wandb/run-20240529_141120-82eq2cmy/files/wandb-metadata.json with huggingface_hub
{
"os": "Linux-5.15.133+-x86_64-with-glibc2.35",
"python": "3.10.12",
"heartbeatAt": "2024-05-29T14:11:21.329856",
"startedAt": "2024-05-29T14:11:20.968446",
"docker": null,
"cuda": null,
"args": [
"--config_file",
"/root/lora-scripts/frontend/lora-scripts/frontend/lora-scripts/config/autosave/20240529-141049.toml"
],
"state": "running",
"program": "/root/lora-scripts/frontend/lora-scripts/frontend/lora-scripts/./sd-scripts/train_network.py",
"codePathLocal": "sd-scripts/train_network.py",
"codePath": "sd-scripts/train_network.py",
"git": {
"remote": "https://github.com/Akegarasu/lora-scripts",
"commit": "9c1d6a61b2f72e72aeb51f8a1e133dc975693035"
},
"email": null,
"root": "/root/lora-scripts/frontend/lora-scripts/frontend/lora-scripts",
"host": "4ac664d71f2b",
"username": "root",
"executable": "/kaggle/opt/conda/envs/venv/venv/bin/python3",
"cpu_count": 2,
"cpu_count_logical": 4,
"cpu_freq": {
"current": 2000.152,
"min": 0.0,
"max": 0.0
},
"cpu_freq_per_core": [
{
"current": 2000.152,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.152,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.152,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.152,
"min": 0.0,
"max": 0.0
}
],
"disk": {
"/": {
"total": 8062.387607574463,
"used": 5607.320209503174
}
},
"gpu": "Tesla T4",
"gpu_count": 2,
"gpu_devices": [
{
"name": "Tesla T4",
"memory_total": 16106127360
},
{
"name": "Tesla T4",
"memory_total": 16106127360
}
],
"memory": {
"total": 31.357555389404297
}
}
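
For reference, a minimal sketch of inspecting this metadata with Python's standard json module; the relative path below is taken from the upload message above and is an assumption about where the file sits locally:

import json

# Path from the upload message (assumption); adjust to the file's local location.
metadata_path = "wandb/run-20240529_141120-82eq2cmy/files/wandb-metadata.json"

with open(metadata_path, encoding="utf-8") as f:
    meta = json.load(f)

# Summarize the hardware wandb recorded for this run.
print(f"GPU: {meta['gpu']} x{meta['gpu_count']}")
for gpu in meta["gpu_devices"]:
    print(f"  {gpu['name']}: {gpu['memory_total'] / 2**30:.1f} GiB")
print(f"CPU cores (physical/logical): {meta['cpu_count']}/{meta['cpu_count_logical']}")
print(f"RAM: {meta['memory']['total']:.1f} GiB")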