licyk committed on
Commit
7c515f1
1 Parent(s): e76f682
Files changed (1)
  1. 1714571107277.toml +45 -0
1714571107277.toml ADDED
@@ -0,0 +1,45 @@
+ model_train_type = "sd-lora"
+ pretrained_model_name_or_path = "E:/Softwares/lora-scripts/sd-models/animefull-final-pruned.safetensors"
+ v2 = false
+ train_data_dir = "E:/Softwares/lora-scripts/train/Matchach"
+ prior_loss_weight = 1
+ resolution = "512,512"
+ enable_bucket = true
+ min_bucket_reso = 256
+ max_bucket_reso = 1024
+ bucket_reso_steps = 64
+ output_name = "matchach_1"
+ output_dir = "./output"
+ save_model_as = "safetensors"
+ save_precision = "fp16"
+ save_every_n_epochs = 2
+ max_train_epochs = 10
+ train_batch_size = 1
+ gradient_checkpointing = false
+ network_train_unet_only = false
+ network_train_text_encoder_only = false
+ learning_rate = 0.0001
+ unet_lr = 0.0001
+ text_encoder_lr = 0.00001
+ lr_scheduler = "cosine_with_restarts"
+ lr_warmup_steps = 0
+ lr_scheduler_num_cycles = 1
+ optimizer_type = "AdamW8bit"
+ network_module = "lycoris.kohya"
+ network_dim = 64
+ network_alpha = 32
+ log_with = "tensorboard"
+ logging_dir = "./logs"
+ caption_extension = ".txt"
+ shuffle_caption = true
+ keep_tokens = 0
+ max_token_length = 255
+ seed = 1337
+ clip_skip = 2
+ mixed_precision = "fp16"
+ xformers = true
+ lowram = false
+ cache_latents = true
+ cache_latents_to_disk = true
+ persistent_data_loader_workers = true
+ network_args = [ "conv_dim=4", "conv_alpha=1", "dropout=0", "algo=locon" ]
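A minimal sketch, not part of the commit: assuming Python 3.11+ (for the standard-library tomllib) and that the file above sits in the working directory, this is one way to load the added config and inspect its LyCORIS-related keys. Only the file name and key names come from the diff; the script itself is illustrative.

import tomllib

# Load the training config added by this commit (tomllib requires binary mode).
with open("1714571107277.toml", "rb") as f:
    config = tomllib.load(f)

# Inspect the network settings declared in the file.
print(config["model_train_type"])                      # sd-lora
print(config["network_module"])                        # lycoris.kohya
print(config["network_dim"], config["network_alpha"])  # 64 32
print(config["network_args"])                          # ['conv_dim=4', 'conv_alpha=1', 'dropout=0', 'algo=locon']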