final-lora-426 / adapter_config.json
{
"auto_mapping": null,
"base_model_name_or_path": "ycchen/yc-test1",
"bias": "none",
"fan_in_fan_out": false,
"inference_mode": true,
"init_lora_weights": true,
"layers_pattern": null,
"layers_to_transform": null,
"lora_alpha": 16,
"lora_dropout": 0.05,
"modules_to_save": null,
"peft_type": "LORA",
"r": 32,
"revision": null,
"target_modules": [
"28.attn.c_attn",
"28.attn.c_proj",
"28.mlp.c_proj",
"28.mlp.w1",
"28.mlp.w2",
"29.attn.c_attn",
"29.attn.c_proj",
"29.mlp.c_proj",
"29.mlp.w1",
"29.mlp.w2",
"30.attn.c_attn",
"30.attn.c_proj",
"30.mlp.c_proj",
"30.mlp.w1",
"30.mlp.w2",
"31.attn.c_attn",
"31.attn.c_proj",
"31.mlp.c_proj",
"31.mlp.w1",
"31.mlp.w2",
"32.attn.c_attn",
"32.attn.c_proj",
"32.mlp.c_proj",
"32.mlp.w1",
"32.mlp.w2",
"33.attn.c_attn",
"33.attn.c_proj",
"33.mlp.c_proj",
"33.mlp.w1",
"33.mlp.w2",
"34.attn.c_attn",
"34.attn.c_proj",
"34.mlp.c_proj",
"34.mlp.w1",
"34.mlp.w2",
"35.attn.c_attn",
"35.attn.c_proj",
"35.mlp.c_proj",
"35.mlp.w1",
"35.mlp.w2",
"36.attn.c_attn",
"36.attn.c_proj",
"36.mlp.c_proj",
"36.mlp.w1",
"36.mlp.w2",
"37.attn.c_attn",
"37.attn.c_proj",
"37.mlp.c_proj",
"37.mlp.w1",
"37.mlp.w2",
"38.attn.c_attn",
"38.attn.c_proj",
"38.mlp.c_proj",
"38.mlp.w1",
"38.mlp.w2",
"39.attn.c_attn",
"39.attn.c_proj",
"39.mlp.c_proj",
"39.mlp.w1",
"39.mlp.w2"
],
"task_type": "CAUSAL_LM"
}
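
For context, a minimal sketch of how a config like this might be recreated and used with the PEFT library. The hyperparameters, base model id, and module names all come from the JSON above; the local adapter path, the trust_remote_code flag, and the overall workflow are illustrative assumptions, not something stated in this repo.

# A minimal sketch, assuming the transformers and peft libraries.
# Hyperparameters mirror adapter_config.json above; paths and flags
# marked as assumptions are for illustration only.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, PeftModel, get_peft_model

# Reconstruct the config: LoRA (r=32, alpha=16, dropout=0.05) applied to
# the attention and MLP projections of transformer blocks 28-39 only,
# matching the target_modules list above.
lora_config = LoraConfig(
    r=32,
    lora_alpha=16,
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[
        f"{layer}.{module}"
        for layer in range(28, 40)
        for module in ("attn.c_attn", "attn.c_proj", "mlp.c_proj", "mlp.w1", "mlp.w2")
    ],
)

# Training setup: wrap the base model named in the config.
# trust_remote_code=True is an assumption (the module names suggest a
# custom architecture, but the config does not say so explicitly).
base = AutoModelForCausalLM.from_pretrained("ycchen/yc-test1", trust_remote_code=True)
model = get_peft_model(base, lora_config)

# Inference setup: load the saved adapter folder directly.
# "final-lora-426" is the folder name shown above; the local path is assumed.
model = PeftModel.from_pretrained(base, "final-lora-426")
model.eval()  # consistent with "inference_mode": true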