base_model: moreh/MoMo-72B-lora-1.8.7-DPO
dtype: float32
merge_method: slerp
parameters:
  t:
  - filter: self_attn
    value:
    - 0
    - 0.5
    - 0.3
    - 0.7
    - 1
  - filter: mlp
    value:
    - 1
    - 0.5
    - 0.7
    - 0.3
    - 0
  - value: 0.5
slices:
- sources:
  - layer_range:
    - 0
    - 80
    model: moreh/MoMo-72B-lora-1.8.7-DPO
  - layer_range:
    - 0
    - 80
    model: ibivibiv/alpaca-dragon-72b-v1