models:
  - model: eren23/ogno-monarch-jaskier-merge-7b-OH-PREF-DPO-v2
    parameters:
      weight: 0.35
  - model: yam-peleg/Experiment26-7B
    parameters:
      weight: 0.65
base_model: yam-peleg/Experiment26-7B
merge_method: task_arithmetic
dtype: bfloat16
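For readers unfamiliar with `task_arithmetic`, the sketch below illustrates the per-tensor operation this configuration describes: each listed model contributes a weighted "task vector" (its weights minus the base model's), and the weighted sum of those vectors is added back onto the base. This is a minimal plain-PyTorch illustration, not mergekit's actual implementation; the function name and the state-dict interface are hypothetical.

```python
import torch

def task_arithmetic_merge(
    base: dict[str, torch.Tensor],
    models: list[tuple[dict[str, torch.Tensor], float]],
) -> dict[str, torch.Tensor]:
    """Illustrative sketch: merged = base + sum(weight * (model - base))."""
    merged = {}
    for name, base_tensor in base.items():
        # Accumulate weighted task vectors in float32 for numerical stability.
        delta = torch.zeros_like(base_tensor, dtype=torch.float32)
        for state_dict, weight in models:
            # Task vector: difference between the fine-tuned and base weights.
            delta += weight * (state_dict[name].float() - base_tensor.float())
        # Cast back to bfloat16, matching the dtype set in the config above.
        merged[name] = (base_tensor.float() + delta).to(torch.bfloat16)
    return merged
```

In practice the merge is produced by running mergekit on the YAML configuration above rather than by hand-rolling code like this; the sketch only makes explicit what the `weight` parameters (0.35 and 0.65) and `base_model` mean under the `task_arithmetic` method.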