models:
  - model: mlabonne/ChimeraLlama-3-8B
    # No parameters necessary for base model
  - model: mlabonne/ChimeraLlama-3-8B
    parameters:
      density: 0.6
      weight: 0.2
  - model: nbeerbower/llama-3-dragonmaid-8B
    parameters:
      density: 0.55
      weight: 0.4
  - model: cognitivecomputations/dolphin-2.9-llama3-8b
    parameters:
      density: 0.55
      weight: 0.2
  - model: WesPro/F2PhenotypeDPO
    parameters:
      density: 0.55
      weight: 0.2
merge_method: dare_ties
base_model: mlabonne/ChimeraLlama-3-8B
parameters:
  int8_mask: true
dtype: float16
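
# A minimal usage sketch for running this DARE-TIES merge, assuming the
# arcee-ai/mergekit package is installed and this file is saved as
# config.yaml (the filename and the ./merged-model output path below are
# hypothetical, not part of the original config):
#
#   pip install mergekit
#   mergekit-yaml config.yaml ./merged-model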