---
base_model:
- Sao10K/L3-8B-Stheno-v3.2
- Casual-Autopsy/L3-Umbral-Mind-RP-v1.0-8B
- bluuwhale/L3-SthenoMaidBlackroot-8B-V1
- Cas-Warehouse/Llama-3-Mopeyfied-Psychology-v2
- migtissera/Llama-3-8B-Synthia-v3.5
- tannedbum/L3-Nymeria-Maid-8B
- Casual-Autopsy/L3-Umbral-Mind-RP-v0.3-8B
- tannedbum/L3-Nymeria-8B
- ChaoticNeutrals/Hathor_RP-v.01-L3-8B
- cgato/L3-TheSpice-8b-v0.8.3
- Sao10K/L3-8B-Stheno-v3.1
- Nitral-AI/Hathor_Stable-v0.2-L3-8B
- aifeifei798/llama3-8B-DarkIdol-1.0
- ChaoticNeutrals/Poppy_Porpoise-1.4-L3-8B
- ResplendentAI/Nymph_8B
tags:
- merge
- mergekit
- lazymergekit
- not-for-all-audiences
- nsfw
- rp
- roleplay
- role-play
language:
- en
---

# L3-Uncen-Merger-Omelette-RP-v0.2-8B
**L3-Uncen-Merger-Omelette-RP-v0.2-8B** is a merge of the following models using LazyMergekit (a sketch of how the merge configs below can be run with mergekit follows this list):
- Sao10K/L3-8B-Stheno-v3.2
- Casual-Autopsy/L3-Umbral-Mind-RP-v1.0-8B
- bluuwhale/L3-SthenoMaidBlackroot-8B-V1
- Cas-Warehouse/Llama-3-Mopeyfied-Psychology-v2
- migtissera/Llama-3-8B-Synthia-v3.5
- tannedbum/L3-Nymeria-Maid-8B
- Casual-Autopsy/L3-Umbral-Mind-RP-v0.3-8B
- tannedbum/L3-Nymeria-8B
- ChaoticNeutrals/Hathor_RP-v.01-L3-8B
- cgato/L3-TheSpice-8b-v0.8.3
- Sao10K/L3-8B-Stheno-v3.1
- Nitral-AI/Hathor_Stable-v0.2-L3-8B
- aifeifei798/llama3-8B-DarkIdol-1.0
- ChaoticNeutrals/Poppy_Porpoise-1.4-L3-8B
- ResplendentAI/Nymph_8B
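
The recipes in the Secret Sauce section below are ordinary mergekit YAML configs, merged in stages. As a minimal sketch of running one stage, assuming a recent mergekit release with the Python API described in its README and a hypothetical local file `scrambled-egg-1.yml` containing the first config:

```python
# Sketch: run one stage of the merge with mergekit's Python API.
# Assumes `pip install mergekit` and a local YAML file holding one of the
# configs from the "Secret Sauce" section below.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "scrambled-egg-1.yml"  # hypothetical path to the first config
OUTPUT_PATH = "./Scrambled-Egg-1"   # the merged model is written here

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The intermediate merges (`Scrambled-Egg-*`, `Omelette-*`) produced this way are then referenced by repo id or local path in the later configs.
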
## Secret Sauce

### Scrambled-Egg-1

```yaml
models:
  - model: Sao10K/L3-8B-Stheno-v3.2
  - model: Casual-Autopsy/L3-Umbral-Mind-RP-v1.0-8B
    parameters:
      density: 0.45
      weight: 0.33
  - model: bluuwhale/L3-SthenoMaidBlackroot-8B-V1
    parameters:
      density: 0.75
      weight: 0.33
merge_method: dare_ties
base_model: Sao10K/L3-8B-Stheno-v3.2
parameters:
  int8_mask: true
dtype: bfloat16
```
### Scrambled-Egg-2

```yaml
models:
  - model: Cas-Warehouse/Llama-3-Mopeyfied-Psychology-v2
  - model: migtissera/Llama-3-8B-Synthia-v3.5
    parameters:
      density: 0.35
      weight: 0.25
  - model: tannedbum/L3-Nymeria-Maid-8B
    parameters:
      density: 0.65
      weight: 0.25
merge_method: dare_ties
base_model: Cas-Warehouse/Llama-3-Mopeyfied-Psychology-v2
parameters:
  int8_mask: true
dtype: bfloat16
```
### Scrambled-Egg-3

```yaml
models:
  - model: Casual-Autopsy/L3-Umbral-Mind-RP-v0.3-8B
  - model: tannedbum/L3-Nymeria-8B
    parameters:
      density: 0.5
      weight: 0.35
  - model: ChaoticNeutrals/Hathor_RP-v.01-L3-8B
    parameters:
      density: 0.4
      weight: 0.2
merge_method: dare_ties
base_model: Casual-Autopsy/L3-Umbral-Mind-RP-v0.3-8B
parameters:
  int8_mask: true
dtype: bfloat16
```
### Omelette-1

```yaml
models:
  - model: Casual-Autopsy/Scrambled-Egg-1
  - model: Casual-Autopsy/Scrambled-Egg-3
merge_method: slerp
base_model: Casual-Autopsy/Scrambled-Egg-1
parameters:
  t:
    - value: [0.1, 0.15, 0.2, 0.4, 0.6, 0.4, 0.2, 0.15, 0.1]
  embed_slerp: true
dtype: bfloat16
```
### Omelette-2

```yaml
models:
  - model: Casual-Autopsy/Omelette-1
  - model: Casual-Autopsy/Scrambled-Egg-2
merge_method: slerp
base_model: Casual-Autopsy/Omelette-1
parameters:
  t:
    - value: [0.7, 0.5, 0.3, 0.25, 0.2, 0.25, 0.3, 0.5, 0.7]
  embed_slerp: true
dtype: bfloat16
```
### L3-Uncen-Merger-Omelette-RP-v0.2-8B

```yaml
models:
  - model: Casual-Autopsy/Omelette-2
  - model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      weight: 0.01
  - model: Sao10K/L3-8B-Stheno-v3.1
    parameters:
      weight: 0.01
  - model: Nitral-AI/Hathor_Stable-v0.2-L3-8B
    parameters:
      weight: 0.01
  - model: aifeifei798/llama3-8B-DarkIdol-1.0
    parameters:
      weight: 0.02
  - model: ChaoticNeutrals/Poppy_Porpoise-1.4-L3-8B
    parameters:
      weight: 0.025
  - model: ResplendentAI/Nymph_8B
    parameters:
      weight: 0.025
merge_method: task_arithmetic
base_model: Casual-Autopsy/Omelette-2
dtype: bfloat16
```
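
## Usage

The merged model can be loaded like any other Llama-3-8B chat model via `transformers`. A minimal sketch, assuming the final merge is published under the repo id `Casual-Autopsy/L3-Uncen-Merger-Omelette-RP-v0.2-8B` (substitute the actual repo id or a local path to the merged weights):

```python
# Minimal text-generation sketch with transformers.
# Assumes `pip install transformers accelerate` and a GPU with enough
# VRAM for an 8B model in bfloat16.
import torch
import transformers
from transformers import AutoTokenizer

model_id = "Casual-Autopsy/L3-Uncen-Merger-Omelette-RP-v0.2-8B"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
messages = [{"role": "user", "content": "Introduce yourself in character."}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

outputs = pipeline(
    prompt,
    max_new_tokens=256,
    do_sample=True,
    temperature=0.7,
    top_p=0.95,
)
print(outputs[0]["generated_text"])
```
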