# mixtral-ja-base-8x7b-v0.1 / mergekit_moe_config.yml
base_model: stabilityai/japanese-stablelm-base-gamma-7b
gate_mode: random
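# Note (added explanation, not part of the original file): this config builds a
# Mixtral-style 8x7B mixture-of-experts by cloning the same Mistral-based
# Japanese model into all eight expert slots. With gate_mode: random, mergekit
# initializes the router weights randomly rather than deriving them from
# prompts, so the positive_prompts entries below are unused placeholders and
# are left empty. A model merged this way is typically an initialization for
# further training, not something useful out of the box.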
experts:
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""