jtatman committed on
Commit 17295d6
1 Parent(s): 37d2374

Upload folder using huggingface_hub

README.md CHANGED
@@ -19,40 +19,23 @@ SciPhi-Mistral-7B-32k-sliced is a merge of the following models using [LazyMerge
 slices:
   - sources:
     - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [0, 3]
+      layer_range: [0, 12]
   - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [5, 7]
-  - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [10, 10]
+    - model: NousResearch/Nous-Hermes-2-Mistral-7B-DPO
+      layer_range: [0, 12]
   - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [17, 23]
-  - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [31, 32]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5
-      layer_range: [0, 1]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5
-      layer_range: [11, 12]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5-Instruct
-      layer_range: [0, 1]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5-Instruct
-      layer_range: [11, 12]
+    - model: teknium/OpenHermes-2.5-Mistral-7B
+      layer_range: [0, 12]
+
 merge_method: slerp
-base_model: Locutusque/TinyMistral-248M-v2.5-Instruct
+base_model: teknium/OpenHermes-2.5-Mistral-7B
 parameters:
   t:
     - filter: self_attn
       value: [0, 0.5, 0.3, 0.7, 1]
     - filter: mlp
       value: [1, 0.5, 0.7, 0.3, 0]
-    - value: 0.3
+    - value: 0.5
 dtype: float16
 tokenizer_source: base
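
For reference, a config like the one above is applied with the mergekit CLI. A minimal sketch, assuming `pip install mergekit` and the YAML saved locally as config.yml (the output path is illustrative):

```python
# Sketch: apply the slerp merge described in the config above.
# Assumes mergekit is installed; output directory name is illustrative.
import subprocess

subprocess.run(
    ["mergekit-yaml", "config.yml", "./merged-model"],
    check=True,
)
```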
 
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+{
+  "<|im_end|>": 32000,
+  "<|im_start|>": 32001
+}
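
These two entries extend the base vocabulary with the ChatML control tokens at ids 32000 and 32001. A quick sanity check, as a sketch (the repo id below is a placeholder for wherever this folder was uploaded):

```python
# Sketch: confirm the ChatML tokens map to the ids in added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/this-repo")  # placeholder repo id
assert tok.convert_tokens_to_ids("<|im_end|>") == 32000
assert tok.convert_tokens_to_ids("<|im_start|>") == 32001
```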
config.json CHANGED
@@ -1,26 +1,26 @@
 {
-  "_name_or_path": "Locutusque/TinyMistral-248M-v2.5-Instruct",
+  "_name_or_path": "teknium/OpenHermes-2.5-Mistral-7B",
   "architectures": [
     "MistralForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 1,
-  "eos_token_id": 2,
+  "eos_token_id": 32000,
   "hidden_act": "silu",
-  "hidden_size": 1024,
+  "hidden_size": 4096,
   "initializer_range": 0.02,
-  "intermediate_size": 4096,
+  "intermediate_size": 14336,
   "max_position_embeddings": 32768,
   "model_type": "mistral",
   "num_attention_heads": 32,
-  "num_hidden_layers": 16,
+  "num_hidden_layers": 36,
   "num_key_value_heads": 8,
-  "rms_norm_eps": 1e-06,
+  "rms_norm_eps": 1e-05,
   "rope_theta": 10000.0,
-  "sliding_window": 32,
+  "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
   "transformers_version": "4.37.2",
-  "use_cache": true,
-  "vocab_size": 32005
+  "use_cache": false,
+  "vocab_size": 32002
 }
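
The new config describes a full-size Mistral-architecture network: the three 12-layer slices yield num_hidden_layers: 36, at hidden size 4096 rather than TinyMistral's 1024. A rough parameter count from these values, as a sketch:

```python
# Sketch: rough parameter count implied by the new config values.
hidden, inter, layers, vocab = 4096, 14336, 36, 32002
heads, kv_heads = 32, 8
head_dim = hidden // heads                       # 128

attn = 2 * hidden * hidden                       # q_proj + o_proj
attn += 2 * hidden * (kv_heads * head_dim)       # k_proj + v_proj (grouped-query)
mlp = 3 * hidden * inter                         # gate, up, down projections
per_layer = attn + mlp                           # layer norms are negligible

total = layers * per_layer + 2 * vocab * hidden  # + embed_tokens and untied lm_head
print(f"~{total / 1e9:.1f}B params, ~{2 * total / 1e9:.1f} GB in float16")
# -> ~8.1B params, ~16.2 GB, consistent with the shard sizes added below
```
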
mergekit_config.yml CHANGED
@@ -2,40 +2,23 @@
 slices:
   - sources:
     - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [0, 3]
+      layer_range: [0, 12]
   - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [5, 7]
-  - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [10, 10]
+    - model: NousResearch/Nous-Hermes-2-Mistral-7B-DPO
+      layer_range: [0, 12]
   - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [17, 23]
-  - sources:
-    - model: SciPhi/SciPhi-Mistral-7B-32k
-      layer_range: [31, 32]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5
-      layer_range: [0, 1]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5
-      layer_range: [11, 12]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5-Instruct
-      layer_range: [0, 1]
-  - sources:
-    - model: Locutusque/TinyMistral-248M-v2.5-Instruct
-      layer_range: [11, 12]
+    - model: teknium/OpenHermes-2.5-Mistral-7B
+      layer_range: [0, 12]
+
 merge_method: slerp
-base_model: Locutusque/TinyMistral-248M-v2.5-Instruct
+base_model: teknium/OpenHermes-2.5-Mistral-7B
 parameters:
   t:
     - filter: self_attn
       value: [0, 0.5, 0.3, 0.7, 1]
     - filter: mlp
       value: [1, 0.5, 0.7, 0.3, 0]
-    - value: 0.3
+    - value: 0.5
 dtype: float16
 tokenizer_source: base
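
The t values set the interpolation weight per anchor point: t = 0 keeps the first endpoint, t = 1 the second, with self_attn and mlp ramped in opposite directions across the layer stack. A minimal sketch of the underlying spherical linear interpolation (illustrative of the idea behind merge_method: slerp, not mergekit's internal code):

```python
# Sketch: slerp between two flattened weight tensors for a given t.
import numpy as np

def slerp(t: float, a: np.ndarray, b: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    a_n = a / (np.linalg.norm(a) + eps)
    b_n = b / (np.linalg.norm(b) + eps)
    omega = np.arccos(np.clip(np.dot(a_n, b_n), -1.0, 1.0))  # angle between tensors
    if omega < eps:  # nearly parallel: plain linear interpolation is fine
        return (1.0 - t) * a + t * b
    so = np.sin(omega)
    return (np.sin((1.0 - t) * omega) / so) * a + (np.sin(t * omega) / so) * b

# t = 0 keeps the first model, t = 1 the second; the config ramps
# self_attn from 0 to 1 and mlp from 1 to 0 across five anchor points.
merged = slerp(0.5, np.ones(8), np.full(8, 2.0))
```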
 
model-00001-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7fc3c25bccc6c95ddbcfb48264d5c7671a144fe37b78b0c227d41040d57e941b
+size 1979781432

model-00002-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7dd976fe122617290538ce293a88aa61fce5792f410941410a3e8afbffd6051
+size 1946243944

model-00003-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79a54ad23c20076982c3cbd0233cc70695f5366ab1e6e0ada479055f8a2b5a65
+size 1979773128

model-00004-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fdcaeec68e32f0dc5ce5d0fb607dd3fdff898a96ed35218fc4cad059024eeff0
+size 1946235640

model-00005-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f2be50d616761620e7b88252f8cc87fe5b9b7b89937bae7199096bbe5e756a85
+size 1979789736

model-00006-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb772843404bb698829a1370b83c4563dd079beca3eb7f46a320d1dab7144deb
+size 1946219000

model-00007-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf922c659a6ef0ccb5a65d3dadf8617c9da49df1ec4db0fd2f561037027b05b0
+size 1946243896

model-00008-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d72c3500d50fa1eeb173eba39b223bc2310ac8c46530bdf4a19d3aa5d4e0ba8
+size 1950445224

model-00009-of-00009.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bab050eeae0205c6d5b881dd52a216deecb87a9f97fa32705a4434ec8e970e24
+size 553682288
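
The nine LFS pointers sum to roughly 16.2 GB, which lines up with the ~8.1B float16 parameters implied by config.json. A quick check:

```python
# Sketch: total checkpoint size from the nine LFS pointer sizes above.
shard_sizes = [
    1979781432, 1946243944, 1979773128, 1946235640, 1979789736,
    1946219000, 1946243896, 1950445224, 553682288,
]
print(f"{sum(shard_sizes) / 1e9:.2f} GB in {len(shard_sizes)} shards")  # ~16.23 GB
```
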
model.safetensors.index.json CHANGED
@@ -1 +1 @@
- {"metadata": {"mergekit_version": "0.0.4.1"}, "weight_map": {"lm_head.weight": "model-00001-of-00003.safetensors", "model.norm.weight": "model-00001-of-00003.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.15.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.14.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.13.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.12.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", 
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors", "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00002-of-00003.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00003.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00002-of-00003.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00003.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00003.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00003.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00003.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00003.safetensors", "model.layers.0.input_layernorm.weight": "model-00002-of-00003.safetensors", "model.embed_tokens.weight": "model-00002-of-00003.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00003.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00003.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00003.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00003.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00002-of-00003.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00003.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00002-of-00003.safetensors", 
"model.layers.9.post_attention_layernorm.weight": "model-00002-of-00003.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00003.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00003.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00003.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00003.safetensors", "model.layers.9.input_layernorm.weight": "model-00002-of-00003.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00002-of-00003.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00003.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00002-of-00003.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00003.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00003.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00003.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00003.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00003.safetensors", "model.layers.8.input_layernorm.weight": "model-00002-of-00003.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00002-of-00003.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00003.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00003-of-00003.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00003-of-00003.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00003-of-00003.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00003-of-00003.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00003-of-00003.safetensors", "model.layers.7.input_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00003-of-00003.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00003-of-00003.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00003-of-00003.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00003-of-00003.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00003-of-00003.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00003-of-00003.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00003-of-00003.safetensors", "model.layers.6.input_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00003-of-00003.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00003-of-00003.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00003-of-00003.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00003-of-00003.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00003-of-00003.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00003-of-00003.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00003-of-00003.safetensors", "model.layers.5.input_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00003-of-00003.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00003.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00003-of-00003.safetensors", "model.layers.11.post_attention_layernorm.weight": 
"model-00003-of-00003.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00003.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00003.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00003.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00003.safetensors", "model.layers.11.input_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00003-of-00003.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00003.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00003-of-00003.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00003.safetensors", "model.layers.10.input_layernorm.weight": "model-00003-of-00003.safetensors"}}
 
+ {"metadata": {"mergekit_version": "0.0.4.1"}, "weight_map": {"model.layers.35.mlp.down_proj.weight": "model-00001-of-00009.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00001-of-00009.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00001-of-00009.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00001-of-00009.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00001-of-00009.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00001-of-00009.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00001-of-00009.safetensors", "model.layers.35.input_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00001-of-00009.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00001-of-00009.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00001-of-00009.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00001-of-00009.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00001-of-00009.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00001-of-00009.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00001-of-00009.safetensors", "model.layers.34.input_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00001-of-00009.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00001-of-00009.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00001-of-00009.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00001-of-00009.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00001-of-00009.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00001-of-00009.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00001-of-00009.safetensors", "model.layers.33.input_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00001-of-00009.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00001-of-00009.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00001-of-00009.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00001-of-00009.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00001-of-00009.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00001-of-00009.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00001-of-00009.safetensors", "model.layers.32.input_layernorm.weight": "model-00001-of-00009.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00001-of-00009.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00001-of-00009.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00002-of-00009.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00009.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00009.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00009.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00009.safetensors", 
"model.layers.31.input_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00002-of-00009.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00009.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00002-of-00009.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00009.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00009.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00009.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00009.safetensors", "model.layers.30.input_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00002-of-00009.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00009.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00002-of-00009.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00009.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00009.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00009.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00009.safetensors", "model.layers.29.input_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00002-of-00009.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00009.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00002-of-00009.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00009.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00002-of-00009.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00002-of-00009.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00002-of-00009.safetensors", "model.layers.28.input_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00002-of-00009.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00009.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00002-of-00009.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00009.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00009.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00009.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00009.safetensors", "model.layers.27.input_layernorm.weight": "model-00002-of-00009.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00003-of-00009.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00009.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00003-of-00009.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00009.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00009.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00009.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00009.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00009.safetensors", 
"model.layers.26.input_layernorm.weight": "model-00003-of-00009.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00003-of-00009.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00009.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00003-of-00009.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00009.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00009.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00009.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00009.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00009.safetensors", "model.layers.25.input_layernorm.weight": "model-00003-of-00009.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00003-of-00009.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00009.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00003-of-00009.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00009.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00009.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00009.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00009.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00009.safetensors", "model.layers.24.input_layernorm.weight": "model-00003-of-00009.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00009.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00003-of-00009.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00009.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00009.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00009.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00009.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00003-of-00009.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00009.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00003-of-00009.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00009.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00004-of-00009.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00004-of-00009.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00004-of-00009.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00004-of-00009.safetensors", "model.layers.21.input_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00004-of-00009.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00004-of-00009.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00004-of-00009.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00004-of-00009.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00004-of-00009.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00004-of-00009.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00004-of-00009.safetensors", "model.layers.20.input_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00004-of-00009.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00004-of-00009.safetensors", 
"model.layers.19.mlp.up_proj.weight": "model-00004-of-00009.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00004-of-00009.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00004-of-00009.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00004-of-00009.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00004-of-00009.safetensors", "model.layers.19.input_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00004-of-00009.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00009.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00004-of-00009.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00009.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00009.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00009.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00009.safetensors", "model.layers.18.input_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00004-of-00009.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00009.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00004-of-00009.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00009.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00009.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00009.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00009.safetensors", "model.layers.17.input_layernorm.weight": "model-00004-of-00009.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00004-of-00009.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00005-of-00009.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00005-of-00009.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00005-of-00009.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00005-of-00009.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00005-of-00009.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00005-of-00009.safetensors", "model.layers.16.input_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00005-of-00009.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00005-of-00009.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00005-of-00009.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00005-of-00009.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00005-of-00009.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00005-of-00009.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00005-of-00009.safetensors", "model.layers.15.input_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00005-of-00009.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00005-of-00009.safetensors", 
"model.layers.14.mlp.up_proj.weight": "model-00005-of-00009.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00005-of-00009.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00005-of-00009.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00005-of-00009.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00005-of-00009.safetensors", "model.layers.14.input_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00005-of-00009.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00005-of-00009.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00005-of-00009.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00005-of-00009.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00005-of-00009.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00005-of-00009.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00005-of-00009.safetensors", "model.layers.13.input_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00005-of-00009.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00005-of-00009.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00005-of-00009.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00005-of-00009.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00006-of-00009.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00006-of-00009.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00006-of-00009.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00006-of-00009.safetensors", "model.layers.12.input_layernorm.weight": "model-00006-of-00009.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00006-of-00009.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00006-of-00009.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00006-of-00009.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00006-of-00009.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00006-of-00009.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00006-of-00009.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00006-of-00009.safetensors", "model.layers.9.input_layernorm.weight": "model-00006-of-00009.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00006-of-00009.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00006-of-00009.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00006-of-00009.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00006-of-00009.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00006-of-00009.safetensors", 
"model.layers.8.self_attn.v_proj.weight": "model-00006-of-00009.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00006-of-00009.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00006-of-00009.safetensors", "model.layers.8.input_layernorm.weight": "model-00006-of-00009.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00006-of-00009.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00006-of-00009.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00006-of-00009.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00006-of-00009.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00006-of-00009.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00006-of-00009.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00006-of-00009.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00006-of-00009.safetensors", "model.layers.7.input_layernorm.weight": "model-00006-of-00009.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00006-of-00009.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00006-of-00009.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00007-of-00009.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00007-of-00009.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00007-of-00009.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00007-of-00009.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00007-of-00009.safetensors", "model.layers.6.input_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00007-of-00009.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00007-of-00009.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00007-of-00009.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00007-of-00009.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00007-of-00009.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00007-of-00009.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00007-of-00009.safetensors", "model.layers.5.input_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00007-of-00009.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00007-of-00009.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00007-of-00009.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00007-of-00009.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00007-of-00009.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00007-of-00009.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00007-of-00009.safetensors", "model.layers.4.input_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00007-of-00009.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00007-of-00009.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00007-of-00009.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00007-of-00009.safetensors", "model.layers.3.self_attn.v_proj.weight": 
"model-00007-of-00009.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00007-of-00009.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00007-of-00009.safetensors", "model.layers.3.input_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00007-of-00009.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00007-of-00009.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00007-of-00009.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00007-of-00009.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00007-of-00009.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00007-of-00009.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00007-of-00009.safetensors", "model.layers.2.input_layernorm.weight": "model-00007-of-00009.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00008-of-00009.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00008-of-00009.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00008-of-00009.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00008-of-00009.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00008-of-00009.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00008-of-00009.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00008-of-00009.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00008-of-00009.safetensors", "model.layers.1.input_layernorm.weight": "model-00008-of-00009.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00008-of-00009.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00008-of-00009.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00008-of-00009.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00008-of-00009.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00008-of-00009.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00008-of-00009.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00008-of-00009.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00008-of-00009.safetensors", "model.layers.0.input_layernorm.weight": "model-00008-of-00009.safetensors", "model.embed_tokens.weight": "model-00008-of-00009.safetensors", "lm_head.weight": "model-00008-of-00009.safetensors", "model.norm.weight": "model-00008-of-00009.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00008-of-00009.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00008-of-00009.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00008-of-00009.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00008-of-00009.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00008-of-00009.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00008-of-00009.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00008-of-00009.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00008-of-00009.safetensors", "model.layers.23.input_layernorm.weight": "model-00008-of-00009.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00008-of-00009.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00008-of-00009.safetensors", "model.layers.22.input_layernorm.weight": "model-00008-of-00009.safetensors", "model.layers.11.mlp.down_proj.weight": 
"model-00009-of-00009.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00009-of-00009.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00009-of-00009.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00009-of-00009.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00009-of-00009.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00009-of-00009.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00009-of-00009.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00009-of-00009.safetensors", "model.layers.11.input_layernorm.weight": "model-00009-of-00009.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00009-of-00009.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00009-of-00009.safetensors", "model.layers.10.input_layernorm.weight": "model-00009-of-00009.safetensors"}}
special_tokens_map.json CHANGED
@@ -1,20 +1,13 @@
 {
   "bos_token": {
-    "content": "<|bos|>",
+    "content": "<s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<|endoftext|>",
+    "content": "<|im_end|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
-  "padding": {
-    "strategy": {
-      "Fixed": 512
-    },
-    "direction": "Left",
-    "pad_to_multiple_of": null,
-    "pad_id": 32001,
-    "pad_type_id": 0,
-    "pad_token": "<|endoftext|>"
-  },
+  "truncation": null,
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
@@ -46,7 +32,7 @@
     },
     {
       "id": 32000,
-      "content": "<|bos|>",
+      "content": "<|im_end|>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -55,34 +41,7 @@
     },
     {
       "id": 32001,
-      "content": "<|endoftext|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32002,
-      "content": "[PAD]",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32003,
-      "content": "<|ASSISTANT|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32004,
-      "content": "<|USER|>",
+      "content": "<|im_start|>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -112,7 +71,7 @@
     "single": [
       {
         "SpecialToken": {
-          "id": "<|bos|>",
+          "id": "<s>",
           "type_id": 0
         }
       },
@@ -126,7 +85,7 @@
     "pair": [
       {
         "SpecialToken": {
-          "id": "<|bos|>",
+          "id": "<s>",
           "type_id": 0
         }
       },
@@ -138,7 +97,7 @@
       },
       {
         "SpecialToken": {
-          "id": "<|bos|>",
+          "id": "<s>",
           "type_id": 1
         }
       },
@@ -150,13 +109,13 @@
       }
     ],
     "special_tokens": {
-      "<|bos|>": {
-        "id": "<|bos|>",
+      "<s>": {
+        "id": "<s>",
         "ids": [
-          32000
+          1
         ],
         "tokens": [
-          "<|bos|>"
+          "<s>"
         ]
       }
     }
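
Note that the merged fast tokenizer drops the old TinyMistral settings that pinned truncation and padding to 512 tokens. A sketch verifying this with the tokenizers library, assuming a local tokenizer.json:

```python
# Sketch: confirm truncation/padding are no longer baked into the tokenizer.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
assert tok.truncation is None  # was Right / max_length 512
assert tok.padding is None     # was Fixed(512) with pad_id 32001
```
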
tokenizer_config.json CHANGED
@@ -27,7 +27,7 @@
     "special": true
   },
   "32000": {
-    "content": "<|bos|>",
+    "content": "<|im_end|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
@@ -35,31 +35,7 @@
     "special": true
   },
   "32001": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false,
-    "special": true
-  },
-  "32002": {
-    "content": "[PAD]",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false,
-    "special": true
-  },
-  "32003": {
-    "content": "<|ASSISTANT|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false,
-    "special": true
-  },
-  "32004": {
-    "content": "<|USER|>",
+    "content": "<|im_start|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
@@ -68,22 +44,18 @@
     }
   },
   "additional_special_tokens": [],
-  "bos_token": "<|bos|>",
+  "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
+  "eos_token": "<|im_end|>",
   "legacy": true,
-  "max_length": 1536,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_to_multiple_of": null,
-  "pad_token": "<|endoftext|>",
-  "pad_token_type_id": 0,
-  "padding_side": "left",
+  "pad_token": null,
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
-  "stride": 0,
   "tokenizer_class": "LlamaTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
+  "trust_remote_code": false,
   "unk_token": "<unk>",
-  "use_default_system_prompt": true
+  "use_default_system_prompt": true,
+  "use_fast": true
 }
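
With the ChatML chat_template in place and <|im_end|> as EOS, prompts can be built with apply_chat_template. A usage sketch (the repo id is a placeholder for wherever this folder was uploaded):

```python
# Sketch: build a ChatML prompt with the chat_template added in this commit.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/this-repo")  # placeholder repo id
messages = [{"role": "user", "content": "Explain slerp merging in one sentence."}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>user\nExplain slerp merging in one sentence.<|im_end|>\n<|im_start|>assistant\n
```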