Upload folder using huggingface_hub
h2o-danube2-1.8b-chat-q4f16_1-android.tar CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:69301c19ec4db6a0a49e45da0e006c7c5dc6625d04606813b29fce09263467a6
+size 359961
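The tar is tracked with Git LFS, so the diff above only changes the pointer file (hash and size), never the archive bytes themselves. A minimal sketch of resolving the pointer to the real file with huggingface_hub, the upload tool named in the commit message; the repo id below is a placeholder, not taken from this commit:

```python
# Minimal sketch: resolve the LFS pointer to the actual tarball.
from huggingface_hub import hf_hub_download

tar_path = hf_hub_download(
    repo_id="your-org/h2o-danube2-1.8b-chat-q4f16_1-android",  # placeholder, not from this commit
    filename="h2o-danube2-1.8b-chat-q4f16_1-android.tar",
)
print(tar_path)  # local cached path to the resolved 359961-byte archive
```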
mlc-chat-config.json CHANGED
@@ -8,20 +8,21 @@
     "num_hidden_layers": 24,
     "rms_norm_eps": 1e-05,
     "vocab_size": 32000,
-    "position_embedding_base": 10000,
+    "position_embedding_base": 10000.0,
     "num_key_value_heads": 8,
     "head_dim": 80,
-    "
-    "
-    "
+    "context_window_size": 8192,
+    "sliding_window_size": 8192,
+    "prefill_chunk_size": -1,
+    "attention_sink_size": 0,
     "tensor_parallel_shards": 1,
     "max_batch_size": 80
   },
   "vocab_size": 32000,
-  "context_window_size":
-  "sliding_window_size":
-  "prefill_chunk_size":
-  "attention_sink_size":
+  "context_window_size": 8192,
+  "sliding_window_size": 8192,
+  "prefill_chunk_size": 8192,
+  "attention_sink_size": 0,
   "tensor_parallel_shards": 1,
   "mean_gen_len": 128,
   "max_gen_len": 512,
@@ -32,15 +33,14 @@
   "repetition_penalty": 1.1,
   "top_p": 0.95,
   "conv_template": {
-    "name": "
-    "system_template": "",
+    "name": "LM",
+    "system_template": "{system_message}",
     "system_message": "",
     "system_prefix_token_ids": [],
-    "add_role_after_system_message":
+    "add_role_after_system_message": true,
     "roles": {
       "user": "<|prompt|>",
-      "assistant": "<|
-      "tool": "<|prompt|>"
+      "assistant": "<|answer|>"
     },
     "role_templates": {
       "user": "{user_message}",
@@ -49,13 +49,12 @@
     },
     "messages": [],
     "seps": [
-      "
-
-    "role_content_sep": " ",
-    "role_empty_sep": " ",
-    "stop_str": [
-      "[INST]"
+      "</s>",
+      "</s>"
     ],
+    "role_content_sep": "",
+    "role_empty_sep": "",
+    "stop_str": ["</s>"],
     "stop_token_ids": [
       2
     ],
@@ -71,4 +70,4 @@
     "tokenizer_config.json"
   ],
   "version": "0.1.0"
-}
+}
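In MLC-LLM configs, these fields are conventionally understood as follows: context_window_size caps the total sequence length, sliding_window_size sets the window for sliding-window attention, prefill_chunk_size bounds how many prompt tokens are prefilled per step (-1 meaning derive a default), and attention_sink_size reserves initial tokens as attention sinks. Those semantics are not stated in this commit; a quick sanity check of the rewritten file using only the standard library:

```python
# Quick sanity check of the updated mlc-chat-config.json.
import json

with open("mlc-chat-config.json") as f:
    cfg = json.load(f)

# Top-level sequence-length settings added by this commit.
for key in ("context_window_size", "sliding_window_size",
            "prefill_chunk_size", "attention_sink_size"):
    print(f"{key} = {cfg[key]}")
# Expected per the diff: 8192, 8192, 8192, 0. Note the nested model
# config instead carries "prefill_chunk_size": -1 (derive a default).
print(cfg["conv_template"]["name"])  # -> LM
```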
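The rewritten conv_template matches the h2o-danube2 chat format: a user turn is wrapped as <|prompt|> ... </s> and the model completes after <|answer|>. A hedged sketch of how these fields flatten one exchange into model input; values are copied from the config above, but the actual MLC-LLM runtime assembly may differ in edge cases:

```python
# Hedged sketch of the "LM" conversation template applied to one exchange.
ROLES = {"user": "<|prompt|>", "assistant": "<|answer|>"}
SEPS = ["</s>", "</s>"]   # appended after the user / assistant turns
ROLE_CONTENT_SEP = ""     # between a role tag and its content
ROLE_EMPTY_SEP = ""       # after a role tag left open for generation

def build_prompt(user_message: str) -> str:
    # system_template is "{system_message}" with an empty system_message,
    # so the system preamble contributes nothing for this model.
    prompt = ROLES["user"] + ROLE_CONTENT_SEP + user_message + SEPS[0]
    # Open the assistant turn and let the model complete it.
    prompt += ROLES["assistant"] + ROLE_EMPTY_SEP
    return prompt

print(build_prompt("Why is the sky blue?"))
# -> <|prompt|>Why is the sky blue?</s><|answer|>
```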
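Stopping is now driven by stop_str ["</s>"] and stop_token_ids [2] rather than the old "[INST]" marker. A sketch of the check a decode loop would apply, assuming token id 2 decodes to </s> in this tokenizer:

```python
# Sketch of the stop check implied by the new config: generation ends on
# token id 2 or when "</s>" appears in the decoded text.
STOP_TOKEN_IDS = {2}     # from "stop_token_ids": [2]
STOP_STRS = ("</s>",)    # from "stop_str": ["</s>"]

def should_stop(token_id: int, decoded_text: str) -> bool:
    return token_id in STOP_TOKEN_IDS or any(s in decoded_text for s in STOP_STRS)

print(should_stop(2, ""))           # True: EOS token id
print(should_stop(42, "Hi</s>"))    # True: stop string emitted
print(should_stop(42, "Hi there"))  # False: keep decoding
```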