Upload folder using huggingface_hub
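The commit title indicates these files were pushed with the `huggingface_hub` client. A minimal sketch of such an upload, assuming a hypothetical local folder path and repo id (neither is part of this commit):

```python
# Sketch only: folder_path and repo_id are placeholders, not values taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_folder(
    folder_path="./checkpoint",   # hypothetical local folder holding the shards and tokenizer files
    repo_id="user/model",         # hypothetical target repository
    commit_message="Upload folder using huggingface_hub",
)
```

The large `.bin` shards are tracked through Git LFS, so the repository itself only stores the pointer files shown in the diffs below.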
pytorch_model-00001-of-00004.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:90d2f6e107e9589ad7c6dfeaad28da51f452e7eac6045526da433cbd19cc39ea
 size 4995511092
pytorch_model-00002-of-00004.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:37ba5c3782286b8f62dc1ae580a3f7a84c502a6c73a47cde7184ea4e25f0bae4
 size 4982972660
pytorch_model-00003-of-00004.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6cada0a8a5646d0e6ca4f45ea75b1ff87aadca892c0d5b4cd93bb5e0f49bd8fd
 size 4982972724
pytorch_model-00004-of-00004.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:79e790b6d7d627aad92e57d65ff126ff680204e7668ba29d4e99854861e31548
 size 2113996412
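Each of the four shards above is a Git LFS pointer: `oid` is the SHA-256 of the shard's contents and `size` is its length in bytes, so a downloaded shard can be checked against its pointer. A minimal sketch of that check (the local path is an assumption about where the shard was downloaded):

```python
# Verify a downloaded shard against the sha256 oid recorded in its LFS pointer above.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "79e790b6d7d627aad92e57d65ff126ff680204e7668ba29d4e99854861e31548"  # oid from the pointer above
assert sha256_of("pytorch_model-00004-of-00004.bin") == expected  # assumes the shard sits in the current directory
```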
special_tokens_map.json
CHANGED
@@ -1,8 +1,4 @@
 {
-  "additional_special_tokens": [
-    "<|im_start|>",
-    "<|im_end|>"
-  ],
   "bos_token": {
     "content": "<bos>",
     "lstrip": false,
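This change drops `<|im_start|>` and `<|im_end|>` from the tokenizer's additional special tokens; the matching edit to tokenizer_config.json appears below. A minimal sketch of checking the loaded tokenizer after this commit, assuming `transformers` is installed (the repo id is a placeholder, not named anywhere in this commit):

```python
# Placeholder repo id; this commit does not identify the repository.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/model")
print(tok.additional_special_tokens)  # expected to be [] after this commit
print(tok.bos_token)                  # "<bos>", unchanged by this commit
```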
tokenizer_config.json
CHANGED
@@ -51,10 +51,7 @@
       "special": true
     }
   },
-  "additional_special_tokens": [
-    "<|im_start|>",
-    "<|im_end|>"
-  ],
+  "additional_special_tokens": [],
   "bos_token": "<bos>",
   "chat_template": "{% if messages[0]['role'] == 'user' or messages[0]['role'] == 'system' %}{{ bos_token }}{% endif %}{% for message in messages %}{{ '<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n' }}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% elif messages[-1]['role'] == 'assistant' %}{{ eos_token }}{% endif %}",
   "clean_up_tokenization_spaces": false,
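The retained `chat_template` still wraps each turn in ChatML-style `<|im_start|>`/`<|im_end|>` markers even though they are no longer listed as additional special tokens. A minimal sketch of rendering that template directly with Jinja2 (the `eos_token` value is an assumption; it is not shown in this diff):

```python
# Render the chat_template above outside of transformers, using jinja2 directly.
from jinja2 import Template

chat_template = (
    "{% if messages[0]['role'] == 'user' or messages[0]['role'] == 'system' %}{{ bos_token }}{% endif %}"
    "{% for message in messages %}"
    "{{ '<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n' }}"
    "{% endfor %}"
    "{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}"
    "{% elif messages[-1]['role'] == 'assistant' %}{{ eos_token }}{% endif %}"
)

messages = [{"role": "user", "content": "Hello!"}]

print(Template(chat_template).render(
    messages=messages,
    bos_token="<bos>",
    eos_token="<eos>",  # assumption: the eos token string is not part of this diff
    add_generation_prompt=True,
))
# <bos><|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
```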