Locutusque committed on
Commit
a2cb81d
1 Parent(s): d46c3f7

Upload Qwen2ForCausalLM (#3)

Browse files

- Upload Qwen2ForCausalLM (14cadd4076d1d22e542b509c9a3ac6ba6e575eca)

config.json CHANGED
@@ -21,7 +21,7 @@
21
  "sliding_window": 131072,
22
  "tie_word_embeddings": true,
23
  "torch_dtype": "bfloat16",
24
- "transformers_version": "4.41.2",
25
  "use_cache": true,
26
  "use_sliding_window": false,
27
  "vocab_size": 151936
 
21
  "sliding_window": 131072,
22
  "tie_word_embeddings": true,
23
  "torch_dtype": "bfloat16",
24
+ "transformers_version": "4.41.1",
25
  "use_cache": true,
26
  "use_sliding_window": false,
27
  "vocab_size": 151936
generation_config.json CHANGED
@@ -2,5 +2,5 @@
2
  "bos_token_id": 151643,
3
  "eos_token_id": 151643,
4
  "max_new_tokens": 2048,
5
- "transformers_version": "4.41.2"
6
  }
 
2
  "bos_token_id": 151643,
3
  "eos_token_id": 151643,
4
  "max_new_tokens": 2048,
5
+ "transformers_version": "4.41.1"
6
  }
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6becacdaceb57418cc33dc42edfa1f31ceec3a996a45a740189032926bb67770
3
  size 1975314632
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8833d13a56ece580d6d0736f49d36429fd868e17f8024d8455102a61829d7d8b
3
  size 1975314632
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:08c73213616564efe38315b5d382f2e1636f3a151adad0d486df0f050f6dd592
3
- size 1578899928
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d50b51cbb3a02d938b3ecbb44428a282968dd90da25db6ac4e1c65cf879d1f16
3
+ size 1112152304
model.safetensors.index.json CHANGED
@@ -1,9 +1,8 @@
1
  {
2
  "metadata": {
3
- "total_size": 3554176000
4
  },
5
  "weight_map": {
6
- "lm_head.weight": "model-00002-of-00002.safetensors",
7
  "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
8
  "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
9
  "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
 
1
  {
2
  "metadata": {
3
+ "total_size": 3087428608
4
  },
5
  "weight_map": {
 
6
  "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
7
  "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
8
  "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",