Solshine committed
Commit 0fc8bd5
1 Parent(s): 886f89d

Upload folder using huggingface_hub

README.md ADDED
@@ -0,0 +1,62 @@
+ ---
+ base_model:
+ - inceptionai/jais-family-590m
+ - inceptionai/jais-family-590m
+ tags:
+ - merge
+ - mergekit
+ - lazymergekit
+ - inceptionai/jais-family-590m
+ ---
+
+ # Jais-590m-merged
+
+ Jais-590m-merged is a merge of the following models using [LazyMergekit](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb?usp=sharing):
+ * [inceptionai/jais-family-590m](https://huggingface.co/inceptionai/jais-family-590m)
+ * [inceptionai/jais-family-590m](https://huggingface.co/inceptionai/jais-family-590m)
+
+ ## 🧩 Configuration
+
+ ```yaml
+ slices:
+   - sources:
+       - model: inceptionai/jais-family-590m
+         layer_range: [0, 18]
+       - model: inceptionai/jais-family-590m
+         layer_range: [0, 18]
+ merge_method: slerp
+ base_model: inceptionai/jais-family-590m
+ parameters:
+   t:
+     - filter: self_attn
+       value: [0, 0.5, 0.3, 0.7, 1]
+     - filter: mlp
+       value: [1, 0.5, 0.7, 0.3, 0]
+     - value: 0.5
+ dtype: bfloat16
+ ```
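+
+ In a SLERP merge, the five-element `t` lists act as gradient anchors: mergekit spreads them across the merged layer range so each layer gets its own interpolation factor between the two sources (`filter: self_attn` and `filter: mlp` apply separate gradients to attention and MLP weights, and the final `value: 0.5` covers everything else). The snippet below is an illustrative sketch of that idea, not mergekit's implementation; it assumes the anchors are spaced evenly across the 18 layers. Because both sources here are the same checkpoint, SLERP returns the original weights for any `t`.
+
+ ```python
+ # Illustrative sketch (not mergekit's code): expand a 5-point gradient such as
+ # [0, 0.5, 0.3, 0.7, 1] into one interpolation factor per layer, then blend
+ # two weight tensors with spherical linear interpolation (SLERP).
+ import numpy as np
+
+ def per_layer_t(anchors, n_layers=18):
+     # Spread the anchor values evenly over [0, 1] and interpolate per layer.
+     anchor_pos = np.linspace(0.0, 1.0, num=len(anchors))
+     layer_pos = np.linspace(0.0, 1.0, num=n_layers)
+     return np.interp(layer_pos, anchor_pos, anchors)
+
+ def slerp(w0, w1, t, eps=1e-8):
+     # Spherical interpolation between two weight tensors of the same shape.
+     a, b = w0.ravel(), w1.ravel()
+     cos_omega = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b) + eps)
+     omega = np.arccos(np.clip(cos_omega, -1.0, 1.0))
+     if omega < eps:  # (nearly) identical directions: fall back to lerp
+         return (1 - t) * w0 + t * w1
+     return (np.sin((1 - t) * omega) * w0 + np.sin(t * omega) * w1) / np.sin(omega)
+
+ print(per_layer_t([0, 0.5, 0.3, 0.7, 1]))  # one t value per merged layer
+ ```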
+
+ ## 💻 Usage
+
+ ```python
+ # Install dependencies first (in a notebook): !pip install -qU transformers accelerate
+ from transformers import AutoTokenizer
+ import transformers
+ import torch
+
+ model = "Solshine/Jais-590m-merged"
+ messages = [{"role": "user", "content": "What is a large language model?"}]
+
+ tokenizer = AutoTokenizer.from_pretrained(model)
+ prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+ pipeline = transformers.pipeline(
+     "text-generation",
+     model=model,
+     torch_dtype=torch.float16,
+     device_map="auto",
+ )
+
+ outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
+ print(outputs[0]["generated_text"])
+ ```
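+
+ Note that JAIS is a custom architecture: the committed `config.json` maps the model classes to remote code via `auto_map`, so `transformers` generally needs `trust_remote_code=True` to load it. If the pipeline call above cannot resolve the JAIS classes, the following minimal sketch (an assumption, not an official recipe for this repo) loads the model directly:
+
+ ```python
+ # Minimal direct-loading sketch. trust_remote_code=True lets transformers pull
+ # the JAIS modeling code referenced by auto_map in config.json.
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ import torch
+
+ repo = "Solshine/Jais-590m-merged"
+ tokenizer = AutoTokenizer.from_pretrained(repo)
+ model = AutoModelForCausalLM.from_pretrained(
+     repo,
+     torch_dtype=torch.bfloat16,  # matches the merge dtype
+     device_map="auto",
+     trust_remote_code=True,
+ )
+
+ inputs = tokenizer("What is a large language model?", return_tensors="pt").to(model.device)
+ output_ids = model.generate(**inputs, max_new_tokens=128, do_sample=True, temperature=0.7)
+ print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
+ ```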
config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "_name_or_path": "inceptionai/jais-family-590m",
+   "activation_function": "swiglu",
+   "alibi_scaling": null,
+   "architectures": [
+     "JAISLMHeadModel"
+   ],
+   "attn_pdrop": 0.0,
+   "auto_map": {
+     "AutoConfig": "inceptionai/jais-family-590m--configuration_jais.JAISConfig",
+     "AutoModel": "inceptionai/jais-family-590m--modeling_jais.JAISModel",
+     "AutoModelForCausalLM": "inceptionai/jais-family-590m--modeling_jais.JAISLMHeadModel",
+     "AutoModelForQuestionAnswering": "inceptionai/jais-family-590m--modeling_jais.JAISForQuestionAnswering",
+     "AutoModelForSequenceClassification": "inceptionai/jais-family-590m--modeling_jais.JAISForSequenceClassification",
+     "AutoModelForTokenClassification": "inceptionai/jais-family-590m--modeling_jais.JAISForTokenClassification"
+   },
+   "bos_token_id": 0,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 0,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "model_type": "jais",
+   "mup_embeddings_scale": 9.1705785388303,
+   "mup_output_alpha": 1.09518349815769,
+   "mup_scale_qk_dot_by_d": true,
+   "mup_width_scale": 0.16666666666666666,
+   "n_embd": 1536,
+   "n_head": 12,
+   "n_inner": 4096,
+   "n_layer": 18,
+   "n_positions": 2048,
+   "pad_token_id": 0,
+   "position_embedding_type": "alibi",
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.0,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.44.2",
+   "use_cache": true,
+   "vocab_size": 84992
+ }
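
The hyperparameters in this config pin down the parameter count, which can be cross-checked against the `total_size` recorded in `model.safetensors.index.json` below (1,281,110,040 bytes, i.e. 2 bytes per bfloat16 parameter). A rough tally, assuming the layer structure implied by the tensor names in the weight map (fused `c_attn`, SwiGLU `c_fc`/`c_fc2`, tied input/output embeddings, and one ALiBi slope per head):

```python
# Back-of-the-envelope parameter count from the config.json values above.
n_embd, n_head, n_inner, n_layer, vocab = 1536, 12, 4096, 18, 84992

embeddings = vocab * n_embd                              # transformer.wte (tied output head)
attn = n_embd * 3 * n_embd + 3 * n_embd                  # c_attn weight + bias
attn += n_embd * n_embd + n_embd                         # attention c_proj weight + bias
mlp = 2 * (n_embd * n_inner + n_inner)                   # SwiGLU c_fc and c_fc2
mlp += n_inner * n_embd + n_embd                         # MLP c_proj
norms = 2 * 2 * n_embd                                   # ln_1 and ln_2 (weight + bias each)
per_layer = attn + mlp + norms

total = embeddings + n_layer * per_layer + 2 * n_embd + n_head  # + ln_f + ALiBi slopes
print(f"{total:,} parameters")         # 640,555,020
print(f"{total * 2:,} bytes in bf16")  # 1,281,110,040 -- matches total_size in the index
```
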
mergekit_config.yml ADDED
@@ -0,0 +1,17 @@
+
+ slices:
+   - sources:
+       - model: inceptionai/jais-family-590m
+         layer_range: [0, 18]
+       - model: inceptionai/jais-family-590m
+         layer_range: [0, 18]
+ merge_method: slerp
+ base_model: inceptionai/jais-family-590m
+ parameters:
+   t:
+     - filter: self_attn
+       value: [0, 0.5, 0.3, 0.7, 1]
+     - filter: mlp
+       value: [1, 0.5, 0.7, 0.3, 0]
+     - value: 0.5
+ dtype: bfloat16
model-00001-of-00001.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ceda4458d87153c16524199374065ee45878f191529f95c9e1bd5a65279dc10
+ size 1281136720
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.0.4.4", "total_size": 1281110040}, "weight_map": {"transformer.h.0.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.0.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.0.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.0.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.0.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.0.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.0.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.0.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.0.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.0.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.0.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.0.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.0.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.0.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.1.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.1.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.1.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.1.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.1.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.1.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.1.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.1.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.1.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.1.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.1.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.1.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.1.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.1.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.10.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.10.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.10.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.10.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.10.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.10.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.10.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.10.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.10.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.10.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.10.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.10.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.10.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.10.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.11.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.11.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.11.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.11.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.11.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.11.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.11.ln_2.bias": "model-00001-of-00001.safetensors", 
"transformer.h.11.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.11.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.11.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.11.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.11.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.11.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.11.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.12.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.12.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.12.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.12.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.12.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.12.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.12.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.12.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.12.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.12.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.12.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.12.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.12.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.12.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.13.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.13.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.13.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.13.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.13.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.13.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.13.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.13.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.13.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.13.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.13.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.13.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.13.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.13.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.14.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.14.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.14.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.14.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.14.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.14.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.14.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.14.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.14.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.14.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.14.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.14.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.14.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.14.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.15.attn.c_attn.bias": 
"model-00001-of-00001.safetensors", "transformer.h.15.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.15.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.15.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.15.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.15.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.15.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.15.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.15.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.15.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.15.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.15.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.15.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.15.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.16.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.16.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.16.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.16.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.16.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.16.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.16.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.16.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.16.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.16.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.16.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.16.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.16.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.16.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.17.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.17.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.17.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.17.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.17.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.17.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.17.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.17.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.17.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.17.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.17.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.17.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.17.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.17.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.2.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.2.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.2.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.2.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.2.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.2.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.2.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.2.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.2.mlp.c_fc.bias": 
"model-00001-of-00001.safetensors", "transformer.h.2.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.2.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.2.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.2.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.2.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.3.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.3.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.3.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.3.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.3.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.3.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.3.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.3.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.3.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.3.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.3.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.3.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.3.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.3.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.4.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.4.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.4.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.4.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.4.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.4.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.4.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.4.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.4.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.4.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.4.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.4.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.4.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.4.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.5.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.5.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.5.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.5.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.5.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.5.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.5.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.5.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.5.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.5.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.5.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.5.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.5.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.5.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.6.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.6.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.6.attn.c_proj.bias": 
"model-00001-of-00001.safetensors", "transformer.h.6.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.6.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.6.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.6.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.6.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.6.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.6.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.6.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.6.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.6.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.6.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.7.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.7.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.7.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.7.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.7.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.7.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.7.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.7.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.7.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.7.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.7.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.7.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.7.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.7.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.8.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.8.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.8.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.8.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.8.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.8.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.8.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.8.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.8.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.8.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.8.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", "transformer.h.8.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.8.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.8.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.9.attn.c_attn.bias": "model-00001-of-00001.safetensors", "transformer.h.9.attn.c_attn.weight": "model-00001-of-00001.safetensors", "transformer.h.9.attn.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.9.attn.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.h.9.ln_1.bias": "model-00001-of-00001.safetensors", "transformer.h.9.ln_1.weight": "model-00001-of-00001.safetensors", "transformer.h.9.ln_2.bias": "model-00001-of-00001.safetensors", "transformer.h.9.ln_2.weight": "model-00001-of-00001.safetensors", "transformer.h.9.mlp.c_fc.bias": "model-00001-of-00001.safetensors", "transformer.h.9.mlp.c_fc.weight": "model-00001-of-00001.safetensors", "transformer.h.9.mlp.c_fc2.bias": "model-00001-of-00001.safetensors", 
"transformer.h.9.mlp.c_fc2.weight": "model-00001-of-00001.safetensors", "transformer.h.9.mlp.c_proj.bias": "model-00001-of-00001.safetensors", "transformer.h.9.mlp.c_proj.weight": "model-00001-of-00001.safetensors", "transformer.ln_f.bias": "model-00001-of-00001.safetensors", "transformer.ln_f.weight": "model-00001-of-00001.safetensors", "transformer.relative_pe.slopes": "model-00001-of-00001.safetensors", "transformer.wte.weight": "model-00001-of-00001.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "PreTrainedTokenizerFast",
+   "unk_token": "<|endoftext|>"
+ }
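
The committed tokenizer files map every special token (`bos`, `eos`, `pad`, `unk`) to the single `<|endoftext|>` token, consistent with `bos_token_id`, `eos_token_id`, and `pad_token_id` all being 0 in `config.json`. A quick sketch to verify this against the uploaded repo:

```python
# Check that all special tokens resolve to <|endoftext|> with id 0.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Solshine/Jais-590m-merged")
for name in ("bos_token", "eos_token", "pad_token", "unk_token"):
    token = getattr(tok, name)
    print(name, token, tok.convert_tokens_to_ids(token))
# expected: each line shows <|endoftext|> 0
```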