Saideepthi55 committed on
Commit
9d19b63
1 Parent(s): be0d478

Upload tokenizer

Browse files
Files changed (2) hide show
  1. special_tokens_map.json +2 -2
  2. tokenizer_config.json +7 -0
special_tokens_map.json CHANGED
@@ -9,7 +9,7 @@
9
  "cls_token": {
10
  "content": "<s>",
11
  "lstrip": false,
12
- "normalized": true,
13
  "rstrip": false,
14
  "single_word": false
15
  },
@@ -37,7 +37,7 @@
37
  "sep_token": {
38
  "content": "</s>",
39
  "lstrip": false,
40
- "normalized": true,
41
  "rstrip": false,
42
  "single_word": false
43
  },
 
9
  "cls_token": {
10
  "content": "<s>",
11
  "lstrip": false,
12
+ "normalized": false,
13
  "rstrip": false,
14
  "single_word": false
15
  },
 
37
  "sep_token": {
38
  "content": "</s>",
39
  "lstrip": false,
40
+ "normalized": false,
41
  "rstrip": false,
42
  "single_word": false
43
  },
tokenizer_config.json CHANGED
@@ -55,11 +55,18 @@
55
  "do_lower_case": true,
56
  "eos_token": "</s>",
57
  "mask_token": "<mask>",
 
58
  "model_max_length": 512,
 
59
  "pad_token": "<pad>",
 
 
60
  "sep_token": "</s>",
 
61
  "strip_accents": null,
62
  "tokenize_chinese_chars": true,
63
  "tokenizer_class": "MPNetTokenizer",
 
 
64
  "unk_token": "[UNK]"
65
  }
 
55
  "do_lower_case": true,
56
  "eos_token": "</s>",
57
  "mask_token": "<mask>",
58
+ "max_length": 512,
59
  "model_max_length": 512,
60
+ "pad_to_multiple_of": null,
61
  "pad_token": "<pad>",
62
+ "pad_token_type_id": 0,
63
+ "padding_side": "right",
64
  "sep_token": "</s>",
65
+ "stride": 0,
66
  "strip_accents": null,
67
  "tokenize_chinese_chars": true,
68
  "tokenizer_class": "MPNetTokenizer",
69
+ "truncation_side": "right",
70
+ "truncation_strategy": "longest_first",
71
  "unk_token": "[UNK]"
72
  }