Update max_position_embeddings to 16384

#8 opened by cjdonahoe-ms
Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -9,7 +9,7 @@
   "hidden_size": 5120,
   "initializer_range": 0.02,
   "intermediate_size": 13824,
-  "max_position_embeddings": 4096,
+  "max_position_embeddings": 16384,
   "max_sequence_length": 16384,
   "model_type": "llama",
   "num_attention_heads": 40,