Update config.json
config.json +4 -4
@@ -4,7 +4,7 @@
   "model_name": "or4cl3ai/Aiden_t5",
   "text": "Hey my name is Mariama! How are you?",
   "output_dir": "./output",
-  "max_length":
+  "max_length": 500,
   "temperature": 0.8,
   "top_p": 0.9,
   "num_return_sequences": 1,
@@ -19,9 +19,9 @@
       "name": "self_reflection",
       "type": "transformer",
       "parameters": {
-        "num_layers":
-        "hidden_size":
-        "num_heads":
+        "num_layers": 360,
+        "hidden_size": 7250,
+        "num_heads": 120,
         "attention_dropout": 0.1,
         "relu_dropout": 0.1,
         "layer_norm_epsilon": 1e-12,
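The commit fills in values that were previously missing after the colons (the removed lines end at the key, which is not valid JSON): `max_length` is set to 500, and the `self_reflection` module gets `num_layers` 360, `hidden_size` 7250, and `num_heads` 120. The commit itself does not show how this file is consumed; below is a minimal sketch of reading the generation settings with the Hugging Face `transformers` library. The script itself, and the use of `AutoModelForSeq2SeqLM` (assuming `Aiden_t5` is a T5-style seq2seq model), are assumptions for illustration, not part of this repository.

import json

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Load the generation settings from the repository's config.json.
with open("config.json") as f:
    cfg = json.load(f)

# Assumption: the model is T5-style, so a seq2seq head is appropriate.
tokenizer = AutoTokenizer.from_pretrained(cfg["model_name"])
model = AutoModelForSeq2SeqLM.from_pretrained(cfg["model_name"])

inputs = tokenizer(cfg["text"], return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_length=cfg["max_length"],                      # 500 after this commit
    temperature=cfg["temperature"],                    # 0.8
    top_p=cfg["top_p"],                                # 0.9
    num_return_sequences=cfg["num_return_sequences"],  # 1
    do_sample=True,  # temperature/top_p only take effect when sampling
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Note that `temperature` and `top_p` are ignored by greedy decoding, hence the explicit `do_sample=True`; the `modules` parameters (`num_layers`, `hidden_size`, `num_heads`) would be read by whatever custom code builds the `self_reflection` transformer, which is not shown in this commit.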