Commit 4aa81f2, committed by myownskyW7
1 Parent(s): 5dcef1d
Update configuration_internlm_xcomposer2.py
configuration_internlm_xcomposer2.py CHANGED
@@ -94,7 +94,7 @@ class InternLMXcomposer2Config(PretrainedConfig):
         bias=True,
         rope_theta=10000,
         rope_scaling=None,
-        attn_implementation="
+        attn_implementation="flash_attention_2",
         **kwargs,
     ):
         self.vocab_size = vocab_size
@@ -119,7 +119,7 @@ class InternLMXcomposer2Config(PretrainedConfig):
 
         self.attn_implementation = attn_implementation
         if self.attn_implementation is None:
-            self.attn_implementation = "
+            self.attn_implementation = "flash_attention_2"
         super().__init__(
             pad_token_id=pad_token_id,
             bos_token_id=bos_token_id,
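For context, the effect of the new default can be observed when the config is loaded without specifying an attention backend. A minimal sketch, assuming a transformers version with trust_remote_code support; the repo id below is a placeholder, since the actual model repository is not named in this commit:

from transformers import AutoConfig

# Hypothetical repo id for illustration; substitute the repository this file lives in.
config = AutoConfig.from_pretrained(
    "your-namespace/internlm-xcomposer2-model",
    trust_remote_code=True,
)

# After this commit, both the signature default and the None-fallback resolve to
# FlashAttention-2, so an unspecified attn_implementation ends up as:
print(config.attn_implementation)  # expected: "flash_attention_2"

Note that the config value only records the default; whether FlashAttention-2 is actually usable still depends on the flash-attn package and supported hardware at model load time.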