fix: remap default rope_type to linear for newer transformers compat
modeling_llada2_moe.py CHANGED (+3 -0)
@@ -101,6 +101,9 @@ class LLaDA2MoeRotaryEmbedding(nn.Module):
             )
         else:
             self.rope_type = "linear"
+        # BC: "default" was removed from ROPE_INIT_FUNCTIONS in newer transformers
+        if self.rope_type == "default":
+            self.rope_type = "linear"
         self.max_seq_len_cached = config.max_position_embeddings
         self.original_max_seq_len = config.max_position_embeddings
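For context, here is a minimal sketch of the patched `__init__` with the three added lines in place. Everything outside those lines (the `rope_scaling` parsing, the `ROPE_INIT_FUNCTIONS` lookup, the buffer registration) is an assumption modeled on stock transformers rotary-embedding code, not taken verbatim from `modeling_llada2_moe.py`:

```python
# Sketch only: reconstructed around the diff context above. Everything
# outside the three added lines is an assumption modeled on typical
# transformers rotary-embedding code, not verbatim LLaDA2 source.
import torch.nn as nn
from transformers.modeling_rope_utils import ROPE_INIT_FUNCTIONS


class LLaDA2MoeRotaryEmbedding(nn.Module):
    def __init__(self, config, device=None):
        super().__init__()
        if getattr(config, "rope_scaling", None) is not None:
            # Configs may carry the new "rope_type" key or the legacy "type" key.
            self.rope_type = config.rope_scaling.get(
                "rope_type", config.rope_scaling.get("type")
            )
        else:
            self.rope_type = "linear"
        # BC: "default" was removed from ROPE_INIT_FUNCTIONS in newer
        # transformers, so remap it to an entry that still exists.
        if self.rope_type == "default":
            self.rope_type = "linear"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings
        # Without the remap, a config saved with rope_type="default" would
        # raise KeyError here on transformers versions that dropped the key.
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
        inv_freq, self.attention_scaling = self.rope_init_fn(config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
```

A quick exercise of the backward-compat path, with hypothetical config values chosen for illustration (note the "linear" init function reads `rope_scaling["factor"]`, so a factor must be present):

```python
from transformers import PretrainedConfig

cfg = PretrainedConfig(
    hidden_size=1024,
    num_attention_heads=8,
    max_position_embeddings=4096,
    rope_theta=10000.0,
    rope_scaling={"rope_type": "default", "factor": 1.0},
)
rope = LLaDA2MoeRotaryEmbedding(cfg)  # "default" is remapped, no KeyError
```

Remapping at load time keeps checkpoints saved with `rope_type="default"` loadable without editing their config.json.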