1 parent 6a8172c commit 106a9d1
exllamav2/config.py
@@ -260,7 +260,7 @@ def prepare(self, no_tensors: bool = False):
 if scaling_type == "linear":
     assert "factor" in rs, "'factor' missing from 'rope_scaling' config"
     self.scale_pos_emb = rs.get("factor", 1.0)
-if scaling_type == "su":
+if scaling_type == "su" or scaling_type == "longrope":
     assert "long_factor" in rs, "'long_factor' missing from 'rope_scaling' config"
     assert "short_factor" in rs, "'short_factor' missing from 'rope_scaling' config"
     assert "original_max_position_embeddings" in read_config, \
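The change lets models whose "rope_scaling" block declares the newer "longrope" type take the same validation path as the older "su" type. Below is a minimal, hypothetical sketch of the relevant config fields that would satisfy the assertions in this hunk; the values are placeholders, and "rs" in the diff refers to the "rope_scaling" sub-dict of read_config.

# Hypothetical example config fields (placeholder values) that pass the checks above
read_config = {
    "original_max_position_embeddings": 4096,   # checked at the top level of the config
    "rope_scaling": {
        "type": "longrope",                     # previously only "su" reached this branch
        "long_factor": [1.0, 1.5, 2.0],         # placeholder per-dimension factors
        "short_factor": [1.0, 1.0, 1.0],
    },
}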