commit 89e9a6e (1 parent: 77908bb)
vllm/transformers_utils/config.py
@@ -206,6 +206,8 @@ def recurse_elems(elem: Any):
     config_dict["tie_word_embeddings"] = config_dict.get(
         "tie_embeddings", False)
     config_dict["max_seq_len"] = config_dict.get("max_seq_len", 128_000)
+    config_dict["max_position_embeddings"] = config_dict.get(
+        "max_position_embeddings", 128_000)
 
     if config_dict.get("moe") is not None:
         config_dict["architectures"] = ["MixtralForCausalLM"]
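The added lines mirror the existing max_seq_len defaulting so that consumers reading the HF-style key "max_position_embeddings" also see 128,000 when the source config omits it; before this commit that key was never set by this adaptation. A minimal, runnable sketch of the resulting behavior (adapt_config_dict is a hypothetical name introduced only for this illustration, not vLLM's API):

from typing import Any


def adapt_config_dict(config_dict: dict[str, Any]) -> dict[str, Any]:
    # Standalone illustration of the patched logic above; not vLLM's
    # actual function.
    # Map the source key "tie_embeddings" to the HF-style
    # "tie_word_embeddings", defaulting to False when absent.
    config_dict["tie_word_embeddings"] = config_dict.get(
        "tie_embeddings", False)
    # Default both sequence-length keys to 128k when absent. The second
    # assignment is what the commit adds: HF-style consumers read
    # "max_position_embeddings", which previously stayed unset here.
    config_dict["max_seq_len"] = config_dict.get("max_seq_len", 128_000)
    config_dict["max_position_embeddings"] = config_dict.get(
        "max_position_embeddings", 128_000)
    return config_dict


print(adapt_config_dict({"tie_embeddings": True}))
# -> {'tie_embeddings': True, 'tie_word_embeddings': True,
#     'max_seq_len': 128000, 'max_position_embeddings': 128000}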