diff --git a/requirements.txt b/requirements.txt
index 013896fafca..6c2278db4ab 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,9 +4,9 @@ psutil
 ray >= 2.9
 sentencepiece  # Required for LLaMA tokenizer.
 numpy
-torch == 2.1.2
+torch == 2.2.1
 transformers >= 4.39.1  # Required for StarCoder2 & Llava.
-xformers == 0.0.23.post1  # Required for CUDA 12.1.
+xformers == 0.0.25  # Requires PyTorch 2.2.1.
 fastapi
 uvicorn[standard]
 pydantic >= 2.0  # Required for OpenAI server.
diff --git a/vllm/model_executor/models/qwen2.py b/vllm/model_executor/models/qwen2.py
index 9d1a338cf39..4fb97f876f3 100644
--- a/vllm/model_executor/models/qwen2.py
+++ b/vllm/model_executor/models/qwen2.py
@@ -413,7 +413,11 @@ def load_weights(self,
                 # Skip loading extra bias for GPTQ models.
                 if name.endswith(".bias") and name not in params_dict:
                     continue
-                param = params_dict[name]
+                if name not in params_dict:
+                    # No matching model parameter for this checkpoint
+                    # weight; skip it rather than fabricating an orphan
+                    # Parameter that the module would never use.
+                    continue
+                param = params_dict[name]
                 weight_loader = getattr(param, "weight_loader",
                                         default_weight_loader)
                 weight_loader(param, loaded_weight)