Skip to content

Commit 4cf256a

Browse files
authored
[misc][distributed] fix pp missing layer condition (#6446)
1 parent 64fdc08 commit 4cf256a

File tree

1 file changed

+4
-1
lines changed

1 file changed

+4
-1
lines changed

vllm/model_executor/models/utils.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,10 @@ def get_pp_missing_layer_names(model: torch.nn.Module) -> List[str]:
     missing_layer_names = []
     for name, module in model.named_modules():
        if isinstance(module, PPMissingLayer):
-            missing_layer_names.append(name)
+            # NOTE: the trailing dot is used to match the prefix of the layer.
+            # without the dot, we could match a layer that is not missing,
+            # e.g., 'encoder.layer.1' would match 'encoder.layer.11'
+            missing_layer_names.append(name + '.')
     _model_to_pp_missing_layer_names[model_id] = missing_layer_names

     return missing_layer_names

0 commit comments

Comments (0)