
Commit 1f64398

Merge branch 'Lightning-AI:master' into local-downloader
2 parents a73b804 + 2e512d4 commit 1f64398

File tree: 7 files changed (+4 -73 lines)

.azure/gpu-tests-pytorch.yml (+1 -8)

@@ -105,16 +105,9 @@ jobs:
       done
     displayName: "Adjust dependencies"

-  - bash: |
-      pip install -q -r .actions/requirements.txt
-      python .actions/assistant.py requirements_prune_pkgs \
-        --packages="[lightning-colossalai]" \
-        --req_files="[requirements/_integrations/strategies.txt]"
-    displayName: "Prune packages" # these have installation issues
-
   - bash: |
       extra=$(python -c "print({'lightning': 'pytorch-'}.get('$(PACKAGE_NAME)', ''))")
-      pip install -e ".[${extra}dev]" -r requirements/_integrations/strategies.txt pytest-timeout -U --find-links="${TORCH_URL}"
+      pip install -e ".[${extra}dev]" pytest-timeout -U --find-links="${TORCH_URL}"
     displayName: "Install package & dependencies"

   - bash: pip uninstall -y lightning

requirements/_integrations/strategies.txt (-4)

This file was deleted.

src/lightning/pytorch/callbacks/gradient_accumulation_scheduler.py (+1 -8)

@@ -27,7 +27,6 @@
 import lightning.pytorch as pl
 from lightning.pytorch.callbacks.callback import Callback
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
-from lightning.pytorch.utilities.imports import _LIGHTNING_COLOSSALAI_AVAILABLE
 from lightning.pytorch.utilities.model_helpers import is_overridden
 from lightning.pytorch.utilities.rank_zero import rank_zero_warn

@@ -125,13 +124,7 @@ def on_train_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule")
         # local import to avoid circular import
         from lightning.pytorch.strategies import DeepSpeedStrategy

-        unsupported_strategies = [DeepSpeedStrategy]
-        if _LIGHTNING_COLOSSALAI_AVAILABLE:
-            from lightning_colossalai import ColossalAIStrategy
-
-            unsupported_strategies.append(ColossalAIStrategy)
-
-        if isinstance(trainer.strategy, tuple(unsupported_strategies)):
+        if isinstance(trainer.strategy, DeepSpeedStrategy):
             raise RuntimeError(
                 f"The `{type(trainer.strategy).__name__}` does not support `accumulate_grad_batches` changing"
                 " between epochs."

src/lightning/pytorch/trainer/connectors/accelerator_connector.py (+1 -20)

@@ -62,10 +62,7 @@
 )
 from lightning.pytorch.strategies.ddp import _DDP_FORK_ALIASES
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
-from lightning.pytorch.utilities.imports import (
-    _LIGHTNING_COLOSSALAI_AVAILABLE,
-    _habana_available_and_importable,
-)
+from lightning.pytorch.utilities.imports import _habana_available_and_importable
 from lightning.pytorch.utilities.rank_zero import rank_zero_info, rank_zero_warn

 log = logging.getLogger(__name__)
@@ -191,9 +188,6 @@ def _check_config_and_set_final_flags(

         self._strategy_flag = strategy

-        if strategy == "colossalai" and not _LIGHTNING_COLOSSALAI_AVAILABLE:
-            raise ModuleNotFoundError(str(_LIGHTNING_COLOSSALAI_AVAILABLE))
-
         if strategy != "auto" and strategy not in self._registered_strategies and not isinstance(strategy, Strategy):
             raise ValueError(
                 f"You selected an invalid strategy name: `strategy={strategy!r}`."
@@ -490,12 +484,6 @@ def _check_and_init_precision(self) -> Precision:
         if isinstance(self.accelerator, HPUAccelerator):
             return HPUPrecisionPlugin(self._precision_flag)

-        if _LIGHTNING_COLOSSALAI_AVAILABLE:
-            from lightning_colossalai import ColossalAIPrecisionPlugin, ColossalAIStrategy
-
-            if isinstance(self.strategy, ColossalAIStrategy):
-                return ColossalAIPrecisionPlugin(self._precision_flag)
-
         if isinstance(self.strategy, (SingleDeviceXLAStrategy, XLAStrategy)):
             return XLAPrecision(self._precision_flag)  # type: ignore
         if isinstance(self.strategy, DeepSpeedStrategy):
@@ -648,13 +636,6 @@ def _set_torch_flags(

 def _register_external_accelerators_and_strategies() -> None:
     """Registers all known strategies in other packages."""
-    if _LIGHTNING_COLOSSALAI_AVAILABLE:
-        from lightning_colossalai import ColossalAIStrategy
-
-        # TODO: Prevent registering multiple times
-        if "colossalai" not in StrategyRegistry:
-            ColossalAIStrategy.register_strategies(StrategyRegistry)
-
     if _habana_available_and_importable():
         from lightning_habana import HPUAccelerator, HPUParallelStrategy, SingleHPUStrategy
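Note: the block removed from _register_external_accelerators_and_strategies is the hook an external package uses to plug into Lightning's StrategyRegistry. A hedged sketch of that mechanism, with a hypothetical MyStrategy and registration name (the exact register() signature may differ across Lightning versions):

from lightning.pytorch.strategies import SingleDeviceStrategy, StrategyRegistry


class MyStrategy(SingleDeviceStrategy):
    """Hypothetical third-party strategy, for illustration only."""

    @classmethod
    def register_strategies(cls, strategy_registry) -> None:
        # device="cpu" is stored as an init parameter for later instantiation.
        strategy_registry.register("my_strategy", cls, description="Example external strategy", device="cpu")


# Mirrors the removed guard: avoid registering the same name twice.
if "my_strategy" not in StrategyRegistry:
    MyStrategy.register_strategies(StrategyRegistry)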

src/lightning/pytorch/utilities/imports.py (-1)

@@ -28,7 +28,6 @@

 _OMEGACONF_AVAILABLE = package_available("omegaconf")
 _TORCHVISION_AVAILABLE = RequirementCache("torchvision")
-_LIGHTNING_COLOSSALAI_AVAILABLE = RequirementCache("lightning-colossalai")


 @functools.lru_cache(maxsize=128)
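Note: the deleted flag was a RequirementCache from lightning_utilities; its truth value reports whether the requirement is satisfied, and str() yields a human-readable message, which is why the removed connector code raised ModuleNotFoundError(str(_LIGHTNING_COLOSSALAI_AVAILABLE)). A small usage sketch with an illustrative requirement string:

from lightning_utilities.core.imports import RequirementCache

# Illustrative optional dependency; swap in a real requirement as needed.
_SOME_PKG_AVAILABLE = RequirementCache("some-optional-pkg>=0.1")


def _require_some_pkg() -> None:
    # Mirrors the removed connector check: fail loudly when the optional
    # integration is requested but not installed.
    if not _SOME_PKG_AVAILABLE:
        raise ModuleNotFoundError(str(_SOME_PKG_AVAILABLE))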

tests/tests_pytorch/callbacks/test_gradient_accumulation_scheduler.py (+1 -16)

@@ -20,12 +20,6 @@
 from lightning.pytorch.demos.boring_classes import BoringModel
 from lightning.pytorch.strategies import DeepSpeedStrategy
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
-from lightning.pytorch.utilities.imports import _LIGHTNING_COLOSSALAI_AVAILABLE
-
-if _LIGHTNING_COLOSSALAI_AVAILABLE:
-    from lightning_colossalai import ColossalAIStrategy
-else:
-    ColossalAIStrategy = None


 @pytest.mark.parametrize("accumulate_grad_batches", [1, 2, 3])
@@ -94,16 +88,7 @@ def test_invalid_values_for_grad_accum_scheduler(scheduling):
     _ = GradientAccumulationScheduler(scheduling=scheduling)


-@pytest.mark.parametrize(
-    "strategy_class",
-    [
-        pytest.param(
-            ColossalAIStrategy,
-            marks=pytest.mark.skipif(not _LIGHTNING_COLOSSALAI_AVAILABLE, reason="Requires ColossalAI strategy"),
-        ),
-        DeepSpeedStrategy,
-    ],
-)
+@pytest.mark.parametrize("strategy_class", [DeepSpeedStrategy])
 def test_unsupported_strategies(strategy_class):
     """Test that an error is raised for strategies that require the gradient accumulation factor to be fixed."""
     scheduler = GradientAccumulationScheduler({1: 2})
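Note: the removed parametrization used pytest.param(..., marks=pytest.mark.skipif(...)) so the ColossalAI case only ran when the optional package was installed. A self-contained sketch of that pattern with illustrative names:

import pytest
from lightning_utilities.core.imports import RequirementCache

# Illustrative optional dependency; the requirement string is a placeholder.
_OPTIONAL_AVAILABLE = RequirementCache("some-optional-pkg")


@pytest.mark.parametrize(
    "backend",
    [
        "default",
        pytest.param(
            "optional",
            marks=pytest.mark.skipif(not _OPTIONAL_AVAILABLE, reason="Requires the optional package"),
        ),
    ],
)
def test_backend_selection(backend):
    # The "optional" case is skipped automatically when the dependency is missing.
    assert backend in ("default", "optional")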

tests/tests_pytorch/trainer/connectors/test_accelerator_connector.py (-16)

@@ -59,7 +59,6 @@
 from lightning.pytorch.utilities.imports import (
     _LIGHTNING_HABANA_AVAILABLE,
 )
-from lightning_utilities.core.imports import package_available

 from tests_pytorch.conftest import mock_cuda_count, mock_mps_count, mock_tpu_available, mock_xla_available
 from tests_pytorch.helpers.runif import RunIf
@@ -845,21 +844,6 @@ def get_defaults(cls):
     assert connector_default == trainer_defaults[name]


-@RunIf(min_cuda_gpus=1)  # trigger this test on our GPU pipeline, because we don't install the package on the CPU suite
-@pytest.mark.xfail(raises=ImportError, reason="Not updated to latest API")
-@pytest.mark.skipif(not package_available("lightning_colossalai"), reason="Requires Colossal AI Strategy")
-def test_colossalai_external_strategy(monkeypatch):
-    with mock.patch(
-        "lightning.pytorch.trainer.connectors.accelerator_connector._LIGHTNING_COLOSSALAI_AVAILABLE", False
-    ), pytest.raises(ModuleNotFoundError):
-        Trainer(strategy="colossalai")
-
-    from lightning_colossalai import ColossalAIStrategy
-
-    trainer = Trainer(strategy="colossalai", precision="16-mixed")
-    assert isinstance(trainer.strategy, ColossalAIStrategy)
-
-
 class DeviceMock(Mock):
     def __instancecheck__(self, instance):
         return True
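Note: the removed test combined mock.patch on an availability flag with pytest.raises to cover the "integration not installed" error path. A self-contained sketch of that testing pattern, using a local stand-in flag instead of Lightning internals:

from unittest import mock

import pytest

# Stand-in module-level availability flag, mirroring the _LIGHTNING_*_AVAILABLE flags.
_PLUGIN_AVAILABLE = True


def create_plugin() -> object:
    if not _PLUGIN_AVAILABLE:
        raise ModuleNotFoundError("plugin package is not installed")
    return object()


def test_plugin_missing_raises():
    # Patch the flag to False for the duration of the test and assert the error path.
    with mock.patch(f"{__name__}._PLUGIN_AVAILABLE", False), pytest.raises(ModuleNotFoundError):
        create_plugin()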
