Move _POPTORCH_AVAILABLE and _IPU_AVAILABLE (#16509)
Co-authored-by: Nikhil Shenoy <nikhilshenoy@dhcp-128-189-225-225.ubcsecure.wireless.ubc.ca>
Co-authored-by: Carlos Mocholí <carlossmocholi@gmail.com>
Co-authored-by: Jirka Borovec <6035284+Borda@users.noreply.github.com>
Authored by 4 people on Feb 6, 2023
1 parent 770b792 commit 7bbbe22
Showing 9 changed files with 23 additions and 13 deletions.
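For downstream code, the practical change is only where these flags are imported from; their meaning is unchanged (poptorch installed vs. IPU hardware reachable). A minimal, illustrative sketch of the new import path used by the changed files below; the prints and the commented-out old import are for illustration only, not part of the commit:

# Old location, no longer re-exported from lightning.pytorch.utilities:
#   from lightning.pytorch.utilities import _IPU_AVAILABLE, _POPTORCH_AVAILABLE
# New location introduced by this commit (internal, underscore-prefixed flags):
from lightning.pytorch.accelerators.ipu import _IPU_AVAILABLE, _POPTORCH_AVAILABLE

if _POPTORCH_AVAILABLE:
    # poptorch can be imported; _IPU_AVAILABLE additionally reports whether
    # IPU hardware is actually reachable on this machine.
    print(f"poptorch installed, IPU hardware available: {_IPU_AVAILABLE}")
else:
    print("poptorch is not installed; IPU support is disabled")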
src/lightning/pytorch/accelerators/ipu.py (12 changes: 11 additions & 1 deletion)
@@ -14,10 +14,20 @@
 from typing import Any, Dict, List
 
 import torch
+from lightning_utilities.core.imports import package_available
 
 from lightning.fabric.utilities.types import _DEVICE
 from lightning.pytorch.accelerators.accelerator import Accelerator
-from lightning.pytorch.utilities.imports import _IPU_AVAILABLE
+
+_POPTORCH_AVAILABLE = package_available("poptorch")
+
+if _POPTORCH_AVAILABLE:
+    import poptorch
+
+    _IPU_AVAILABLE = poptorch.ipuHardwareIsAvailable()
+else:
+    poptorch = None
+    _IPU_AVAILABLE = False
 
 
 class IPUAccelerator(Accelerator):
src/lightning/pytorch/strategies/ipu.py (3 changes: 2 additions & 1 deletion)
@@ -23,13 +23,14 @@
 import lightning.pytorch as pl
 from lightning.fabric.plugins import CheckpointIO, ClusterEnvironment
 from lightning.fabric.utilities.cloud_io import get_filesystem
+from lightning.pytorch.accelerators.ipu import _IPU_AVAILABLE, _POPTORCH_AVAILABLE
 from lightning.pytorch.overrides.base import _LightningModuleWrapperBase
 from lightning.pytorch.plugins.precision import PrecisionPlugin
 from lightning.pytorch.strategies.parallel import ParallelStrategy
 from lightning.pytorch.strategies.strategy import TBroadcast
 from lightning.pytorch.strategies.utils import _fp_to_half
 from lightning.pytorch.trainer.states import RunningStage, TrainerFn
-from lightning.pytorch.utilities import _IPU_AVAILABLE, _POPTORCH_AVAILABLE, rank_zero_warn
+from lightning.pytorch.utilities import rank_zero_warn
 from lightning.pytorch.utilities.data import _get_dataloader_init_args_and_kwargs, _reinstantiate_wrapped_cls
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
 from lightning.pytorch.utilities.model_helpers import is_overridden
(next changed file; name not shown)
@@ -35,7 +35,7 @@
 from lightning.pytorch.accelerators.accelerator import Accelerator
 from lightning.pytorch.accelerators.cuda import CUDAAccelerator
 from lightning.pytorch.accelerators.hpu import HPUAccelerator
-from lightning.pytorch.accelerators.ipu import IPUAccelerator
+from lightning.pytorch.accelerators.ipu import _IPU_AVAILABLE, IPUAccelerator
 from lightning.pytorch.accelerators.mps import MPSAccelerator
 from lightning.pytorch.accelerators.tpu import TPUAccelerator
 from lightning.pytorch.plugins import (
@@ -72,7 +72,6 @@
 )
 from lightning.pytorch.strategies.ddp_spawn import _DDP_FORK_ALIASES
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
-from lightning.pytorch.utilities.imports import _IPU_AVAILABLE
 from lightning.pytorch.utilities.rank_zero import rank_zero_info, rank_zero_warn
 
 log = logging.getLogger(__name__)
src/lightning/pytorch/trainer/setup.py (3 changes: 2 additions & 1 deletion)
@@ -24,6 +24,7 @@
     MPSAccelerator,
     TPUAccelerator,
 )
+from lightning.pytorch.accelerators.ipu import _IPU_AVAILABLE
 from lightning.pytorch.loggers.logger import DummyLogger
 from lightning.pytorch.profilers import (
     AdvancedProfiler,
@@ -33,7 +34,7 @@
     SimpleProfiler,
     XLAProfiler,
 )
-from lightning.pytorch.utilities import _HPU_AVAILABLE, _IPU_AVAILABLE
+from lightning.pytorch.utilities import _HPU_AVAILABLE
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
 from lightning.pytorch.utilities.rank_zero import rank_zero_info, rank_zero_warn
 
src/lightning/pytorch/utilities/__init__.py (2 changes: 0 additions & 2 deletions)
@@ -21,9 +21,7 @@
 from lightning.pytorch.utilities.grads import grad_norm  # noqa: F401
 from lightning.pytorch.utilities.imports import (  # noqa: F401
     _HPU_AVAILABLE,
-    _IPU_AVAILABLE,
     _OMEGACONF_AVAILABLE,
-    _POPTORCH_AVAILABLE,
     _TORCH_QUANTIZE_AVAILABLE,
     _TORCHVISION_AVAILABLE,
 )
tests/tests_pytorch/accelerators/test_ipu.py (2 changes: 1 addition & 1 deletion)
@@ -21,13 +21,13 @@
 
 from lightning.pytorch import Callback, seed_everything, Trainer
 from lightning.pytorch.accelerators import IPUAccelerator
+from lightning.pytorch.accelerators.ipu import _IPU_AVAILABLE
 from lightning.pytorch.core.module import LightningModule
 from lightning.pytorch.demos.boring_classes import BoringModel
 from lightning.pytorch.plugins import IPUPrecisionPlugin
 from lightning.pytorch.strategies.ipu import IPUStrategy
 from lightning.pytorch.trainer.states import RunningStage, TrainerFn
 from lightning.pytorch.trainer.supporters import CombinedLoader
-from lightning.pytorch.utilities import _IPU_AVAILABLE
 from lightning.pytorch.utilities.exceptions import MisconfigurationException
 from tests_pytorch.helpers.datamodules import ClassifDataModule
 from tests_pytorch.helpers.runif import RunIf
tests/tests_pytorch/helpers/runif.py (2 changes: 1 addition & 1 deletion)
@@ -22,6 +22,7 @@
 from packaging.version import Version
 
 from lightning.fabric.accelerators.cuda import num_cuda_devices
+from lightning.pytorch.accelerators.ipu import _IPU_AVAILABLE
 from lightning.pytorch.accelerators.mps import MPSAccelerator
 from lightning.pytorch.accelerators.tpu import TPUAccelerator
 from lightning.pytorch.callbacks.progress.rich_progress import _RICH_AVAILABLE
@@ -30,7 +31,7 @@
 from lightning.pytorch.strategies.deepspeed import _DEEPSPEED_AVAILABLE
 from lightning.pytorch.utilities.imports import (
     _HPU_AVAILABLE,
-    _IPU_AVAILABLE,
     _OMEGACONF_AVAILABLE,
     _PSUTIL_AVAILABLE,
     _TORCH_QUANTIZE_AVAILABLE,
(next changed file; name not shown)
@@ -625,10 +625,10 @@ def test_unsupported_tpu_choice(tpu_available):
 
 @mock.patch("lightning.pytorch.accelerators.ipu.IPUAccelerator.is_available", return_value=True)
 def test_unsupported_ipu_choice(mock_ipu_acc_avail, monkeypatch):
+    import lightning.pytorch.accelerators.ipu as ipu_
     import lightning.pytorch.strategies.ipu as ipu
-    import lightning.pytorch.utilities.imports as imports
 
-    monkeypatch.setattr(imports, "_IPU_AVAILABLE", True)
+    monkeypatch.setattr(ipu_, "_IPU_AVAILABLE", True)
     monkeypatch.setattr(ipu, "_IPU_AVAILABLE", True)
     with pytest.raises(ValueError, match=r"accelerator='ipu', precision='bf16'\)` is not supported"):
         Trainer(accelerator="ipu", precision="bf16")
@@ -637,7 +637,7 @@ def test_unsupported_ipu_choice(mock_ipu_acc_avail, monkeypatch):
 
 
 @mock.patch("lightning.pytorch.accelerators.tpu._XLA_AVAILABLE", return_value=False)
-@mock.patch("lightning.pytorch.utilities.imports._IPU_AVAILABLE", return_value=False)
+@mock.patch("lightning.pytorch.accelerators.ipu._IPU_AVAILABLE", return_value=False)
 @mock.patch("lightning.pytorch.utilities.imports._HPU_AVAILABLE", return_value=False)
 def test_devices_auto_choice_cpu(cuda_count_0, *_):
     trainer = Trainer(accelerator="auto", devices="auto")
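Because the flags now live in lightning.pytorch.accelerators.ipu, tests that fake IPU availability must patch that module rather than lightning.pytorch.utilities.imports, as the hunk above does: monkeypatch and mock.patch only affect the module they are pointed at. A hypothetical standalone test sketching the updated target (the test name is illustrative):

def test_ipu_flag_can_be_faked(monkeypatch):
    # Patch the flag at its new home; patching the old utilities location
    # would no longer influence code that reads it from the accelerator module.
    import lightning.pytorch.accelerators.ipu as accel_ipu

    monkeypatch.setattr(accel_ipu, "_IPU_AVAILABLE", True)
    assert accel_ipu._IPU_AVAILABLE is True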
tests/tests_pytorch/utilities/test_imports.py (3 changes: 2 additions & 1 deletion)
@@ -23,8 +23,9 @@
 from lightning_utilities.core.imports import compare_version, RequirementCache
 from torch.distributed import is_available
 
+from lightning.pytorch.accelerators.ipu import _POPTORCH_AVAILABLE
 from lightning.pytorch.strategies.bagua import _BAGUA_AVAILABLE
-from lightning.pytorch.utilities import _OMEGACONF_AVAILABLE, _POPTORCH_AVAILABLE
+from lightning.pytorch.utilities import _OMEGACONF_AVAILABLE
 from tests_pytorch.helpers.runif import RunIf

