From bb674f2a36a9ff7ee01927f4abdc3a3c3ed1ced2 Mon Sep 17 00:00:00 2001
From: Jirka Borovec
Date: Wed, 2 Dec 2020 09:40:55 +0100
Subject: [PATCH] aval

---
 pytorch_lightning/core/lightning.py |  2 +-
 pytorch_lightning/core/optimizer.py | 29 +++--------------------------
 2 files changed, 4 insertions(+), 27 deletions(-)

diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index a4a22bc43c59a0..79566254e3eb4b 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -36,7 +36,7 @@
 from pytorch_lightning.core.memory import ModelSummary
 from pytorch_lightning.core.saving import ALLOWED_CONFIG_TYPES, PRIMITIVE_TYPES, ModelIO
 from pytorch_lightning.core.step_result import Result
-from pytorch_lightning.utilities import TPU_AVAILABLE, AMPType, rank_zero_warn, XLADeviceUtils
+from pytorch_lightning.utilities import TPU_AVAILABLE, rank_zero_warn
 from pytorch_lightning.utilities.device_dtype_mixin import DeviceDtypeModuleMixin
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args
diff --git a/pytorch_lightning/core/optimizer.py b/pytorch_lightning/core/optimizer.py
index 66ce64b0c6887f..e80cb203eada43 100644
--- a/pytorch_lightning/core/optimizer.py
+++ b/pytorch_lightning/core/optimizer.py
@@ -11,38 +11,15 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import collections
-import copy
-import inspect
-import os
-import re
-import tempfile
+
 import types
-from abc import ABC
-from argparse import Namespace
-from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Union
+from typing import Callable, Optional
 from weakref import proxy
 
-import torch
-from torch import ScriptModule, Tensor
-from torch.nn import Module
-from torch.optim import SGD
 from torch.optim.optimizer import Optimizer
 
-from pytorch_lightning import _logger as log
-from pytorch_lightning.callbacks import Callback
-from pytorch_lightning.core.grads import GradInformation
-from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks
-from pytorch_lightning.core.memory import ModelSummary
-from pytorch_lightning.core.saving import ALLOWED_CONFIG_TYPES, PRIMITIVE_TYPES, ModelIO
-from pytorch_lightning.core.step_result import Result
-from pytorch_lightning.utilities import AMPType, rank_zero_warn
-from pytorch_lightning.utilities.device_dtype_mixin import DeviceDtypeModuleMixin
+from pytorch_lightning.utilities import TPU_AVAILABLE
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args
-from pytorch_lightning.utilities.xla_device_utils import XLADeviceUtils
-
-TPU_AVAILABLE = XLADeviceUtils.tpu_device_exists()
 
 if TPU_AVAILABLE:
     import torch_xla.core.xla_model as xm
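
A minimal sketch of the import pattern the optimizer.py hunk ends up with, assuming only the module paths already shown in the diff above; the TPU availability flag is taken from the shared pytorch_lightning.utilities namespace instead of this module recomputing it via XLADeviceUtils.tpu_device_exists():

    # The availability flag is computed once inside pytorch_lightning.utilities
    # and re-imported here, instead of each module probing the XLA device itself.
    from pytorch_lightning.utilities import TPU_AVAILABLE

    if TPU_AVAILABLE:
        # torch_xla is only imported when an XLA/TPU device was detected, so
        # CPU/GPU-only installs never need torch_xla at import time.
        import torch_xla.core.xla_model as xm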