
Commit

added xla_device_utils to test
lezwon committed Nov 11, 2020
1 parent 3b569ce commit 0c9316b
Showing 1 changed file with 10 additions and 9 deletions.
pytorch_lightning/utilities/xla_device_utils.py: 19 changes (10 additions, 9 deletions)
@@ -21,13 +21,14 @@
 TORCHXLA_AVAILABLE = importlib.util.find_spec("torch_xla") is not None
 if TORCHXLA_AVAILABLE:
     import torch_xla.core.xla_model as xm
+    import torch_xla.distributed.xla_multiprocessing as xmp
 else:
     xm = None
 
 
-def inner_f(queue, func, *args, **kwargs):  # pragma: no cover
+def inner_f(_, queue, func, *args):  # pragma: no cover
     try:
-        queue.put(func(*args, **kwargs))
+        queue.put(func(*args))
     except Exception:
         import traceback
 
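A note on this first hunk: xmp.spawn calls its target as fn(index, *args), passing each child's process index as the first positional argument, which is why inner_f now takes a throwaway leading parameter (and why the **kwargs support is dropped, since xmp.spawn forwards only positional args). A minimal sketch of that calling convention, assuming torch_xla is installed; the probe function and message are illustrative, not part of this commit:

    # Sketch: xmp.spawn invokes the target with the process index first,
    # then the contents of args=(...). Names here are illustrative.
    import torch_xla.distributed.xla_multiprocessing as xmp


    def probe(index, message):
        # index is injected by xmp.spawn; message comes from args below.
        print(f"child {index}: {message}")


    if __name__ == '__main__':
        # nprocs=1 and start_method='fork' mirror the call in this commit.
        xmp.spawn(probe, args=("hello",), nprocs=1, start_method='fork')
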
@@ -39,13 +40,13 @@ def pl_multi_process(func):
     @functools.wraps(func)
     def wrapper(*args, **kwargs):
         queue = Queue()
-        proc = Process(target=inner_f, args=(queue, func, *args), kwargs=kwargs)
-        proc.start()
-        proc.join(10)
-        try:
-            return queue.get_nowait()
-        except q.Empty:
-            return False
+        xmp.spawn(inner_f,
+                  args=(queue, func, *args),
+                  nprocs=1,
+                  join=True,
+                  daemon=False,
+                  start_method='fork')
+        return queue.get()
 
     return wrapper

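For context, a hedged usage sketch of the decorator after this change: pl_multi_process now forks a single XLA process via xmp.spawn, runs the wrapped function there, and reads its result back from the shared Queue, so a hung device query runs in a child process rather than blocking the caller. The TPU probe below is illustrative, not part of this commit, and assumes torch_xla is installed:

    # Illustrative only: probing for a TPU behind pl_multi_process.
    import torch_xla.core.xla_model as xm

    from pytorch_lightning.utilities.xla_device_utils import pl_multi_process


    @pl_multi_process
    def _is_device_tpu():  # hypothetical helper name
        # Runs inside the forked child; the boolean travels back to the
        # parent through the Queue created in wrapper().
        return xm.xla_device_hw(xm.xla_device()) == "TPU"


    if __name__ == '__main__':
        print(_is_device_tpu())  # True on a TPU host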
