From 843e02588fa44ca273cff6555457ed32446e39b8 Mon Sep 17 00:00:00 2001
From: statelesshz
Date: Fri, 24 May 2024 10:55:35 +0800
Subject: [PATCH 1/3] Improve `transformers-cli env` reporting

---
 src/transformers/commands/env.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/transformers/commands/env.py b/src/transformers/commands/env.py
index 8567bbcf5b61..46baba36af78 100644
--- a/src/transformers/commands/env.py
+++ b/src/transformers/commands/env.py
@@ -26,6 +26,7 @@
     is_safetensors_available,
     is_tf_available,
     is_torch_available,
+    is_torch_npu_available,
 )
 
 from . import BaseTransformersCLICommand
@@ -88,6 +89,7 @@ def run(self):
 
             pt_version = torch.__version__
             pt_cuda_available = torch.cuda.is_available()
+            pt_npu_available = is_torch_npu_available()
 
         tf_version = "not installed"
         tf_cuda_available = "NA"
@@ -132,6 +134,11 @@ def run(self):
             "Using GPU in script?": "",
             "Using distributed or parallel set-up in script?": "",
         }
+        if pt_cuda_available:
+            info["GPU type"] = torch.cuda.get_device_name()
+        elif pt_npu_available:
+            info["NPU type"] = torch.npu.get_device_name()
+            info["CANN version"] = torch.version.cann
 
         print("\nCopy-and-paste the text below in your GitHub issue and FILL OUT the two last points.\n")
         print(self.format_dict(info))

From 9d90a76e5319254cc8ebf9aaf76a836770ccb93f Mon Sep 17 00:00:00 2001
From: statelesshz
Date: Tue, 28 May 2024 12:15:57 +0800
Subject: [PATCH 2/3] move the line `"Using GPU in script?": ""` into the if
 conditional statement

---
 src/transformers/commands/env.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/transformers/commands/env.py b/src/transformers/commands/env.py
index 46baba36af78..f51e81c11fc3 100644
--- a/src/transformers/commands/env.py
+++ b/src/transformers/commands/env.py
@@ -131,10 +131,10 @@ def run(self):
             "Flax version (CPU?/GPU?/TPU?)": f"{flax_version} ({jax_backend})",
             "Jax version": f"{jax_version}",
             "JaxLib version": f"{jaxlib_version}",
-            "Using GPU in script?": "",
             "Using distributed or parallel set-up in script?": "",
         }
         if pt_cuda_available:
+            info["Using GPU in script?"] = ""
             info["GPU type"] = torch.cuda.get_device_name()
         elif pt_npu_available:
             info["NPU type"] = torch.npu.get_device_name()

From 162568112736bedd2a9071fb2fdd98214a7d0e03 Mon Sep 17 00:00:00 2001
From: statelesshz
Date: Tue, 28 May 2024 12:30:02 +0800
Subject: [PATCH 3/3] same option for npu

---
 src/transformers/commands/env.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/transformers/commands/env.py b/src/transformers/commands/env.py
index f51e81c11fc3..da9ca6660be1 100644
--- a/src/transformers/commands/env.py
+++ b/src/transformers/commands/env.py
@@ -137,6 +137,7 @@ def run(self):
             info["Using GPU in script?"] = ""
             info["GPU type"] = torch.cuda.get_device_name()
         elif pt_npu_available:
+            info["Using NPU in script?"] = ""
             info["NPU type"] = torch.npu.get_device_name()
             info["CANN version"] = torch.version.cann
 