Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

📦 Update torch to 2.3.1 #368

Merged
merged 12 commits on Jul 17, 2024
2 changes: 0 additions & 2 deletions caikit_nlp/toolkit/torch_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@

# Third Party
from torch import cuda
from torch.distributed.elastic.multiprocessing.api import Std
from torch.distributed.launcher.api import LaunchConfig
import torch.distributed as dist

Expand Down Expand Up @@ -100,6 +99,5 @@ def get_torch_elastic_launch_config(
rdzv_backend="static",
rdzv_endpoint=f"{master_addr}:{master_port}",
rdzv_configs=rdzv_configs,
tee=Std.ALL,
max_restarts=max_restarts,
)
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ dependencies = [
"scipy>=1.8.1",
"sentence-transformers>=2.3.1,<2.4.0",
"tokenizers>=0.13.3",
"torch>=2.2.2,<2.3.0",
"torch>=2.3.1,<2.4.0",
"tqdm>=4.65.0",
"transformers>=4.32.0",
"peft==0.6.0",
Expand Down
16 changes: 11 additions & 5 deletions tests/modules/text_embedding/test_embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -1018,7 +1018,7 @@ def test_encoding_order(loaded_model: EmbeddingModule, truncate_input_tokens):

# test order by comparing value of individual embeddings in sequence
for i, e in enumerate(separate_vectors):
assert np.allclose(e, combined_vectors[i])
assert np.isclose(e, combined_vectors[i], rtol=1e-03, atol=1e-05).all()
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Question: what is the difference between using `np.isclose(...).all()` here versus `np.allclose(...)`? `allclose` also accepts the `rtol`/`atol` tolerance parameters: https://numpy.org/doc/stable/reference/generated/numpy.allclose.html


# test expected failure case by reordering
shifted_separate_vectors = separate_vectors[1:] + [separate_vectors[0]]
Expand All @@ -1029,7 +1029,7 @@ def test_encoding_order(loaded_model: EmbeddingModule, truncate_input_tokens):
not approx(e) == combined_vectors[i]
), "expected altered order to not match combined vectors"
assert not np.allclose(
e, combined_vectors[i]
e, combined_vectors[i], rtol=1e-05, atol=1e-08
), "expected altered order to not match combined"


Expand Down Expand Up @@ -1105,7 +1105,13 @@ def test_same_same(loaded_model: EmbeddingModule, truncate_input_tokens):
assert np.allclose(e, combined_vectors[i])

# Next ensuring that the two identical sentences yield identical results (and 3rd does not)
assert np.array_equal(combined_vectors[0], combined_vectors[1])
assert np.isclose(
combined_vectors[0], combined_vectors[1], rtol=1e-05, atol=1e-08
).all()
assert not np.array_equal(combined_vectors[1], combined_vectors[2])
assert np.array_equal(separate_vectors[0], separate_vectors[1])
assert not np.array_equal(separate_vectors[1], separate_vectors[2])
assert np.isclose(
separate_vectors[0], separate_vectors[1], rtol=1e-05, atol=1e-08
).all()
assert not np.isclose(
separate_vectors[1], separate_vectors[2], rtol=1e-05, atol=1e-08
).all()
Loading