Update for security issues #3500

Merged 3 commits on May 14, 2024
1 change: 1 addition & 0 deletions .ci/ipas_default.config
@@ -97,6 +97,7 @@ exclude_dirs: [
'.vscode/',
'.git/',
'build/',
+'tests',
]

### (optional) plugin settings - some test plugins require configuration data
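For context: `exclude_dirs` entries in this scanner config are matched against file paths, so the added `'tests'` entry keeps test code out of the security scan. A rough illustration of that path-filtering idea in Python (the `is_excluded` helper and the exact matching semantics are assumptions for illustration, not the scanner's real implementation):

```python
from pathlib import Path

# Illustrative approximation of how an exclude_dirs list might be applied.
EXCLUDE_DIRS = [".vscode/", ".git/", "build/", "tests"]

def is_excluded(path: Path) -> bool:
    """Return True if any exclude_dirs entry occurs in the file's path string."""
    return any(entry in str(path) for entry in EXCLUDE_DIRS)

print(is_excluded(Path("tests/unit/test_engine.py")))  # True, skipped by the scan
print(is_excluded(Path("src/otx/engine/engine.py")))   # False, still scanned
```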
6 changes: 3 additions & 3 deletions .ci/requirements/benchmark/requirements.txt
@@ -182,9 +182,9 @@ jedi==0.19.1 \
    --hash=sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd \
    --hash=sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0
    # via ipython
-jinja2==3.1.3 \
-    --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \
-    --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
    # via nbconvert
jsonschema==4.21.1 \
    --hash=sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f \
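These benchmark requirements are hash-pinned, so a bump like jinja2 3.1.3 → 3.1.4 is normally re-resolved by a tool rather than edited by hand. A sketch of how that might be done, assuming the file is maintained with pip-tools from a companion `requirements.in` (the `.in` path below is an assumption, not something shown in this PR):

```python
# Sketch only: re-resolve a hash-pinned requirements file with pip-tools,
# upgrading a single package. Assumes pip-compile is installed and a matching
# requirements.in sits next to the generated requirements.txt.
import subprocess  # nosec B404 - same suppression style the PR applies elsewhere

def bump_pin(requirements_in: str, package_spec: str) -> None:
    """Regenerate the pinned file with hashes, upgrading only `package_spec`."""
    subprocess.run(  # nosec B603 - fixed argument list, no shell involved
        ["pip-compile", "--generate-hashes", "--upgrade-package", package_spec, requirements_in],
        check=True,
    )

bump_pin(".ci/requirements/benchmark/requirements.in", "jinja2==3.1.4")
```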
2 changes: 1 addition & 1 deletion for_developers/regression_test/requirements.in
@@ -1,2 +1,2 @@
-mlflow==2.12.1
+mlflow==2.12.2
psycopg2-binary==2.9.9
12 changes: 6 additions & 6 deletions for_developers/regression_test/requirements.txt
@@ -545,9 +545,9 @@ matplotlib==3.8.3 \
    --hash=sha256:f386cf162b059809ecfac3bcc491a9ea17da69fa35c8ded8ad154cd4b933d5ec \
    --hash=sha256:fa93695d5c08544f4a0dfd0965f378e7afc410d8672816aff1e81be1f45dbf2e
    # via mlflow
-mlflow==2.12.1 \
-    --hash=sha256:4c8f631df7ceea75c53464c976f3fef5bd0f80fc4ba5e871f3416d8c139a0312 \
-    --hash=sha256:aa92aebb2379a9c5484cbe901cdf779d5408ac96a641e4b1f8a2d1ff974db7c9
+mlflow==2.12.2 \
+    --hash=sha256:38dd04710fe64ee8229b7233b4d91db32c3ff887934c40d926246a566c886c0b \
+    --hash=sha256:d712f1af9d44f1eb9e1baee8ca64f7311e185b7572fc3c1e0a83a4c8ceff6aad
    # via -r requirements.in
numpy==1.26.4 \
    --hash=sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b \
@@ -1051,9 +1051,9 @@ websocket-client==1.7.0 \
    --hash=sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6 \
    --hash=sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588
    # via docker
-werkzeug==3.0.1 \
-    --hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \
-    --hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10
+werkzeug==3.0.3 \
+    --hash=sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18 \
+    --hash=sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8
    # via flask
zipp==3.17.0 \
    --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -50,7 +50,7 @@ dev = [
"pytest-mock",
"pytest-csv",
"pytest-cov",
"mlflow==2.11.1", # For perf benchmark
"mlflow==2.12.2", # For perf benchmark
"py-cpuinfo==9.0.0", # For perf benchmark
"openpyxl", # For perf benchmark
]
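With mlflow now pinned to 2.12.2 both in the regression-test pins above and in the dev extra here, a quick throwaway check that a freshly installed environment actually resolves the bumped versions could look like this (not part of the PR, just a convenience snippet):

```python
# Throwaway check: report whether the environment matches the bumped pins.
from importlib.metadata import PackageNotFoundError, version

expected = {"mlflow": "2.12.2", "jinja2": "3.1.4", "werkzeug": "3.0.3"}
for package, wanted in expected.items():
    try:
        installed = version(package)
    except PackageNotFoundError:
        installed = "not installed"
    status = "OK" if installed == wanted else "CHECK"
    print(f"{package}: installed {installed}, expected {wanted} [{status}]")
```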
@@ -6,7 +6,7 @@
from __future__ import annotations

import logging as log
-import pickle
+import pickle # nosec B403 used pickle for dumping object
from collections import defaultdict
from copy import deepcopy
from itertools import product
2 changes: 1 addition & 1 deletion src/otx/cli/utils/installation.py
@@ -9,7 +9,7 @@
import os
import platform
import re
-import subprocess
+import subprocess # nosec B404
from importlib.metadata import requires
from importlib.util import find_spec
from pathlib import Path
4 changes: 2 additions & 2 deletions src/otx/core/data/dataset/action_detection.py
@@ -5,7 +5,7 @@

from __future__ import annotations

-import pickle
+import pickle # nosec B403
from functools import partial
from pathlib import Path
from typing import Callable
@@ -90,7 +90,7 @@ def _get_proposals(frame_path: str, proposal_file: str | None) -> np.ndarray:
if not proposal_file_path.exists():
return np.array([[0, 0, 1, 1]], dtype=np.float64)
with Path.open(proposal_file_path, "rb") as f:
-info = pickle.load(f) # noqa: S301
+info = pickle.load(f) # noqa: S301 # nosec: B301 used only for getting dataset
return (
info[",".join(Path(frame_path).stem.rsplit("_", 1))][:, :4]
if ",".join(Path(frame_path).stem.rsplit("_", 1)) in info
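The Python-side changes all follow one pattern: Bandit flags the `import pickle` itself as B403 and `pickle.load` calls as B301, while Ruff's bandit rules flag the load as S301. Since these objects are produced and consumed by the project itself, the PR suppresses the warnings with a short justification instead of switching serializers. A condensed illustration of the pattern (`load_trusted` is a hypothetical helper, not code from this repository):

```python
import pickle  # nosec B403 - Bandit flags the import itself
from pathlib import Path

def load_trusted(path: Path) -> dict:
    """Hypothetical helper: deserialize a file this pipeline wrote earlier."""
    with path.open("rb") as f:
        # Ruff (S301) and Bandit (B301) both warn on pickle.load; the data is
        # generated locally, so the warning is acknowledged rather than fixed.
        return pickle.load(f)  # noqa: S301 # nosec B301
```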
4 changes: 2 additions & 2 deletions src/otx/core/model/visual_prompting.py
@@ -6,7 +6,7 @@
from __future__ import annotations

import logging as log
-import pickle
+import pickle # nosec: B403 used pickle dump and load only to share inference results
from collections import defaultdict
from copy import deepcopy
from functools import partial
@@ -1329,7 +1329,7 @@ def load_reference_info(self, default_root_dir: Path | str, *args, **kwargs) ->
if (
path_reference_info := _infer_reference_info_root / self.reference_info_dir / "reference_info.pickle"
).is_file():
-reference_info: dict[str, np.ndarray] = pickle.load(path_reference_info.open("rb")) # noqa: S301
+reference_info: dict[str, np.ndarray] = pickle.load(path_reference_info.open("rb")) # noqa: S301 # nosec: B301
self.reference_feats = reference_info.get(
"reference_feats",
np.zeros((0, 1, self.model["decoder"].embed_dim), dtype=np.float32),