Skip to content

Commit

Permalink
Merge branch 'develop' into bs/3d
Browse files Browse the repository at this point in the history
  • Loading branch information
nmanovic authored Sep 22, 2023
2 parents 898f2d4 + ee1d421 commit 46e3e73
Show file tree
Hide file tree
Showing 64 changed files with 1,543 additions and 598 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -279,7 +279,7 @@ jobs:

- uses: actions/setup-node@v3
with:
node-version: '16.x'
node-version: '18.x'

- name: Download CVAT server image
uses: actions/download-artifact@v3
Expand Down
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Changed
- Do not reload annotation view when renew the job or update job state (<https://github.com/opencv/cvat/pull/6851>)
- Now images from cloud buckets are loaded in parallel when preparing a chunk (<https://github.com/opencv/cvat/pull/6881>)

### Deprecated
- TBD
Expand All @@ -25,6 +26,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
(<https://github.com/opencv/cvat/pull/6862>)
- Intensity level from .bin lidar data ignored when converting .bin -> .pcd
(<https://github.com/opencv/cvat/pull/6862>)
- Downloading additional data from cloud storage if use_cache=true and job_file_mapping are specified
(<https://github.com/opencv/cvat/pull/6879>)
- Leaving an organization (<https://github.com/opencv/cvat/pull/6422>)

### Security
- TBD
Expand Down
2 changes: 1 addition & 1 deletion cvat-core/src/organization.ts
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,7 @@ Object.defineProperties(Organization.prototype.leave, {
const result = await serverProxy.organizations.members(this.slug, 1, 10, {
filter: JSON.stringify({
and: [{
'==': [{ var: 'user' }, user.id],
'==': [{ var: 'user' }, user.username],
}],
}),
});
Expand Down
2 changes: 1 addition & 1 deletion cvat-ui/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "cvat-ui",
"version": "1.56.1",
"version": "1.56.2",
"description": "CVAT single-page application",
"main": "src/index.tsx",
"scripts": {
Expand Down
7 changes: 5 additions & 2 deletions cvat-ui/src/actions/organization-actions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -212,14 +212,17 @@ export function inviteOrganizationMembersAsync(
};
}

export function leaveOrganizationAsync(organization: any): ThunkAction {
export function leaveOrganizationAsync(
organization: any,
onLeaveSuccess?: () => void,
): ThunkAction {
return async function (dispatch, getState) {
const { user } = getState().auth;
dispatch(organizationActions.leaveOrganization());
try {
await organization.leave(user);
dispatch(organizationActions.leaveOrganizationSuccess());
localStorage.removeItem('currentOrganization');
if (onLeaveSuccess) onLeaveSuccess();
} catch (error) {
dispatch(organizationActions.leaveOrganizationFailed(error));
}
Expand Down
7 changes: 5 additions & 2 deletions cvat-ui/src/components/organization-page/member-item.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@
// SPDX-License-Identifier: MIT

import React from 'react';
import { useSelector } from 'react-redux';
import Select from 'antd/lib/select';
import Text from 'antd/lib/typography/Text';
import { Row, Col } from 'antd/lib/grid';
import moment from 'moment';
import { DeleteOutlined } from '@ant-design/icons';
import Modal from 'antd/lib/modal';
import { CombinedState } from 'reducers';

export interface Props {
membershipInstance: any;
Expand All @@ -24,6 +26,7 @@ function MemberItem(props: Props): JSX.Element {
user, joined_date: joinedDate, role, invitation,
} = membershipInstance;
const { username, firstName, lastName } = user;
const { username: selfUserName } = useSelector((state: CombinedState) => state.auth.user);

return (
<Row className='cvat-organization-member-item' justify='space-between'>
Expand Down Expand Up @@ -61,7 +64,7 @@ function MemberItem(props: Props): JSX.Element {
</Select>
</Col>
<Col span={1} className='cvat-organization-member-item-remove'>
{role !== 'owner' ? (
{(role === 'owner' || selfUserName === username) ? null : (
<DeleteOutlined
onClick={() => {
Modal.confirm({
Expand All @@ -78,7 +81,7 @@ function MemberItem(props: Props): JSX.Element {
});
}}
/>
) : null}
)}
</Col>
</Row>
);
Expand Down
5 changes: 4 additions & 1 deletion cvat-ui/src/components/organization-page/top-bar.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,10 @@ function OrganizationTopBar(props: Props): JSX.Element {
onClick={() => {
Modal.confirm({
onOk: () => {
dispatch(leaveOrganizationAsync(organizationInstance));
dispatch(leaveOrganizationAsync(organizationInstance, () => {
localStorage.removeItem('currentOrganization');
window.location.reload();
}));
},
className: 'cvat-modal-organization-leave-confirm',
content: (
Expand Down
132 changes: 75 additions & 57 deletions cvat/apps/engine/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@
import zipfile
from datetime import datetime
from io import BytesIO
from tempfile import NamedTemporaryFile
import shutil
import tempfile

from typing import Optional, Tuple

import cv2
Expand Down Expand Up @@ -111,6 +113,75 @@ def _get_frame_provider_class():
FrameProvider # TODO: remove circular dependency
return FrameProvider

from contextlib import contextmanager

@staticmethod
@contextmanager
def _get_images(db_data, chunk_number):
    """Yield the list of (frame_or_path, source_path, meta) tuples for one chunk.

    For video data, items are decoded frames; for image data, items are file
    paths. When the data lives in cloud storage, the chunk's files are bulk
    downloaded into a temporary directory, which is removed when the context
    manager exits (the yielded paths are only valid inside the `with` block).

    :param db_data: Data model instance describing the task's storage/manifest.
    :param chunk_number: zero-based index of the chunk to read.
    """
    images = []
    tmp_dir = None
    upload_dir = {
        StorageChoice.LOCAL: db_data.get_upload_dirname(),
        StorageChoice.SHARE: settings.SHARE_ROOT,
        StorageChoice.CLOUD_STORAGE: db_data.get_upload_dirname(),
    }[db_data.storage]

    try:
        if hasattr(db_data, 'video'):
            source_path = os.path.join(upload_dir, db_data.video.path)

            reader = VideoDatasetManifestReader(manifest_path=db_data.get_manifest_path(),
                source_path=source_path, chunk_number=chunk_number,
                chunk_size=db_data.chunk_size, start=db_data.start_frame,
                stop=db_data.stop_frame, step=db_data.get_frame_step())
            for frame in reader:
                images.append((frame, source_path, None))
        else:
            reader = ImageDatasetManifestReader(manifest_path=db_data.get_manifest_path(),
                chunk_number=chunk_number, chunk_size=db_data.chunk_size,
                start=db_data.start_frame, stop=db_data.stop_frame,
                step=db_data.get_frame_step())
            if db_data.storage == StorageChoice.CLOUD_STORAGE:
                db_cloud_storage = db_data.cloud_storage
                assert db_cloud_storage, 'Cloud storage instance was deleted'
                credentials = Credentials()
                credentials.convert_from_db({
                    'type': db_cloud_storage.credentials_type,
                    'value': db_cloud_storage.credentials,
                })
                details = {
                    'resource': db_cloud_storage.resource,
                    'credentials': credentials,
                    'specific_attributes': db_cloud_storage.get_specific_attributes()
                }
                cloud_storage_instance = get_cloud_storage_instance(
                    cloud_provider=db_cloud_storage.provider_type, **details)

                tmp_dir = tempfile.mkdtemp(prefix='cvat')
                files_to_download = []
                checksums = []
                for item in reader:
                    file_name = f"{item['name']}{item['extension']}"
                    fs_filename = os.path.join(tmp_dir, file_name)

                    files_to_download.append(file_name)
                    checksums.append(item.get('checksum', None))
                    images.append((fs_filename, fs_filename, None))

                # All chunk files are fetched in one request to parallelize
                # downloads on the provider side.
                cloud_storage_instance.bulk_download_to_dir(
                    files=files_to_download, upload_dir=tmp_dir)

                # Validate each downloaded file against its manifest checksum.
                # BUG FIX: the previous code zipped `checksums` with the
                # (path, path, None) tuples in `images`, so md5_hash() received
                # a tuple instead of a file path, the warning logged the value
                # of `file_name` leaked from the loop above instead of the
                # mismatched file, and the "no checksum in manifest" warning
                # was silently dropped.
                for checksum, file_name in zip(checksums, files_to_download):
                    fs_filename = os.path.join(tmp_dir, file_name)
                    if not checksum:
                        slogger.cloud_storage[db_cloud_storage.id].warning(
                            'A manifest file does not contain checksum for image {}'.format(file_name))
                    elif md5_hash(fs_filename) != checksum:
                        slogger.cloud_storage[db_cloud_storage.id].warning(
                            'Hash sums of files {} do not match'.format(file_name))
            else:
                for item in reader:
                    source_path = os.path.join(upload_dir, f"{item['name']}{item['extension']}")
                    images.append((source_path, source_path, None))

        yield images
    finally:
        # Temporary files exist only for cloud storage; clean them up even if
        # the consumer raised inside the `with` block.
        if db_data.storage == StorageChoice.CLOUD_STORAGE and tmp_dir is not None:
            shutil.rmtree(tmp_dir)

def _prepare_task_chunk(self, db_data, quality, chunk_number):
FrameProvider = self._get_frame_provider_class()

Expand All @@ -127,64 +198,11 @@ def _prepare_task_chunk(self, db_data, quality, chunk_number):
kwargs["dimension"] = DimensionType.DIM_3D
writer = writer_classes[quality](image_quality, **kwargs)

images = []
buff = BytesIO()
upload_dir = {
StorageChoice.LOCAL: db_data.get_upload_dirname(),
StorageChoice.SHARE: settings.SHARE_ROOT,
StorageChoice.CLOUD_STORAGE: db_data.get_upload_dirname(),
}[db_data.storage]
if hasattr(db_data, 'video'):
source_path = os.path.join(upload_dir, db_data.video.path)

reader = VideoDatasetManifestReader(manifest_path=db_data.get_manifest_path(),
source_path=source_path, chunk_number=chunk_number,
chunk_size=db_data.chunk_size, start=db_data.start_frame,
stop=db_data.stop_frame, step=db_data.get_frame_step())
for frame in reader:
images.append((frame, source_path, None))
else:
reader = ImageDatasetManifestReader(manifest_path=db_data.get_manifest_path(),
chunk_number=chunk_number, chunk_size=db_data.chunk_size,
start=db_data.start_frame, stop=db_data.stop_frame,
step=db_data.get_frame_step())
if db_data.storage == StorageChoice.CLOUD_STORAGE:
db_cloud_storage = db_data.cloud_storage
assert db_cloud_storage, 'Cloud storage instance was deleted'
credentials = Credentials()
credentials.convert_from_db({
'type': db_cloud_storage.credentials_type,
'value': db_cloud_storage.credentials,
})
details = {
'resource': db_cloud_storage.resource,
'credentials': credentials,
'specific_attributes': db_cloud_storage.get_specific_attributes()
}
cloud_storage_instance = get_cloud_storage_instance(cloud_provider=db_cloud_storage.provider_type, **details)
for item in reader:
file_name = f"{item['name']}{item['extension']}"
with NamedTemporaryFile(mode='w+b', prefix='cvat', suffix=file_name.replace(os.path.sep, '#'), delete=False) as temp_file:
source_path = temp_file.name
buf = cloud_storage_instance.download_fileobj(file_name)
temp_file.write(buf.getvalue())
temp_file.flush()
checksum = item.get('checksum', None)
if not checksum:
slogger.cloud_storage[db_cloud_storage.id].warning('A manifest file does not contain checksum for image {}'.format(item.get('name')))
if checksum and not md5_hash(source_path) == checksum:
slogger.cloud_storage[db_cloud_storage.id].warning('Hash sums of files {} do not match'.format(file_name))
images.append((source_path, source_path, None))
else:
for item in reader:
source_path = os.path.join(upload_dir, f"{item['name']}{item['extension']}")
images.append((source_path, source_path, None))
writer.save_as_chunk(images, buff)
with self._get_images(db_data, chunk_number) as images:
writer.save_as_chunk(images, buff)
buff.seek(0)
if db_data.storage == StorageChoice.CLOUD_STORAGE:
images = [image[0] for image in images if os.path.exists(image[0])]
for image_path in images:
os.remove(image_path)

return buff, mime_type

def prepare_selective_job_chunk(self, db_job: Job, quality, chunk_number: int):
Expand Down
27 changes: 21 additions & 6 deletions cvat/apps/engine/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -791,12 +791,18 @@ def __init__(self, *args, **kwargs):

class JobFileMapping(serializers.ListField):
"""
Represents a file-to-job mapping. Useful to specify a custom job
configuration during task creation. This option is not compatible with
most other job split-related options. Files in the jobs must not overlap or repeat.
Represents a file-to-job mapping.
Useful to specify a custom job configuration during task creation.
This option is not compatible with most other job split-related options.
Files in the jobs must not overlap or repeat.
Job file mapping files must be a subset of the input files.
If directories are specified in server_files, all files obtained by recursive search
in the specified directories will be used as input files.
In case of missing items in the input files, an error will be raised.
Example:
[
["file1.jpg", "file2.jpg"], # job #1 files
["file3.png"], # job #2 files
["file4.jpg", "file5.png", "file6.bmp"], # job #3 files
Expand Down Expand Up @@ -824,9 +830,15 @@ class DataSerializer(serializers.ModelSerializer):
When false, video chunks are represented as video segments
"""))
client_files = ClientFileSerializer(many=True, default=[],
help_text="Uploaded files")
help_text=textwrap.dedent("""
Uploaded files.
Must contain all files from job_file_mapping if job_file_mapping is not empty.
"""))
server_files = ServerFileSerializer(many=True, default=[],
help_text="Paths to files from a file share mounted on the server, or from a cloud storage")
help_text=textwrap.dedent("""
Paths to files from a file share mounted on the server, or from a cloud storage.
Must contain all files from job_file_mapping if job_file_mapping is not empty.
"""))
server_files_exclude = serializers.ListField(required=False, default=[],
child=serializers.CharField(max_length=1024),
help_text=textwrap.dedent("""\
Expand All @@ -845,7 +857,10 @@ class DataSerializer(serializers.ModelSerializer):
""")
)
remote_files = RemoteFileSerializer(many=True, default=[],
help_text="Direct download URLs for files")
help_text=textwrap.dedent("""
Direct download URLs for files.
Must contain all files from job_file_mapping if job_file_mapping is not empty.
"""))
use_cache = serializers.BooleanField(default=False,
help_text=textwrap.dedent("""\
Enable or disable task data chunk caching for the task.
Expand Down
9 changes: 9 additions & 0 deletions cvat/apps/engine/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -623,6 +623,15 @@ def _create_thread(

data['server_files'].extend(additional_files)

# We only need to process the files specified in job_file_mapping
if job_file_mapping is not None:
filtered_files = []
for f in itertools.chain.from_iterable(job_file_mapping):
if f not in data['server_files']:
raise ValidationError(f"Job mapping file {f} is not specified in input files")
filtered_files.append(f)
data['server_files'] = filtered_files

if db_data.storage_method == models.StorageMethodChoice.FILE_SYSTEM or not settings.USE_CACHE:
_download_data_from_cloud_storage(db_data.cloud_storage, data['server_files'], upload_dir)
is_data_in_cloud = False
Expand Down
11 changes: 9 additions & 2 deletions cvat/apps/iam/rules/memberships.rego
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ allow {
input.resource.organization.id == input.auth.organization.id
}

# maintainer of the organization can change the role of any member and remove any member except himself/another maintainer/owner
allow {
{ utils.CHANGE_ROLE, utils.DELETE }[input.scope]
input.resource.is_active
Expand All @@ -81,22 +82,28 @@ allow {
organizations.OWNER,
organizations.MAINTAINER
}[input.resource.role]
input.resource.user.id != input.auth.user.id
}


# owner of the organization can change the role of any member and remove any member except himself
allow {
{ utils.CHANGE_ROLE, utils.DELETE }[input.scope]
input.resource.is_active
input.resource.organization.id == input.auth.organization.id
utils.has_perm(utils.USER)
organizations.is_owner
input.resource.user.id != input.auth.user.id
input.resource.role != organizations.OWNER
}

# member can leave the organization except case when member is the owner
allow {
input.scope == utils.DELETE
input.resource.is_active
utils.is_sandbox
input.resource.role != organizations.OWNER
organizations.is_member
input.resource.organization.id == input.auth.organization.id
input.resource.user.id == input.auth.user.id
input.resource.role != organizations.OWNER
utils.has_perm(utils.WORKER)
}
10 changes: 5 additions & 5 deletions cvat/apps/iam/rules/tests/configs/memberships.csv
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ list,Membership,Organization,N/A,,GET,/memberships,None,Worker
view,Membership,Sandbox,None,,GET,/membership/{id},Admin,N/A
view,Membership,Sandbox,Self,,GET,/membership/{id},None,N/A
view,Membership,Organization,"None, Self",,GET,/membership/{id},None,Worker
change:role,Membership,Organization,"None, Self","resource[""role""] not in [""maintainer"", ""owner""]",PATCH,/membership/{id},User,Maintainer
change:role,Membership,Organization,"None, Self","resource[""role""] != ""owner""",PATCH,/membership/{id},User,Owner
delete,Membership,Organization,"None, Self","resource[""role""] not in [""maintainer"", ""owner""]",DELETE,/membership/{id},User,Maintainer
delete,Membership,Organization,"None, Self","resource[""role""] != ""owner""",DELETE,/membership/{id},User,Owner
delete,Membership,Sandbox,Self,"resource[""role""] != ""owner""",DELETE,/membership/{id},Worker,N/A
change:role,Membership,Organization,None,"resource[""role""] != ""owner""",PATCH,/membership/{id},User,Owner
change:role,Membership,Organization,None,"resource[""role""] not in [""maintainer"", ""owner""]",PATCH,/membership/{id},User,Maintainer
delete,Membership,Organization,None,"resource[""role""] != ""owner""",DELETE,/membership/{id},User,Owner
delete,Membership,Organization,None,"resource[""role""] not in [""maintainer"", ""owner""]",DELETE,/membership/{id},User,Maintainer
delete,Membership,Organization,Self,"resource[""role""] != ""owner""",DELETE,/membership/{id},Worker,Worker
Loading

0 comments on commit 46e3e73

Please sign in to comment.