From 5d8be0aafbb30997c3a5569013aa6d87b36e1ca0 Mon Sep 17 00:00:00 2001
From: Maya
Date: Fri, 16 Apr 2021 00:31:08 +0300
Subject: [PATCH] Fix bucket public access

---
 cvat/apps/engine/cloud_provider.py |  8 +++++---
 cvat/apps/engine/task.py           |  2 +-
 cvat/apps/engine/views.py          | 14 ++++++++++++--
 3 files changed, 18 insertions(+), 6 deletions(-)

diff --git a/cvat/apps/engine/cloud_provider.py b/cvat/apps/engine/cloud_provider.py
index dc9f2247eba3..423e0c977f18 100644
--- a/cvat/apps/engine/cloud_provider.py
+++ b/cvat/apps/engine/cloud_provider.py
@@ -5,6 +5,7 @@
 import boto3
 from boto3.s3.transfer import TransferConfig
 from botocore.exceptions import WaiterError, NoCredentialsError
+from botocore.handlers import disable_signing

 from azure.storage.blob import BlobServiceClient
 from azure.core.exceptions import ResourceExistsError
@@ -116,10 +117,11 @@ def __init__(self, bucket, access_key_id=None, secret_key=None, session_token=No
             )
         elif any([access_key_id, secret_key, session_token]):
             raise Exception('Insufficient data for authorization')
-        else:
-            # anonymous access
-            self._client_s3 = boto3.client('s3')
         self._s3 = boto3.resource('s3')
+        # anonymous access
+        if not any([access_key_id, secret_key, session_token]):
+            self._s3.meta.client.meta.events.register('choose-signer.s3.*', disable_signing)
+        self._client_s3 = self._s3.meta.client
         self._bucket = self._s3.Bucket(bucket)

     @property
diff --git a/cvat/apps/engine/task.py b/cvat/apps/engine/task.py
index 2181fc542011..be7143d9b554 100644
--- a/cvat/apps/engine/task.py
+++ b/cvat/apps/engine/task.py
@@ -234,7 +234,7 @@ def _create_thread(tid, data):
     if data['server_files']:
         if db_data.storage == StorageChoice.LOCAL:
             _copy_data_from_share(data['server_files'], upload_dir)
-        else:
+        elif db_data.storage == StorageChoice.SHARE:
             upload_dir = settings.SHARE_ROOT

     av_scan_paths(upload_dir)
diff --git a/cvat/apps/engine/views.py b/cvat/apps/engine/views.py
index f43b69f6536d..02c79675c572 100644
--- a/cvat/apps/engine/views.py
+++ b/cvat/apps/engine/views.py
@@ -988,13 +988,22 @@ def self(self, request):
         serializer = serializer_class(request.user, context={ "request": request })
         return Response(serializer.data)

+class CloudStorageContentFilterInspector(CoreAPICompatInspector):
+    def get_filter_parameters(self, filter_backend):
+        if isinstance(filter_backend, DjangoFilterBackend):
+            filter_params = super(CloudStorageContentFilterInspector, self).get_filter_parameters(filter_backend)
+
+            # hand back a copy so the backend's parameter list is not modified in place
+            return filter_params.copy()
+
+        return NotHandled

 @method_decorator(name='list', decorator=swagger_auto_schema(
     operation_summary='Returns a paginated list of storages according to query parameters',
     manual_parameters=[
            openapi.Parameter('provider_type', openapi.IN_QUERY, description="A supported provider of cloud storages",
                            type=openapi.TYPE_STRING, enum=CloudProviderChoice.list()),
-           openapi.Parameter('resource', openapi.IN_QUERY, description="A name of buket or container", type=openapi.TYPE_STRING),
+           openapi.Parameter('resource', openapi.IN_QUERY, description="A name of bucket or container", type=openapi.TYPE_STRING),
            openapi.Parameter('owner', openapi.IN_QUERY, description="A resource owner", type=openapi.TYPE_STRING),
            openapi.Parameter('credentials_type', openapi.IN_QUERY, description="A type of a granting access", type=openapi.TYPE_STRING, enum=CredentialsTypeChoice.list()),
     ],
@@ -1104,6 +1113,7 @@ def perform_destroy(self, instance):
         responses={
             '200': openapi.Response(description='A list of a storage content'),
         },
+        filter_inspectors=[CloudStorageContentFilterInspector],
         tags=['cloud storages']
     )
 )
@@ -1138,7 +1148,7 @@ def retrieve(self, request, *args, **kwargs):
                 if key in storage_files: content[key].append('s') # storage
                 if key in manifest_files: content[key].append('m') # manifest
-            data = json.loads(content)
+            data = json.dumps(content)
             return Response(data=data, content_type="aplication/json")

         except CloudStorageModel.DoesNotExist:
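
Note (not part of the patch): a minimal standalone sketch of the anonymous-access technique the cloud_provider.py hunk applies, i.e. registering botocore's disable_signing handler so a publicly readable bucket can be listed without credentials. The bucket name is a placeholder, not taken from the patch.

    import boto3
    from botocore.handlers import disable_signing

    # Build an S3 resource and turn off request signing so calls go out unsigned
    # (anonymous access); this only works against buckets with public read access.
    s3 = boto3.resource('s3')
    s3.meta.client.meta.events.register('choose-signer.s3.*', disable_signing)

    bucket = s3.Bucket('some-public-bucket')  # placeholder bucket name
    for obj in bucket.objects.limit(10):      # list a few keys to confirm access works
        print(obj.key)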