Skip to content

Commit

Permalink
Fix bucket public access
Browse files Browse the repository at this point in the history
  • Loading branch information
Marishka17 committed Apr 15, 2021
1 parent 089d6a8 commit 5d8be0a
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 6 deletions.
8 changes: 5 additions & 3 deletions cvat/apps/engine/cloud_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import boto3
from boto3.s3.transfer import TransferConfig
from botocore.exceptions import WaiterError, NoCredentialsError
from botocore.handlers import disable_signing

from azure.storage.blob import BlobServiceClient
from azure.core.exceptions import ResourceExistsError
Expand Down Expand Up @@ -116,10 +117,11 @@ def __init__(self, bucket, access_key_id=None, secret_key=None, session_token=No
)
elif any([access_key_id, secret_key, session_token]):
raise Exception('Insufficient data for authorization')
else:
# anonymous access
self._client_s3 = boto3.client('s3')
self._s3 = boto3.resource('s3')
# anonymous access
if not any([access_key_id, secret_key, session_token]):
self._s3.meta.client.meta.events.register('choose-signer.s3.*', disable_signing)
self._client_s3 = self._s3.meta.client
self._bucket = self._s3.Bucket(bucket)

@property
Expand Down
2 changes: 1 addition & 1 deletion cvat/apps/engine/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ def _create_thread(tid, data):
if data['server_files']:
if db_data.storage == StorageChoice.LOCAL:
_copy_data_from_share(data['server_files'], upload_dir)
else:
elif db_data.storage == StorageChoice.SHARE:
upload_dir = settings.SHARE_ROOT

av_scan_paths(upload_dir)
Expand Down
14 changes: 12 additions & 2 deletions cvat/apps/engine/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -988,13 +988,22 @@ def self(self, request):
serializer = serializer_class(request.user, context={ "request": request })
return Response(serializer.data)

class CloudStorageContentFilterInspector(CoreAPICompatInspector):
    """Swagger filter inspector for the cloud-storage content endpoint.

    Hooked into drf-yasg via ``filter_inspectors=[...]`` on the view's
    ``swagger_auto_schema`` decorator to control which filter parameters
    are documented for the endpoint.
    """

    def get_filter_parameters(self, filter_backend):
        """Return schema parameters contributed by *filter_backend*.

        Currently always returns ``NotHandled`` so that the next inspector
        in the chain handles the backend; the base-class result is fetched
        but not used yet.
        NOTE(review): the original also computed an unused copy of the
        parameter list — looks like work-in-progress filtering logic.
        """
        if isinstance(filter_backend, DjangoFilterBackend):
            # Result intentionally kept (base call preserved for parity with
            # the original), but not returned — presumably a later change
            # filters this list before returning it. TODO confirm intent.
            filter_params = super().get_filter_parameters(filter_backend)

        return NotHandled

@method_decorator(name='list', decorator=swagger_auto_schema(
operation_summary='Returns a paginated list of storages according to query parameters',
manual_parameters=[
openapi.Parameter('provider_type', openapi.IN_QUERY, description="A supported provider of cloud storages",
type=openapi.TYPE_STRING, enum=CloudProviderChoice.list()),
openapi.Parameter('resource', openapi.IN_QUERY, description="A name of buket or container", type=openapi.TYPE_STRING),
openapi.Parameter('resource', openapi.IN_QUERY, description="A name of bucket or container", type=openapi.TYPE_STRING),
openapi.Parameter('owner', openapi.IN_QUERY, description="A resource owner", type=openapi.TYPE_STRING),
openapi.Parameter('credentials_type', openapi.IN_QUERY, description="A type of a granting access", type=openapi.TYPE_STRING, enum=CredentialsTypeChoice.list()),
],
Expand Down Expand Up @@ -1104,6 +1113,7 @@ def perform_destroy(self, instance):
responses={
'200': openapi.Response(description='A list of a storage content'),
},
filter_inspectors=[CloudStorageContentFilterInspector],
tags=['cloud storages']
)
)
Expand Down Expand Up @@ -1138,7 +1148,7 @@ def retrieve(self, request, *args, **kwargs):
if key in storage_files: content[key].append('s') # storage
if key in manifest_files: content[key].append('m') # manifest

data = json.loads(content)
data = json.dumps(content)
return Response(data=data, content_type="aplication/json")

except CloudStorageModel.DoesNotExist:
Expand Down

0 comments on commit 5d8be0a

Please sign in to comment.