From 6381af44980163bcfd4dfe18cd0732ffb91a09b1 Mon Sep 17 00:00:00 2001 From: Chris Wilcox Date: Fri, 30 Nov 2018 14:40:26 -0800 Subject: [PATCH 1/2] add black to noxfiles --- bigquery/noxfile.py | 31 +++++++++++++++++++++++-------- storage/noxfile.py | 32 +++++++++++++++++++++++--------- 2 files changed, 46 insertions(+), 17 deletions(-) diff --git a/bigquery/noxfile.py b/bigquery/noxfile.py index 21b8487039cc..0927d2d430f1 100644 --- a/bigquery/noxfile.py +++ b/bigquery/noxfile.py @@ -127,6 +127,18 @@ def snippets(session): 'py.test', os.path.join('docs', 'snippets.py'), *session.posargs) +@nox.session(python='3.6') +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') + + @nox.session(python='3.6') def lint(session): """Run linters. @@ -152,13 +164,16 @@ def lint_setup_py(session): 'python', 'setup.py', 'check', '--restructuredtext', '--strict') -@nox.session(python='3.6') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. +@nox.session(python="3.6") +def blacken(session): + """Run black. + Format code to uniform standard. """ - session.install('coverage', 'pytest-cov') - session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase') + session.install("black") + session.run( + "black", + "google", + "tests", + "docs", + ) \ No newline at end of file diff --git a/storage/noxfile.py b/storage/noxfile.py index d3a2b93dc117..74c74fdcad57 100644 --- a/storage/noxfile.py +++ b/storage/noxfile.py @@ -89,6 +89,18 @@ def system(session): session.run('py.test', '--quiet', 'tests/system.py', *session.posargs) +@nox.session(python='3.6') +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') + + @nox.session(python='3.6') def lint(session): """Run linters. @@ -109,13 +121,15 @@ def lint_setup_py(session): 'python', 'setup.py', 'check', '--restructuredtext', '--strict') -@nox.session(python='3.6') -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. +@nox.session(python="3.6") +def blacken(session): + """Run black. + Format code to uniform standard. 
""" - session.install('coverage', 'pytest-cov') - session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase') + session.install("black") + session.run( + "black", + "google", + "tests", + "docs", + ) From 21b92ec1c20fbff65b1c0751f70420e81e061721 Mon Sep 17 00:00:00 2001 From: Chris Wilcox Date: Fri, 30 Nov 2018 14:42:32 -0800 Subject: [PATCH 2/2] blacken --- bigquery/docs/snippets.py | 1411 ++--- bigquery/google/__init__.py | 2 + bigquery/google/cloud/__init__.py | 2 + bigquery/google/cloud/bigquery/__init__.py | 88 +- bigquery/google/cloud/bigquery/_helpers.py | 119 +- bigquery/google/cloud/bigquery/_http.py | 10 +- bigquery/google/cloud/bigquery/client.py | 516 +- bigquery/google/cloud/bigquery/dataset.py | 180 +- .../google/cloud/bigquery/dbapi/__init__.py | 38 +- .../google/cloud/bigquery/dbapi/_helpers.py | 28 +- .../google/cloud/bigquery/dbapi/connection.py | 1 + .../google/cloud/bigquery/dbapi/cursor.py | 62 +- bigquery/google/cloud/bigquery/dbapi/types.py | 13 +- .../google/cloud/bigquery/external_config.py | 143 +- bigquery/google/cloud/bigquery/job.py | 650 +-- bigquery/google/cloud/bigquery/magics.py | 81 +- bigquery/google/cloud/bigquery/query.py | 187 +- bigquery/google/cloud/bigquery/retry.py | 14 +- bigquery/google/cloud/bigquery/schema.py | 45 +- bigquery/google/cloud/bigquery/table.py | 414 +- bigquery/tests/scrub_datasets.py | 5 +- bigquery/tests/system.py | 1295 +++-- bigquery/tests/unit/test__helpers.py | 617 +-- bigquery/tests/unit/test__http.py | 39 +- bigquery/tests/unit/test_client.py | 4141 +++++++------- bigquery/tests/unit/test_dataset.py | 400 +- bigquery/tests/unit/test_dbapi__helpers.py | 75 +- bigquery/tests/unit/test_dbapi_connection.py | 9 +- bigquery/tests/unit/test_dbapi_cursor.py | 164 +- bigquery/tests/unit/test_dbapi_types.py | 18 +- bigquery/tests/unit/test_external_config.py | 322 +- bigquery/tests/unit/test_job.py | 3472 ++++++------ bigquery/tests/unit/test_magics.py | 248 +- bigquery/tests/unit/test_query.py | 979 ++-- bigquery/tests/unit/test_retry.py | 25 +- bigquery/tests/unit/test_schema.py | 407 +- bigquery/tests/unit/test_table.py | 942 ++-- firestore/docs/conf.py | 167 +- firestore/google/__init__.py | 2 + firestore/google/cloud/__init__.py | 2 + firestore/google/cloud/firestore.py | 38 +- .../cloud/firestore_v1beta1/__init__.py | 45 +- .../cloud/firestore_v1beta1/_helpers.py | 286 +- .../google/cloud/firestore_v1beta1/batch.py | 24 +- .../google/cloud/firestore_v1beta1/client.py | 67 +- .../cloud/firestore_v1beta1/collection.py | 44 +- .../cloud/firestore_v1beta1/document.py | 60 +- .../cloud/firestore_v1beta1/gapic/enums.py | 7 + .../gapic/firestore_client.py | 614 ++- .../gapic/firestore_client_config.py | 38 +- .../transports/firestore_grpc_transport.py | 59 +- .../google/cloud/firestore_v1beta1/order.py | 88 +- .../proto/admin/firestore_admin_pb2.py | 1499 +++-- .../proto/admin/firestore_admin_pb2_grpc.py | 143 +- .../proto/admin/index_pb2.py | 395 +- .../proto/admin/index_pb2_grpc.py | 1 - .../firestore_v1beta1/proto/common_pb2.py | 579 +- .../proto/common_pb2_grpc.py | 1 - .../firestore_v1beta1/proto/document_pb2.py | 1027 ++-- .../proto/document_pb2_grpc.py | 1 - .../proto/event_flow_document_change_pb2.py | 54 +- .../event_flow_document_change_pb2_grpc.py | 1 - .../firestore_v1beta1/proto/firestore_pb2.py | 4895 +++++++++++------ .../proto/firestore_pb2_grpc.py | 391 +- .../firestore_v1beta1/proto/query_pb2.py | 1582 +++--- .../firestore_v1beta1/proto/query_pb2_grpc.py | 1 - 
.../cloud/firestore_v1beta1/proto/test_pb2.py | 3202 +++++++---- .../firestore_v1beta1/proto/write_pb2.py | 1254 +++-- .../firestore_v1beta1/proto/write_pb2_grpc.py | 1 - .../google/cloud/firestore_v1beta1/query.py | 193 +- .../cloud/firestore_v1beta1/transaction.py | 40 +- .../cloud/firestore_v1beta1/transforms.py | 9 +- .../google/cloud/firestore_v1beta1/types.py | 10 +- .../google/cloud/firestore_v1beta1/watch.py | 301 +- firestore/tests/system.py | 577 +- .../v1beta1/test_firestore_client_v1beta1.py | 271 +- firestore/tests/unit/test__helpers.py | 1472 ++--- firestore/tests/unit/test_batch.py | 74 +- firestore/tests/unit/test_client.py | 224 +- firestore/tests/unit/test_collection.py | 216 +- firestore/tests/unit/test_cross_language.py | 100 +- firestore/tests/unit/test_document.py | 371 +- firestore/tests/unit/test_order.py | 78 +- firestore/tests/unit/test_query.py | 651 +-- firestore/tests/unit/test_transaction.py | 428 +- firestore/tests/unit/test_transforms.py | 14 +- firestore/tests/unit/test_watch.py | 355 +- storage/docs/conf.py | 173 +- storage/docs/snippets.py | 105 +- storage/google/__init__.py | 2 + storage/google/cloud/__init__.py | 2 + storage/google/cloud/storage/__init__.py | 5 +- storage/google/cloud/storage/_helpers.py | 39 +- storage/google/cloud/storage/_http.py | 8 +- storage/google/cloud/storage/_signing.py | 91 +- storage/google/cloud/storage/acl.py | 103 +- storage/google/cloud/storage/batch.py | 76 +- storage/google/cloud/storage/blob.py | 490 +- storage/google/cloud/storage/bucket.py | 456 +- storage/google/cloud/storage/client.py | 55 +- storage/google/cloud/storage/iam.py | 42 +- storage/google/cloud/storage/notification.py | 117 +- storage/tests/system.py | 719 ++- storage/tests/unit/test__helpers.py | 190 +- storage/tests/unit/test__http.py | 39 +- storage/tests/unit/test__signing.py | 102 +- storage/tests/unit/test_acl.py | 516 +- storage/tests/unit/test_batch.py | 275 +- storage/tests/unit/test_blob.py | 2293 ++++---- storage/tests/unit/test_bucket.py | 1855 +++---- storage/tests/unit/test_client.py | 393 +- storage/tests/unit/test_notification.py | 280 +- 112 files changed, 25088 insertions(+), 22452 deletions(-) diff --git a/bigquery/docs/snippets.py b/bigquery/docs/snippets.py index 9e8ba524a115..a60f587d03cf 100644 --- a/bigquery/docs/snippets.py +++ b/bigquery/docs/snippets.py @@ -29,6 +29,7 @@ import mock import pytest import six + try: import pandas except (ImportError, AttributeError): @@ -46,36 +47,38 @@ from google.cloud import storage from test_utils.retry import RetryErrors -ORIGINAL_FRIENDLY_NAME = 'Original friendly name' -ORIGINAL_DESCRIPTION = 'Original description' -LOCALLY_CHANGED_FRIENDLY_NAME = 'Locally-changed friendly name' -LOCALLY_CHANGED_DESCRIPTION = 'Locally-changed description' -UPDATED_FRIENDLY_NAME = 'Updated friendly name' -UPDATED_DESCRIPTION = 'Updated description' +ORIGINAL_FRIENDLY_NAME = "Original friendly name" +ORIGINAL_DESCRIPTION = "Original description" +LOCALLY_CHANGED_FRIENDLY_NAME = "Locally-changed friendly name" +LOCALLY_CHANGED_DESCRIPTION = "Locally-changed description" +UPDATED_FRIENDLY_NAME = "Updated friendly name" +UPDATED_DESCRIPTION = "Updated description" SCHEMA = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), ] ROWS = [ - ('Phred Phlyntstone', 32), - ('Bharney Rhubble', 33), - ('Wylma Phlyntstone', 
29), - ('Bhettye Rhubble', 27), + ("Phred Phlyntstone", 32), + ("Bharney Rhubble", 33), + ("Wylma Phlyntstone", 29), + ("Bhettye Rhubble", 27), ] QUERY = ( - 'SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` ' - 'WHERE state = "TX"') + "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` " + 'WHERE state = "TX"' +) retry_429 = RetryErrors(TooManyRequests) retry_storage_errors = RetryErrors( - (TooManyRequests, InternalServerError, ServiceUnavailable)) + (TooManyRequests, InternalServerError, ServiceUnavailable) +) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def client(): return bigquery.Client() @@ -98,7 +101,6 @@ def _millis(): class _CloseOnDelete(object): - def __init__(self, wrapped): self._wrapped = wrapped @@ -122,19 +124,20 @@ def test_create_client_default_credentials(): def test_create_client_json_credentials(): """Create a BigQuery client with Application Default Credentials""" - with open(os.environ['GOOGLE_APPLICATION_CREDENTIALS']) as creds_file: + with open(os.environ["GOOGLE_APPLICATION_CREDENTIALS"]) as creds_file: creds_file_data = creds_file.read() open_mock = mock.mock_open(read_data=creds_file_data) - with mock.patch('io.open', open_mock): + with mock.patch("io.open", open_mock): # [START bigquery_client_json_credentials] from google.cloud import bigquery # Explicitly use service account credentials by specifying the private # key file. All clients in google-cloud-python have this helper. client = bigquery.Client.from_service_account_json( - 'path/to/service_account.json') + "path/to/service_account.json" + ) # [END bigquery_client_json_credentials] assert client is not None @@ -150,18 +153,18 @@ def test_list_datasets(client): project = client.project if datasets: - print('Datasets in project {}:'.format(project)) + print("Datasets in project {}:".format(project)) for dataset in datasets: # API request(s) - print('\t{}'.format(dataset.dataset_id)) + print("\t{}".format(dataset.dataset_id)) else: - print('{} project does not contain any datasets.'.format(project)) + print("{} project does not contain any datasets.".format(project)) # [END bigquery_list_datasets] def test_list_datasets_by_label(client, to_delete): - dataset_id = 'list_datasets_by_label_{}'.format(_millis()) + dataset_id = "list_datasets_by_label_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset.labels = {'color': 'green'} + dataset.labels = {"color": "green"} dataset = client.create_dataset(dataset) # API request to_delete.append(dataset) @@ -171,15 +174,15 @@ def test_list_datasets_by_label(client, to_delete): # The following label filter example will find datasets with an # arbitrary 'color' label set to 'green' - label_filter = 'labels.color:green' + label_filter = "labels.color:green" datasets = list(client.list_datasets(filter=label_filter)) if datasets: - print('Datasets filtered by {}:'.format(label_filter)) + print("Datasets filtered by {}:".format(label_filter)) for dataset in datasets: # API request(s) - print('\t{}'.format(dataset.dataset_id)) + print("\t{}".format(dataset.dataset_id)) else: - print('No datasets found with this filter.') + print("No datasets found with this filter.") # [END bigquery_list_datasets_by_label] found = set([dataset.dataset_id for dataset in datasets]) assert dataset_id in found @@ -187,7 +190,7 @@ def test_list_datasets_by_label(client, to_delete): def test_create_dataset(client, to_delete): """Create a dataset.""" - dataset_id = 'create_dataset_{}'.format(_millis()) + dataset_id 
= "create_dataset_{}".format(_millis()) # [START bigquery_create_dataset] # from google.cloud import bigquery @@ -201,7 +204,7 @@ def test_create_dataset(client, to_delete): # Construct a full Dataset object to send to the API. dataset = bigquery.Dataset(dataset_ref) # Specify the geographic location where the dataset should reside. - dataset.location = 'US' + dataset.location = "US" # Send the dataset to the API for creation. # Raises google.api_core.exceptions.AlreadyExists if the Dataset already @@ -214,8 +217,8 @@ def test_create_dataset(client, to_delete): def test_get_dataset_information(client, to_delete): """View information about a dataset.""" - dataset_id = 'get_dataset_{}'.format(_millis()) - dataset_labels = {'color': 'green'} + dataset_id = "get_dataset_{}".format(_millis()) + dataset_labels = {"color": "green"} dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) dataset.description = ORIGINAL_DESCRIPTION @@ -232,24 +235,24 @@ def test_get_dataset_information(client, to_delete): dataset = client.get_dataset(dataset_ref) # API request # View dataset properties - print('Dataset ID: {}'.format(dataset_id)) - print('Description: {}'.format(dataset.description)) - print('Labels:') + print("Dataset ID: {}".format(dataset_id)) + print("Description: {}".format(dataset.description)) + print("Labels:") labels = dataset.labels if labels: for label, value in labels.items(): - print('\t{}: {}'.format(label, value)) + print("\t{}: {}".format(label, value)) else: print("\tDataset has no labels defined.") # View tables in dataset - print('Tables:') + print("Tables:") tables = list(client.list_tables(dataset_ref)) # API request(s) if tables: for table in tables: - print('\t{}'.format(table.table_id)) + print("\t{}".format(table.table_id)) else: - print('\tThis dataset does not contain any tables.') + print("\tThis dataset does not contain any tables.") # [END bigquery_get_dataset] assert dataset.description == ORIGINAL_DESCRIPTION @@ -277,29 +280,34 @@ def dataset_exists(client, dataset_reference): return True except NotFound: return False + + # [END bigquery_dataset_exists] def test_dataset_exists(client, to_delete): """Determine if a dataset exists.""" - DATASET_ID = 'get_table_dataset_{}'.format(_millis()) + DATASET_ID = "get_table_dataset_{}".format(_millis()) dataset_ref = client.dataset(DATASET_ID) dataset = bigquery.Dataset(dataset_ref) dataset = client.create_dataset(dataset) to_delete.append(dataset) assert dataset_exists(client, dataset_ref) - assert not dataset_exists(client, client.dataset('i_dont_exist')) + assert not dataset_exists(client, client.dataset("i_dont_exist")) -@pytest.mark.skip(reason=( - 'update_dataset() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588')) +@pytest.mark.skip( + reason=( + "update_dataset() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588" + ) +) def test_update_dataset_description(client, to_delete): """Update a dataset's description.""" - dataset_id = 'update_dataset_description_{}'.format(_millis()) + dataset_id = "update_dataset_description_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset.description = 'Original description.' + dataset.description = "Original description." 
client.create_dataset(dataset) to_delete.append(dataset) @@ -309,21 +317,24 @@ def test_update_dataset_description(client, to_delete): # dataset_ref = client.dataset('my_dataset') # dataset = client.get_dataset(dataset_ref) # API request - assert dataset.description == 'Original description.' - dataset.description = 'Updated description.' + assert dataset.description == "Original description." + dataset.description = "Updated description." - dataset = client.update_dataset(dataset, ['description']) # API request + dataset = client.update_dataset(dataset, ["description"]) # API request - assert dataset.description == 'Updated description.' + assert dataset.description == "Updated description." # [END bigquery_update_dataset_description] -@pytest.mark.skip(reason=( - 'update_dataset() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588')) +@pytest.mark.skip( + reason=( + "update_dataset() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588" + ) +) def test_update_dataset_default_table_expiration(client, to_delete): """Update a dataset's default table expiration.""" - dataset_id = 'update_dataset_default_expiration_{}'.format(_millis()) + dataset_id = "update_dataset_default_expiration_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -339,17 +350,21 @@ def test_update_dataset_default_table_expiration(client, to_delete): dataset.default_table_expiration_ms = one_day_ms dataset = client.update_dataset( - dataset, ['default_table_expiration_ms']) # API request + dataset, ["default_table_expiration_ms"] + ) # API request assert dataset.default_table_expiration_ms == one_day_ms # [END bigquery_update_dataset_expiration] -@pytest.mark.skip(reason=( - 'update_dataset() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588')) +@pytest.mark.skip( + reason=( + "update_dataset() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588" + ) +) def test_manage_dataset_labels(client, to_delete): - dataset_id = 'label_dataset_{}'.format(_millis()) + dataset_id = "label_dataset_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -361,10 +376,10 @@ def test_manage_dataset_labels(client, to_delete): # dataset = client.get_dataset(dataset_ref) # API request assert dataset.labels == {} - labels = {'color': 'green'} + labels = {"color": "green"} dataset.labels = labels - dataset = client.update_dataset(dataset, ['labels']) # API request + dataset = client.update_dataset(dataset, ["labels"]) # API request assert dataset.labels == labels # [END bigquery_label_dataset] @@ -378,11 +393,11 @@ def test_manage_dataset_labels(client, to_delete): dataset = client.get_dataset(dataset_ref) # API request # View dataset labels - print('Dataset ID: {}'.format(dataset_id)) - print('Labels:') + print("Dataset ID: {}".format(dataset_id)) + print("Labels:") if dataset.labels: for label, value in dataset.labels.items(): - print('\t{}: {}'.format(label, value)) + print("\t{}: {}".format(label, value)) else: print("\tDataset has no labels defined.") # [END bigquery_get_dataset_labels] @@ -395,22 +410,25 @@ def test_manage_dataset_labels(client, to_delete): # dataset = client.get_dataset(dataset_ref) # API request # This example dataset starts with one label - assert dataset.labels == {'color': 'green'} + assert 
dataset.labels == {"color": "green"} # To delete a label from a dataset, set its value to None - dataset.labels['color'] = None + dataset.labels["color"] = None - dataset = client.update_dataset(dataset, ['labels']) # API request + dataset = client.update_dataset(dataset, ["labels"]) # API request assert dataset.labels == {} # [END bigquery_delete_label_dataset] -@pytest.mark.skip(reason=( - 'update_dataset() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588')) +@pytest.mark.skip( + reason=( + "update_dataset() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5588" + ) +) def test_update_dataset_access(client, to_delete): """Update a dataset's access controls.""" - dataset_id = 'update_dataset_access_{}'.format(_millis()) + dataset_id = "update_dataset_access_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -421,15 +439,16 @@ def test_update_dataset_access(client, to_delete): # dataset = client.get_dataset(client.dataset('my_dataset')) entry = bigquery.AccessEntry( - role='READER', - entity_type='userByEmail', - entity_id='sample.bigquery.dev@gmail.com') + role="READER", + entity_type="userByEmail", + entity_id="sample.bigquery.dev@gmail.com", + ) assert entry not in dataset.access_entries entries = list(dataset.access_entries) entries.append(entry) dataset.access_entries = entries - dataset = client.update_dataset(dataset, ['access_entries']) # API request + dataset = client.update_dataset(dataset, ["access_entries"]) # API request assert entry in dataset.access_entries # [END bigquery_update_dataset_access] @@ -439,15 +458,15 @@ def test_delete_dataset(client): """Delete a dataset.""" from google.cloud.exceptions import NotFound - dataset1_id = 'delete_dataset_{}'.format(_millis()) + dataset1_id = "delete_dataset_{}".format(_millis()) dataset1 = bigquery.Dataset(client.dataset(dataset1_id)) client.create_dataset(dataset1) - dataset2_id = 'delete_dataset_with_tables{}'.format(_millis()) + dataset2_id = "delete_dataset_with_tables{}".format(_millis()) dataset2 = bigquery.Dataset(client.dataset(dataset2_id)) client.create_dataset(dataset2) - table = bigquery.Table(dataset2.table('new_table')) + table = bigquery.Table(dataset2.table("new_table")) client.create_table(table) # [START bigquery_delete_dataset] @@ -459,14 +478,14 @@ def test_delete_dataset(client): dataset1_ref = client.dataset(dataset1_id) client.delete_dataset(dataset1_ref) # API request - print('Dataset {} deleted.'.format(dataset1_id)) + print("Dataset {} deleted.".format(dataset1_id)) # Use the delete_contents parameter to delete a dataset and its contents # dataset2_id = 'my_dataset_with_tables' dataset2_ref = client.dataset(dataset2_id) client.delete_dataset(dataset2_ref, delete_contents=True) # API request - print('Dataset {} deleted.'.format(dataset2_id)) + print("Dataset {} deleted.".format(dataset2_id)) # [END bigquery_delete_dataset] for dataset in [dataset1, dataset2]: @@ -476,7 +495,7 @@ def test_delete_dataset(client): def test_list_tables(client, to_delete): """List tables within a dataset.""" - dataset_id = 'list_tables_dataset_{}'.format(_millis()) + dataset_id = "list_tables_dataset_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = client.create_dataset(bigquery.Dataset(dataset_ref)) to_delete.append(dataset) @@ -489,19 +508,19 @@ def test_list_tables(client, to_delete): tables = list(client.list_tables(dataset_ref)) # API 
request(s) assert len(tables) == 0 - table_ref = dataset.table('my_table') + table_ref = dataset.table("my_table") table = bigquery.Table(table_ref) - client.create_table(table) # API request + client.create_table(table) # API request tables = list(client.list_tables(dataset)) # API request(s) assert len(tables) == 1 - assert tables[0].table_id == 'my_table' + assert tables[0].table_id == "my_table" # [END bigquery_list_tables] def test_create_table(client, to_delete): """Create a table.""" - dataset_id = 'create_table_dataset_{}'.format(_millis()) + dataset_id = "create_table_dataset_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) client.create_dataset(dataset) @@ -513,19 +532,19 @@ def test_create_table(client, to_delete): # dataset_ref = client.dataset('my_dataset') schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), ] - table_ref = dataset_ref.table('my_table') + table_ref = dataset_ref.table("my_table") table = bigquery.Table(table_ref, schema=schema) table = client.create_table(table) # API request - assert table.table_id == 'my_table' + assert table.table_id == "my_table" # [END bigquery_create_table] def test_create_table_nested_repeated_schema(client, to_delete): - dataset_id = 'create_table_nested_repeated_{}'.format(_millis()) + dataset_id = "create_table_nested_repeated_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) client.create_dataset(dataset) @@ -537,29 +556,34 @@ def test_create_table_nested_repeated_schema(client, to_delete): # dataset_ref = client.dataset('my_dataset') schema = [ - bigquery.SchemaField('id', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('first_name', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('last_name', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('dob', 'DATE', mode='NULLABLE'), - bigquery.SchemaField('addresses', 'RECORD', mode='REPEATED', fields=[ - bigquery.SchemaField('status', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('address', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('city', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('state', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('zip', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('numberOfYears', 'STRING', mode='NULLABLE'), - ]), + bigquery.SchemaField("id", "STRING", mode="NULLABLE"), + bigquery.SchemaField("first_name", "STRING", mode="NULLABLE"), + bigquery.SchemaField("last_name", "STRING", mode="NULLABLE"), + bigquery.SchemaField("dob", "DATE", mode="NULLABLE"), + bigquery.SchemaField( + "addresses", + "RECORD", + mode="REPEATED", + fields=[ + bigquery.SchemaField("status", "STRING", mode="NULLABLE"), + bigquery.SchemaField("address", "STRING", mode="NULLABLE"), + bigquery.SchemaField("city", "STRING", mode="NULLABLE"), + bigquery.SchemaField("state", "STRING", mode="NULLABLE"), + bigquery.SchemaField("zip", "STRING", mode="NULLABLE"), + bigquery.SchemaField("numberOfYears", "STRING", mode="NULLABLE"), + ], + ), ] - table_ref = dataset_ref.table('my_table') + table_ref = dataset_ref.table("my_table") table = bigquery.Table(table_ref, schema=schema) table = client.create_table(table) # API request - print('Created table {}'.format(table.full_table_id)) + print("Created table {}".format(table.full_table_id)) # [END 
bigquery_nested_repeated_schema] def test_create_table_cmek(client, to_delete): - dataset_id = 'create_table_cmek_{}'.format(_millis()) + dataset_id = "create_table_cmek_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -569,15 +593,17 @@ def test_create_table_cmek(client, to_delete): # client = bigquery.Client() # dataset_id = 'my_dataset' - table_ref = client.dataset(dataset_id).table('my_table') + table_ref = client.dataset(dataset_id).table("my_table") table = bigquery.Table(table_ref) # Set the encryption key to use for the table. # TODO: Replace this key with a key you have created in Cloud KMS. - kms_key_name = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format( - 'cloud-samples-tests', 'us-central1', 'test', 'test') + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + "cloud-samples-tests", "us-central1", "test", "test" + ) table.encryption_configuration = bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name) + kms_key_name=kms_key_name + ) table = client.create_table(table) # API request @@ -586,7 +612,7 @@ def test_create_table_cmek(client, to_delete): def test_create_partitioned_table(client, to_delete): - dataset_id = 'create_table_partitioned_{}'.format(_millis()) + dataset_id = "create_table_partitioned_{}".format(_millis()) dataset_ref = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset_ref) to_delete.append(dataset) @@ -596,31 +622,35 @@ def test_create_partitioned_table(client, to_delete): # client = bigquery.Client() # dataset_ref = client.dataset('my_dataset') - table_ref = dataset_ref.table('my_partitioned_table') + table_ref = dataset_ref.table("my_partitioned_table") schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING'), - bigquery.SchemaField('date', 'DATE') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), + bigquery.SchemaField("date", "DATE"), ] table = bigquery.Table(table_ref, schema=schema) table.time_partitioning = bigquery.TimePartitioning( type_=bigquery.TimePartitioningType.DAY, - field='date', # name of column to use for partitioning - expiration_ms=7776000000) # 90 days + field="date", # name of column to use for partitioning + expiration_ms=7776000000, + ) # 90 days table = client.create_table(table) - print('Created table {}, partitioned on column {}'.format( - table.table_id, table.time_partitioning.field)) + print( + "Created table {}, partitioned on column {}".format( + table.table_id, table.time_partitioning.field + ) + ) # [END bigquery_create_table_partitioned] - assert table.time_partitioning.type_ == 'DAY' - assert table.time_partitioning.field == 'date' + assert table.time_partitioning.type_ == "DAY" + assert table.time_partitioning.field == "date" assert table.time_partitioning.expiration_ms == 7776000000 def test_load_and_query_partitioned_table(client, to_delete): - dataset_id = 'load_partitioned_table_dataset_{}'.format(_millis()) + dataset_id = "load_partitioned_table_dataset_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -629,28 +659,28 @@ def test_load_and_query_partitioned_table(client, to_delete): # from google.cloud import bigquery # client = bigquery.Client() # dataset_id = 'my_dataset' - table_id = 'us_states_by_date' + table_id = "us_states_by_date" dataset_ref = client.dataset(dataset_id) 
job_config = bigquery.LoadJobConfig() job_config.schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING'), - bigquery.SchemaField('date', 'DATE') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), + bigquery.SchemaField("date", "DATE"), ] job_config.skip_leading_rows = 1 job_config.time_partitioning = bigquery.TimePartitioning( type_=bigquery.TimePartitioningType.DAY, - field='date', # name of column to use for partitioning - expiration_ms=7776000000) # 90 days - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states-by-date.csv' + field="date", # name of column to use for partitioning + expiration_ms=7776000000, + ) # 90 days + uri = "gs://cloud-samples-data/bigquery/us-states/us-states-by-date.csv" load_job = client.load_table_from_uri( - uri, - dataset_ref.table(table_id), - job_config=job_config) # API request + uri, dataset_ref.table(table_id), job_config=job_config + ) # API request - assert load_job.job_type == 'load' + assert load_job.job_type == "load" load_job.result() # Waits for table load to complete. @@ -663,11 +693,12 @@ def test_load_and_query_partitioned_table(client, to_delete): # [START bigquery_query_partitioned_table] import datetime + # from google.cloud import bigquery # client = bigquery.Client() # project_id = 'my-project' # dataset_id = 'my_dataset' - table_id = 'us_states_by_date' + table_id = "us_states_by_date" sql_template = """ SELECT * @@ -677,23 +708,16 @@ def test_load_and_query_partitioned_table(client, to_delete): sql = sql_template.format(project_id, dataset_id, table_id) job_config = bigquery.QueryJobConfig() job_config.query_parameters = [ - bigquery.ScalarQueryParameter( - 'start_date', - 'DATE', - datetime.date(1800, 1, 1) - ), - bigquery.ScalarQueryParameter( - 'end_date', - 'DATE', - datetime.date(1899, 12, 31) - ) + bigquery.ScalarQueryParameter("start_date", "DATE", datetime.date(1800, 1, 1)), + bigquery.ScalarQueryParameter("end_date", "DATE", datetime.date(1899, 12, 31)), ] query_job = client.query( sql, # Location must match that of the dataset(s) referenced in the query. 
- location='US', - job_config=job_config) # API request + location="US", + job_config=job_config, + ) # API request rows = list(query_job) print("{} states were admitted to the US in the 1800s".format(len(rows))) @@ -703,8 +727,8 @@ def test_load_and_query_partitioned_table(client, to_delete): def test_get_table_information(client, to_delete): """Show a table's properties.""" - dataset_id = 'show_table_dataset_{}'.format(_millis()) - table_id = 'show_table_table_{}'.format(_millis()) + dataset_id = "show_table_dataset_{}".format(_millis()) + table_id = "show_table_table_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) client.create_dataset(dataset) @@ -755,13 +779,15 @@ def table_exists(client, table_reference): return True except NotFound: return False + + # [END bigquery_table_exists] def test_table_exists(client, to_delete): """Determine if a table exists.""" - DATASET_ID = 'get_table_dataset_{}'.format(_millis()) - TABLE_ID = 'get_table_table_{}'.format(_millis()) + DATASET_ID = "get_table_dataset_{}".format(_millis()) + TABLE_ID = "get_table_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(DATASET_ID)) dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -771,15 +797,18 @@ def test_table_exists(client, to_delete): table = client.create_table(table) assert table_exists(client, table_ref) - assert not table_exists(client, dataset.table('i_dont_exist')) + assert not table_exists(client, dataset.table("i_dont_exist")) -@pytest.mark.skip(reason=( - 'update_table() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589')) +@pytest.mark.skip( + reason=( + "update_table() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" + ) +) def test_manage_table_labels(client, to_delete): - dataset_id = 'label_table_dataset_{}'.format(_millis()) - table_id = 'label_table_{}'.format(_millis()) + dataset_id = "label_table_dataset_{}".format(_millis()) + table_id = "label_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -794,10 +823,10 @@ def test_manage_table_labels(client, to_delete): # table = client.get_table(table_ref) # API request assert table.labels == {} - labels = {'color': 'green'} + labels = {"color": "green"} table.labels = labels - table = client.update_table(table, ['labels']) # API request + table = client.update_table(table, ["labels"]) # API request assert table.labels == labels # [END bigquery_label_table] @@ -813,11 +842,11 @@ def test_manage_table_labels(client, to_delete): table = client.get_table(table_ref) # API Request # View table labels - print('Table ID: {}'.format(table_id)) - print('Labels:') + print("Table ID: {}".format(table_id)) + print("Labels:") if table.labels: for label, value in table.labels.items(): - print('\t{}: {}'.format(label, value)) + print("\t{}: {}".format(label, value)) else: print("\tTable has no labels defined.") # [END bigquery_get_table_labels] @@ -830,29 +859,32 @@ def test_manage_table_labels(client, to_delete): # table = client.get_table(table_ref) # API request # This example table starts with one label - assert table.labels == {'color': 'green'} + assert table.labels == {"color": "green"} # To delete a label from a table, set its value to None - table.labels['color'] = None + table.labels["color"] = None - table = client.update_table(table, ['labels']) # API request + table = client.update_table(table, 
["labels"]) # API request assert table.labels == {} # [END bigquery_delete_label_table] -@pytest.mark.skip(reason=( - 'update_table() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589')) +@pytest.mark.skip( + reason=( + "update_table() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" + ) +) def test_update_table_description(client, to_delete): """Update a table's description.""" - dataset_id = 'update_table_description_dataset_{}'.format(_millis()) - table_id = 'update_table_description_table_{}'.format(_millis()) + dataset_id = "update_table_description_dataset_{}".format(_millis()) + table_id = "update_table_description_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) - table.description = 'Original description.' + table.description = "Original description." table = client.create_table(table) # [START bigquery_update_table_description] @@ -861,22 +893,25 @@ def test_update_table_description(client, to_delete): # table_ref = client.dataset('my_dataset').table('my_table') # table = client.get_table(table_ref) # API request - assert table.description == 'Original description.' - table.description = 'Updated description.' + assert table.description == "Original description." + table.description = "Updated description." - table = client.update_table(table, ['description']) # API request + table = client.update_table(table, ["description"]) # API request - assert table.description == 'Updated description.' + assert table.description == "Updated description." # [END bigquery_update_table_description] -@pytest.mark.skip(reason=( - 'update_table() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589')) +@pytest.mark.skip( + reason=( + "update_table() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" + ) +) def test_update_table_expiration(client, to_delete): """Update a table's expiration time.""" - dataset_id = 'update_table_expiration_dataset_{}'.format(_millis()) - table_id = 'update_table_expiration_table_{}'.format(_millis()) + dataset_id = "update_table_expiration_dataset_{}".format(_millis()) + table_id = "update_table_expiration_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -898,7 +933,7 @@ def test_update_table_expiration(client, to_delete): # set table to expire 5 days from now expiration = datetime.datetime.now(pytz.utc) + datetime.timedelta(days=5) table.expires = expiration - table = client.update_table(table, ['expires']) # API request + table = client.update_table(table, ["expires"]) # API request # expiration is stored in milliseconds margin = datetime.timedelta(microseconds=1000) @@ -906,13 +941,16 @@ def test_update_table_expiration(client, to_delete): # [END bigquery_update_table_expiration] -@pytest.mark.skip(reason=( - 'update_table() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589')) +@pytest.mark.skip( + reason=( + "update_table() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" + ) +) def test_add_empty_column(client, to_delete): """Adds an empty column to an existing table.""" - dataset_id = 'add_empty_column_dataset_{}'.format(_millis()) - table_id = 'add_empty_column_table_{}'.format(_millis()) + dataset_id = 
"add_empty_column_dataset_{}".format(_millis()) + table_id = "add_empty_column_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -931,22 +969,25 @@ def test_add_empty_column(client, to_delete): original_schema = table.schema new_schema = original_schema[:] # creates a copy of the schema - new_schema.append(bigquery.SchemaField('phone', 'STRING')) + new_schema.append(bigquery.SchemaField("phone", "STRING")) table.schema = new_schema - table = client.update_table(table, ['schema']) # API request + table = client.update_table(table, ["schema"]) # API request assert len(table.schema) == len(original_schema) + 1 == len(new_schema) # [END bigquery_add_empty_column] -@pytest.mark.skip(reason=( - 'update_table() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589')) +@pytest.mark.skip( + reason=( + "update_table() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" + ) +) def test_relax_column(client, to_delete): """Updates a schema field from required to nullable.""" - dataset_id = 'relax_column_dataset_{}'.format(_millis()) - table_id = 'relax_column_table_{}'.format(_millis()) + dataset_id = "relax_column_dataset_{}".format(_millis()) + table_id = "relax_column_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -958,44 +999,48 @@ def test_relax_column(client, to_delete): # table_id = 'my_table' original_schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), ] table_ref = client.dataset(dataset_id).table(table_id) table = bigquery.Table(table_ref, schema=original_schema) table = client.create_table(table) - assert all(field.mode == 'REQUIRED' for field in table.schema) + assert all(field.mode == "REQUIRED" for field in table.schema) # SchemaField properties cannot be edited after initialization. # To make changes, construct new SchemaField objects. 
relaxed_schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('age', 'INTEGER', mode='NULLABLE'), + bigquery.SchemaField("full_name", "STRING", mode="NULLABLE"), + bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"), ] table.schema = relaxed_schema - table = client.update_table(table, ['schema']) + table = client.update_table(table, ["schema"]) - assert all(field.mode == 'NULLABLE' for field in table.schema) + assert all(field.mode == "NULLABLE" for field in table.schema) # [END bigquery_relax_column] -@pytest.mark.skip(reason=( - 'update_table() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589')) +@pytest.mark.skip( + reason=( + "update_table() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" + ) +) def test_update_table_cmek(client, to_delete): """Patch a table's metadata.""" - dataset_id = 'update_table_cmek_{}'.format(_millis()) - table_id = 'update_table_cmek_{}'.format(_millis()) + dataset_id = "update_table_cmek_{}".format(_millis()) + table_id = "update_table_cmek_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) table = bigquery.Table(dataset.table(table_id)) - original_kms_key_name = ( - 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format( - 'cloud-samples-tests', 'us-central1', 'test', 'test')) + original_kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + "cloud-samples-tests", "us-central1", "test", "test" + ) table.encryption_configuration = bigquery.EncryptionConfiguration( - kms_key_name=original_kms_key_name) + kms_key_name=original_kms_key_name + ) table = client.create_table(table) # [START bigquery_update_table_cmek] @@ -1007,13 +1052,14 @@ def test_update_table_cmek(client, to_delete): # Set a new encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. 
updated_kms_key_name = ( - 'projects/cloud-samples-tests/locations/us-central1/' - 'keyRings/test/cryptoKeys/otherkey') + "projects/cloud-samples-tests/locations/us-central1/" + "keyRings/test/cryptoKeys/otherkey" + ) table.encryption_configuration = bigquery.EncryptionConfiguration( - kms_key_name=updated_kms_key_name) + kms_key_name=updated_kms_key_name + ) - table = client.update_table( - table, ['encryption_configuration']) # API request + table = client.update_table(table, ["encryption_configuration"]) # API request assert table.encryption_configuration.kms_key_name == updated_kms_key_name assert original_kms_key_name != updated_kms_key_name @@ -1027,8 +1073,8 @@ def test_browse_table_data(client, to_delete, capsys): # from google.cloud import bigquery # client = bigquery.Client() - dataset_ref = client.dataset('samples', project='bigquery-public-data') - table_ref = dataset_ref.table('shakespeare') + dataset_ref = client.dataset("samples", project="bigquery-public-data") + table_ref = dataset_ref.table("shakespeare") table = client.get_table(table_ref) # API call # Load all rows from a table @@ -1049,24 +1095,27 @@ def test_browse_table_data(client, to_delete, capsys): rows = client.list_rows(table, start_index=10, max_results=10) # Print row data in tabular format - format_string = '{!s:<16} ' * len(rows.schema) + format_string = "{!s:<16} " * len(rows.schema) field_names = [field.name for field in rows.schema] print(format_string.format(*field_names)) # prints column headers for row in rows: - print(format_string.format(*row)) # prints row data + print(format_string.format(*row)) # prints row data # [END bigquery_browse_table] out, err = capsys.readouterr() - out = list(filter(bool, out.split('\n'))) # list of non-blank lines + out = list(filter(bool, out.split("\n"))) # list of non-blank lines assert len(out) == 11 -@pytest.mark.skip(reason=( - 'update_table() is flaky ' - 'https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589')) +@pytest.mark.skip( + reason=( + "update_table() is flaky " + "https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5589" + ) +) def test_manage_views(client, to_delete): project = client.project - source_dataset_id = 'source_dataset_{}'.format(_millis()) + source_dataset_id = "source_dataset_{}".format(_millis()) source_dataset_ref = client.dataset(source_dataset_id) source_dataset = bigquery.Dataset(source_dataset_ref) source_dataset = client.create_dataset(source_dataset) @@ -1074,17 +1123,18 @@ def test_manage_views(client, to_delete): job_config = bigquery.LoadJobConfig() job_config.schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] job_config.skip_leading_rows = 1 - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv' - source_table_id = 'us_states' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" + source_table_id = "us_states" load_job = client.load_table_from_uri( - uri, source_dataset.table(source_table_id), job_config=job_config) + uri, source_dataset.table(source_table_id), job_config=job_config + ) load_job.result() - shared_dataset_id = 'shared_dataset_{}'.format(_millis()) + shared_dataset_id = "shared_dataset_{}".format(_millis()) shared_dataset_ref = client.dataset(shared_dataset_id) shared_dataset = bigquery.Dataset(shared_dataset_ref) shared_dataset = client.create_dataset(shared_dataset) @@ -1101,15 +1151,13 @@ def test_manage_views(client, 
to_delete): # This example shows how to create a shared view of a source table of # US States. The source table contains all 50 states, while the view will # contain only states with names starting with 'W'. - view_ref = shared_dataset_ref.table('my_shared_view') + view_ref = shared_dataset_ref.table("my_shared_view") view = bigquery.Table(view_ref) - sql_template = ( - 'SELECT name, post_abbr FROM `{}.{}.{}` WHERE name LIKE "W%"') - view.view_query = sql_template.format( - project, source_dataset_id, source_table_id) + sql_template = 'SELECT name, post_abbr FROM `{}.{}.{}` WHERE name LIKE "W%"' + view.view_query = sql_template.format(project, source_dataset_id, source_table_id) view = client.create_table(view) # API request - print('Successfully created view at {}'.format(view.full_table_id)) + print("Successfully created view at {}".format(view.full_table_id)) # [END bigquery_create_view] # [START bigquery_update_view_query] @@ -1123,13 +1171,11 @@ def test_manage_views(client, to_delete): # This example shows how to update a shared view of a source table of # US States. The view's query will be updated to contain only states with # names starting with 'M'. - view_ref = shared_dataset_ref.table('my_shared_view') + view_ref = shared_dataset_ref.table("my_shared_view") view = bigquery.Table(view_ref) - sql_template = ( - 'SELECT name, post_abbr FROM `{}.{}.{}` WHERE name LIKE "M%"') - view.view_query = sql_template.format( - project, source_dataset_id, source_table_id) - view = client.update_table(view, ['view_query']) # API request + sql_template = 'SELECT name, post_abbr FROM `{}.{}.{}` WHERE name LIKE "M%"' + view.view_query = sql_template.format(project, source_dataset_id, source_table_id) + view = client.update_table(view, ["view_query"]) # API request # [END bigquery_update_view_query] # [START bigquery_get_view] @@ -1137,16 +1183,16 @@ def test_manage_views(client, to_delete): # client = bigquery.Client() # shared_dataset_id = 'my_shared_dataset' - view_ref = client.dataset(shared_dataset_id).table('my_shared_view') + view_ref = client.dataset(shared_dataset_id).table("my_shared_view") view = client.get_table(view_ref) # API Request # Display view properties - print('View at {}'.format(view.full_table_id)) - print('View Query:\n{}'.format(view.view_query)) + print("View at {}".format(view.full_table_id)) + print("View Query:\n{}".format(view.view_query)) # [END bigquery_get_view] assert view.view_query is not None - analyst_group_email = 'example-analyst-group@google.com' + analyst_group_email = "example-analyst-group@google.com" # [START bigquery_grant_view_access] # from google.cloud import bigquery # client = bigquery.Client() @@ -1155,42 +1201,44 @@ def test_manage_views(client, to_delete): # shared_dataset_id = 'my_shared_dataset' # analyst_group_email = 'data_analysts@example.com' shared_dataset = client.get_dataset( - client.dataset(shared_dataset_id)) # API request + client.dataset(shared_dataset_id) + ) # API request access_entries = shared_dataset.access_entries access_entries.append( - bigquery.AccessEntry('READER', 'groupByEmail', analyst_group_email) + bigquery.AccessEntry("READER", "groupByEmail", analyst_group_email) ) shared_dataset.access_entries = access_entries shared_dataset = client.update_dataset( - shared_dataset, ['access_entries']) # API request + shared_dataset, ["access_entries"] + ) # API request # Authorize the view to access the source dataset # project = 'my-project' # source_dataset_id = 'my_source_dataset' source_dataset = client.get_dataset( - 
client.dataset(source_dataset_id)) # API request + client.dataset(source_dataset_id) + ) # API request view_reference = { - 'projectId': project, - 'datasetId': shared_dataset_id, - 'tableId': 'my_shared_view', + "projectId": project, + "datasetId": shared_dataset_id, + "tableId": "my_shared_view", } access_entries = source_dataset.access_entries - access_entries.append( - bigquery.AccessEntry(None, 'view', view_reference) - ) + access_entries.append(bigquery.AccessEntry(None, "view", view_reference)) source_dataset.access_entries = access_entries source_dataset = client.update_dataset( - source_dataset, ['access_entries']) # API request + source_dataset, ["access_entries"] + ) # API request # [END bigquery_grant_view_access] def test_table_insert_rows(client, to_delete): """Insert / fetch table data.""" - dataset_id = 'table_insert_rows_dataset_{}'.format(_millis()) - table_id = 'table_insert_rows_table_{}'.format(_millis()) + dataset_id = "table_insert_rows_dataset_{}".format(_millis()) + table_id = "table_insert_rows_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset) - dataset.location = 'US' + dataset.location = "US" to_delete.append(dataset) table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) @@ -1206,10 +1254,7 @@ def test_table_insert_rows(client, to_delete): # table_ref = client.dataset(dataset_id).table(table_id) # table = client.get_table(table_ref) # API request - rows_to_insert = [ - (u'Phred Phlyntstone', 32), - (u'Wylma Phlyntstone', 29), - ] + rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] errors = client.insert_rows(table, rows_to_insert) # API request @@ -1219,15 +1264,16 @@ def test_table_insert_rows(client, to_delete): def test_load_table_from_file(client, to_delete): """Upload table data from a CSV file.""" - dataset_id = 'load_table_from_file_dataset_{}'.format(_millis()) - table_id = 'load_table_from_file_table_{}'.format(_millis()) + dataset_id = "load_table_from_file_dataset_{}".format(_millis()) + table_id = "load_table_from_file_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset.location = 'US' + dataset.location = "US" client.create_dataset(dataset) to_delete.append(dataset) snippets_dir = os.path.abspath(os.path.dirname(__file__)) filename = os.path.join( - snippets_dir, '..', '..', 'bigquery', 'tests', 'data', 'people.csv') + snippets_dir, "..", "..", "bigquery", "tests", "data", "people.csv" + ) # [START bigquery_load_from_file] # from google.cloud import bigquery @@ -1243,17 +1289,17 @@ def test_load_table_from_file(client, to_delete): job_config.skip_leading_rows = 1 job_config.autodetect = True - with open(filename, 'rb') as source_file: + with open(filename, "rb") as source_file: job = client.load_table_from_file( source_file, table_ref, - location='US', # Must match the destination dataset location. - job_config=job_config) # API request + location="US", # Must match the destination dataset location. + job_config=job_config, + ) # API request job.result() # Waits for table load to complete. 
- print('Loaded {} rows into {}:{}.'.format( - job.output_rows, dataset_id, table_id)) + print("Loaded {} rows into {}:{}.".format(job.output_rows, dataset_id, table_id)) # [END bigquery_load_from_file] table = client.get_table(table_ref) @@ -1261,14 +1307,14 @@ def test_load_table_from_file(client, to_delete): assert len(rows) == 2 # Order is not preserved, so compare individually - row1 = bigquery.Row(('Wylma Phlyntstone', 29), {'full_name': 0, 'age': 1}) + row1 = bigquery.Row(("Wylma Phlyntstone", 29), {"full_name": 0, "age": 1}) assert row1 in rows - row2 = bigquery.Row(('Phred Phlyntstone', 32), {'full_name': 0, 'age': 1}) + row2 = bigquery.Row(("Phred Phlyntstone", 32), {"full_name": 0, "age": 1}) assert row2 in rows def test_load_table_from_uri_csv(client, to_delete, capsys): - dataset_id = 'load_table_from_uri_csv_{}'.format(_millis()) + dataset_id = "load_table_from_uri_csv_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -1281,35 +1327,34 @@ def test_load_table_from_uri_csv(client, to_delete, capsys): dataset_ref = client.dataset(dataset_id) job_config = bigquery.LoadJobConfig() job_config.schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] job_config.skip_leading_rows = 1 # The source format defaults to CSV, so the line below is optional. job_config.source_format = bigquery.SourceFormat.CSV - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" load_job = client.load_table_from_uri( - uri, - dataset_ref.table('us_states'), - job_config=job_config) # API request - print('Starting job {}'.format(load_job.job_id)) + uri, dataset_ref.table("us_states"), job_config=job_config + ) # API request + print("Starting job {}".format(load_job.job_id)) load_job.result() # Waits for table load to complete. - print('Job finished.') + print("Job finished.") - destination_table = client.get_table(dataset_ref.table('us_states')) - print('Loaded {} rows.'.format(destination_table.num_rows)) + destination_table = client.get_table(dataset_ref.table("us_states")) + print("Loaded {} rows.".format(destination_table.num_rows)) # [END bigquery_load_table_gcs_csv] out, _ = capsys.readouterr() - assert 'Loaded 50 rows.' in out + assert "Loaded 50 rows." 
in out def test_load_table_from_uri_json(client, to_delete, capsys): - dataset_id = 'load_table_from_uri_json_{}'.format(_millis()) + dataset_id = "load_table_from_uri_json_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset.location = 'US' + dataset.location = "US" client.create_dataset(dataset) to_delete.append(dataset) @@ -1321,34 +1366,35 @@ def test_load_table_from_uri_json(client, to_delete, capsys): dataset_ref = client.dataset(dataset_id) job_config = bigquery.LoadJobConfig() job_config.schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" load_job = client.load_table_from_uri( uri, - dataset_ref.table('us_states'), - location='US', # Location must match that of the destination dataset. - job_config=job_config) # API request - print('Starting job {}'.format(load_job.job_id)) + dataset_ref.table("us_states"), + location="US", # Location must match that of the destination dataset. + job_config=job_config, + ) # API request + print("Starting job {}".format(load_job.job_id)) load_job.result() # Waits for table load to complete. - print('Job finished.') + print("Job finished.") - destination_table = client.get_table(dataset_ref.table('us_states')) - print('Loaded {} rows.'.format(destination_table.num_rows)) + destination_table = client.get_table(dataset_ref.table("us_states")) + print("Loaded {} rows.".format(destination_table.num_rows)) # [END bigquery_load_table_gcs_json] out, _ = capsys.readouterr() - assert 'Loaded 50 rows.' in out + assert "Loaded 50 rows." in out def test_load_table_from_uri_cmek(client, to_delete): - dataset_id = 'load_table_from_uri_cmek_{}'.format(_millis()) + dataset_id = "load_table_from_uri_cmek_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset.location = 'US' + dataset.location = "US" client.create_dataset(dataset) to_delete.append(dataset) @@ -1364,31 +1410,32 @@ def test_load_table_from_uri_cmek(client, to_delete): # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. - kms_key_name = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format( - 'cloud-samples-tests', 'us-central1', 'test', 'test') - encryption_config = bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name) + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + "cloud-samples-tests", "us-central1", "test", "test" + ) + encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config.destination_encryption_configuration = encryption_config - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" load_job = client.load_table_from_uri( uri, - dataset_ref.table('us_states'), - location='US', # Location must match that of the destination dataset. - job_config=job_config) # API request + dataset_ref.table("us_states"), + location="US", # Location must match that of the destination dataset. + job_config=job_config, + ) # API request - assert load_job.job_type == 'load' + assert load_job.job_type == "load" load_job.result() # Waits for table load to complete. 
- assert load_job.state == 'DONE' - table = client.get_table(dataset_ref.table('us_states')) + assert load_job.state == "DONE" + table = client.get_table(dataset_ref.table("us_states")) assert table.encryption_configuration.kms_key_name == kms_key_name # [END bigquery_load_table_gcs_json_cmek] def test_load_table_from_uri_parquet(client, to_delete, capsys): - dataset_id = 'load_table_from_uri_parquet_{}'.format(_millis()) + dataset_id = "load_table_from_uri_parquet_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -1401,27 +1448,26 @@ def test_load_table_from_uri_parquet(client, to_delete, capsys): dataset_ref = client.dataset(dataset_id) job_config = bigquery.LoadJobConfig() job_config.source_format = bigquery.SourceFormat.PARQUET - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.parquet' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet" load_job = client.load_table_from_uri( - uri, - dataset_ref.table('us_states'), - job_config=job_config) # API request - print('Starting job {}'.format(load_job.job_id)) + uri, dataset_ref.table("us_states"), job_config=job_config + ) # API request + print("Starting job {}".format(load_job.job_id)) load_job.result() # Waits for table load to complete. - print('Job finished.') + print("Job finished.") - destination_table = client.get_table(dataset_ref.table('us_states')) - print('Loaded {} rows.'.format(destination_table.num_rows)) + destination_table = client.get_table(dataset_ref.table("us_states")) + print("Loaded {} rows.".format(destination_table.num_rows)) # [END bigquery_load_table_gcs_parquet] out, _ = capsys.readouterr() - assert 'Loaded 50 rows.' in out + assert "Loaded 50 rows." in out def test_load_table_from_uri_orc(client, to_delete, capsys): - dataset_id = 'load_table_from_uri_orc_{}'.format(_millis()) + dataset_id = "load_table_from_uri_orc_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -1434,23 +1480,22 @@ def test_load_table_from_uri_orc(client, to_delete, capsys): dataset_ref = client.dataset(dataset_id) job_config = bigquery.LoadJobConfig() job_config.source_format = bigquery.SourceFormat.ORC - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.orc' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.orc" load_job = client.load_table_from_uri( - uri, - dataset_ref.table('us_states'), - job_config=job_config) # API request - print('Starting job {}'.format(load_job.job_id)) + uri, dataset_ref.table("us_states"), job_config=job_config + ) # API request + print("Starting job {}".format(load_job.job_id)) load_job.result() # Waits for table load to complete. - print('Job finished.') + print("Job finished.") - destination_table = client.get_table(dataset_ref.table('us_states')) - print('Loaded {} rows.'.format(destination_table.num_rows)) + destination_table = client.get_table(dataset_ref.table("us_states")) + print("Loaded {} rows.".format(destination_table.num_rows)) # [END bigquery_load_table_gcs_orc] out, _ = capsys.readouterr() - assert 'Loaded 50 rows.' in out + assert "Loaded 50 rows." in out def test_load_table_from_uri_autodetect(client, to_delete, capsys): @@ -1465,7 +1510,7 @@ def test_load_table_from_uri_autodetect(client, to_delete, capsys): followed by more shared code. Note that only the last format in the format-specific code section will be tested in this test. 
""" - dataset_id = 'load_table_from_uri_auto_{}'.format(_millis()) + dataset_id = "load_table_from_uri_auto_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -1488,35 +1533,34 @@ def test_load_table_from_uri_autodetect(client, to_delete, capsys): job_config.skip_leading_rows = 1 # The source format defaults to CSV, so the line below is optional. job_config.source_format = bigquery.SourceFormat.CSV - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" # [END bigquery_load_table_gcs_csv_autodetect] # unset csv-specific attribute - del job_config._properties['load']['skipLeadingRows'] + del job_config._properties["load"]["skipLeadingRows"] # [START bigquery_load_table_gcs_json_autodetect] job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" # [END bigquery_load_table_gcs_json_autodetect] # Shared code # [START bigquery_load_table_gcs_csv_autodetect] # [START bigquery_load_table_gcs_json_autodetect] load_job = client.load_table_from_uri( - uri, - dataset_ref.table('us_states'), - job_config=job_config) # API request - print('Starting job {}'.format(load_job.job_id)) + uri, dataset_ref.table("us_states"), job_config=job_config + ) # API request + print("Starting job {}".format(load_job.job_id)) load_job.result() # Waits for table load to complete. - print('Job finished.') + print("Job finished.") - destination_table = client.get_table(dataset_ref.table('us_states')) - print('Loaded {} rows.'.format(destination_table.num_rows)) + destination_table = client.get_table(dataset_ref.table("us_states")) + print("Loaded {} rows.".format(destination_table.num_rows)) # [END bigquery_load_table_gcs_csv_autodetect] # [END bigquery_load_table_gcs_json_autodetect] out, _ = capsys.readouterr() - assert 'Loaded 50 rows.' in out + assert "Loaded 50 rows." in out def test_load_table_from_uri_truncate(client, to_delete, capsys): @@ -1531,20 +1575,19 @@ def test_load_table_from_uri_truncate(client, to_delete, capsys): followed by more shared code. Note that only the last format in the format-specific code section will be tested in this test. """ - dataset_id = 'load_table_from_uri_trunc_{}'.format(_millis()) + dataset_id = "load_table_from_uri_trunc_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) job_config = bigquery.LoadJobConfig() job_config.schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] - table_ref = dataset.table('us_states') - body = six.BytesIO(b'Washington,WA') - client.load_table_from_file( - body, table_ref, job_config=job_config).result() + table_ref = dataset.table("us_states") + body = six.BytesIO(b"Washington,WA") + client.load_table_from_file(body, table_ref, job_config=job_config).result() # Shared code # [START bigquery_load_table_gcs_csv_truncate] @@ -1570,24 +1613,24 @@ def test_load_table_from_uri_truncate(client, to_delete, capsys): job_config.skip_leading_rows = 1 # The source format defaults to CSV, so the line below is optional. 
job_config.source_format = bigquery.SourceFormat.CSV - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv" # [END bigquery_load_table_gcs_csv_truncate] # unset csv-specific attribute - del job_config._properties['load']['skipLeadingRows'] + del job_config._properties["load"]["skipLeadingRows"] # [START bigquery_load_table_gcs_json_truncate] job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json" # [END bigquery_load_table_gcs_json_truncate] # [START bigquery_load_table_gcs_parquet_truncate] job_config.source_format = bigquery.SourceFormat.PARQUET - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.parquet' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.parquet" # [END bigquery_load_table_gcs_parquet_truncate] # [START bigquery_load_table_gcs_orc_truncate] job_config.source_format = bigquery.SourceFormat.ORC - uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.orc' + uri = "gs://cloud-samples-data/bigquery/us-states/us-states.orc" # [END bigquery_load_table_gcs_orc_truncate] # Shared code @@ -1596,40 +1639,38 @@ def test_load_table_from_uri_truncate(client, to_delete, capsys): # [START bigquery_load_table_gcs_parquet_truncate] # [START bigquery_load_table_gcs_orc_truncate] load_job = client.load_table_from_uri( - uri, - table_ref, - job_config=job_config) # API request - print('Starting job {}'.format(load_job.job_id)) + uri, table_ref, job_config=job_config + ) # API request + print("Starting job {}".format(load_job.job_id)) load_job.result() # Waits for table load to complete. - print('Job finished.') + print("Job finished.") destination_table = client.get_table(table_ref) - print('Loaded {} rows.'.format(destination_table.num_rows)) + print("Loaded {} rows.".format(destination_table.num_rows)) # [END bigquery_load_table_gcs_csv_truncate] # [END bigquery_load_table_gcs_json_truncate] # [END bigquery_load_table_gcs_parquet_truncate] # [END bigquery_load_table_gcs_orc_truncate] out, _ = capsys.readouterr() - assert 'Loaded 50 rows.' in out + assert "Loaded 50 rows." 
in out def test_load_table_add_column(client, to_delete): - dataset_id = 'load_table_add_column_{}'.format(_millis()) + dataset_id = "load_table_add_column_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" dataset = client.create_dataset(dataset) to_delete.append(dataset) snippets_dir = os.path.abspath(os.path.dirname(__file__)) filepath = os.path.join( - snippets_dir, '..', '..', 'bigquery', 'tests', 'data', 'people.csv') - table_ref = dataset_ref.table('my_table') - old_schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - ] + snippets_dir, "..", "..", "bigquery", "tests", "data", "people.csv" + ) + table_ref = dataset_ref.table("my_table") + old_schema = [bigquery.SchemaField("full_name", "STRING", mode="REQUIRED")] table = client.create_table(bigquery.Table(table_ref, schema=old_schema)) # [START bigquery_add_column_load_append] @@ -1639,7 +1680,7 @@ def test_load_table_add_column(client, to_delete): # filepath = 'path/to/your_file.csv' # Retrieves the destination table and checks the length of the schema - table_id = 'my_table' + table_id = "my_table" table_ref = dataset_ref.table(table_id) table = client.get_table(table_ref) print("Table {} contains {} columns.".format(table_id, len(table.schema))) @@ -1649,54 +1690,58 @@ def test_load_table_add_column(client, to_delete): job_config = bigquery.LoadJobConfig() job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION, + bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION ] # In this example, the existing table contains only the 'full_name' column. # 'REQUIRED' fields cannot be added to an existing schema, so the # additional column must be 'NULLABLE'. job_config.schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='NULLABLE'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"), ] job_config.source_format = bigquery.SourceFormat.CSV job_config.skip_leading_rows = 1 - with open(filepath, 'rb') as source_file: + with open(filepath, "rb") as source_file: job = client.load_table_from_file( source_file, table_ref, - location='US', # Must match the destination dataset location. - job_config=job_config) # API request + location="US", # Must match the destination dataset location. + job_config=job_config, + ) # API request job.result() # Waits for table load to complete. 
- print('Loaded {} rows into {}:{}.'.format( - job.output_rows, dataset_id, table_ref.table_id)) + print( + "Loaded {} rows into {}:{}.".format( + job.output_rows, dataset_id, table_ref.table_id + ) + ) # Checks the updated length of the schema table = client.get_table(table) - print("Table {} now contains {} columns.".format( - table_id, len(table.schema))) + print("Table {} now contains {} columns.".format(table_id, len(table.schema))) # [END bigquery_add_column_load_append] assert len(table.schema) == 2 assert table.num_rows > 0 def test_load_table_relax_column(client, to_delete): - dataset_id = 'load_table_relax_column_{}'.format(_millis()) + dataset_id = "load_table_relax_column_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" dataset = client.create_dataset(dataset) to_delete.append(dataset) snippets_dir = os.path.abspath(os.path.dirname(__file__)) filepath = os.path.join( - snippets_dir, '..', '..', 'bigquery', 'tests', 'data', 'people.csv') - table_ref = dataset_ref.table('my_table') + snippets_dir, "..", "..", "bigquery", "tests", "data", "people.csv" + ) + table_ref = dataset_ref.table("my_table") old_schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), - bigquery.SchemaField('favorite_color', 'STRING', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + bigquery.SchemaField("favorite_color", "STRING", mode="REQUIRED"), ] table = client.create_table(bigquery.Table(table_ref, schema=old_schema)) @@ -1707,60 +1752,60 @@ def test_load_table_relax_column(client, to_delete): # filepath = 'path/to/your_file.csv' # Retrieves the destination table and checks the number of required fields - table_id = 'my_table' + table_id = "my_table" table_ref = dataset_ref.table(table_id) table = client.get_table(table_ref) - original_required_fields = sum( - field.mode == 'REQUIRED' for field in table.schema) + original_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) # In this example, the existing table has 3 required fields. - print("{} fields in the schema are required.".format( - original_required_fields)) + print("{} fields in the schema are required.".format(original_required_fields)) # Configures the load job to append the data to a destination table, # allowing field relaxation job_config = bigquery.LoadJobConfig() job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION, + bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION ] # In this example, the existing table contains three required fields # ('full_name', 'age', and 'favorite_color'), while the data to load # contains only the first two fields. job_config.schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), ] job_config.source_format = bigquery.SourceFormat.CSV job_config.skip_leading_rows = 1 - with open(filepath, 'rb') as source_file: + with open(filepath, "rb") as source_file: job = client.load_table_from_file( source_file, table_ref, - location='US', # Must match the destination dataset location. 
- job_config=job_config) # API request + location="US", # Must match the destination dataset location. + job_config=job_config, + ) # API request job.result() # Waits for table load to complete. - print('Loaded {} rows into {}:{}.'.format( - job.output_rows, dataset_id, table_ref.table_id)) + print( + "Loaded {} rows into {}:{}.".format( + job.output_rows, dataset_id, table_ref.table_id + ) + ) # Checks the updated number of required fields table = client.get_table(table) - current_required_fields = sum( - field.mode == 'REQUIRED' for field in table.schema) - print("{} fields in the schema are now required.".format( - current_required_fields)) + current_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) + print("{} fields in the schema are now required.".format(current_required_fields)) # [END bigquery_relax_column_load_append] assert original_required_fields - current_required_fields == 1 assert len(table.schema) == 3 - assert table.schema[2].mode == 'NULLABLE' + assert table.schema[2].mode == "NULLABLE" assert table.num_rows > 0 def test_copy_table(client, to_delete): - dataset_id = 'copy_table_dataset_{}'.format(_millis()) + dataset_id = "copy_table_dataset_{}".format(_millis()) dest_dataset = bigquery.Dataset(client.dataset(dataset_id)) - dest_dataset.location = 'US' + dest_dataset.location = "US" dest_dataset = client.create_dataset(dest_dataset) to_delete.append(dest_dataset) @@ -1768,45 +1813,46 @@ def test_copy_table(client, to_delete): # from google.cloud import bigquery # client = bigquery.Client() - source_dataset = client.dataset('samples', project='bigquery-public-data') - source_table_ref = source_dataset.table('shakespeare') + source_dataset = client.dataset("samples", project="bigquery-public-data") + source_table_ref = source_dataset.table("shakespeare") # dataset_id = 'my_dataset' - dest_table_ref = client.dataset(dataset_id).table('destination_table') + dest_table_ref = client.dataset(dataset_id).table("destination_table") job = client.copy_table( source_table_ref, dest_table_ref, # Location must match that of the source and destination tables. - location='US') # API request + location="US", + ) # API request job.result() # Waits for job to complete. 
- assert job.state == 'DONE' + assert job.state == "DONE" dest_table = client.get_table(dest_table_ref) # API request assert dest_table.num_rows > 0 # [END bigquery_copy_table] def test_copy_table_multiple_source(client, to_delete): - dest_dataset_id = 'dest_dataset_{}'.format(_millis()) + dest_dataset_id = "dest_dataset_{}".format(_millis()) dest_dataset = bigquery.Dataset(client.dataset(dest_dataset_id)) - dest_dataset.location = 'US' + dest_dataset.location = "US" dest_dataset = client.create_dataset(dest_dataset) to_delete.append(dest_dataset) - source_dataset_id = 'source_dataset_{}'.format(_millis()) + source_dataset_id = "source_dataset_{}".format(_millis()) source_dataset = bigquery.Dataset(client.dataset(source_dataset_id)) - source_dataset.location = 'US' + source_dataset.location = "US" source_dataset = client.create_dataset(source_dataset) to_delete.append(source_dataset) schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] - table_data = {'table1': b'Washington,WA', 'table2': b'California,CA'} + table_data = {"table1": b"Washington,WA", "table2": b"California,CA"} for table_id, data in table_data.items(): table_ref = source_dataset.table(table_id) job_config = bigquery.LoadJobConfig() @@ -1816,8 +1862,9 @@ def test_copy_table_multiple_source(client, to_delete): body, table_ref, # Location must match that of the destination dataset. - location='US', - job_config=job_config).result() + location="US", + job_config=job_config, + ).result() # [START bigquery_copy_table_multiple_source] # from google.cloud import bigquery @@ -1825,18 +1872,19 @@ def test_copy_table_multiple_source(client, to_delete): # source_dataset_id = 'my_source_dataset' # dest_dataset_id = 'my_destination_dataset' - table1_ref = client.dataset(source_dataset_id).table('table1') - table2_ref = client.dataset(source_dataset_id).table('table2') - dest_table_ref = client.dataset(dest_dataset_id).table('destination_table') + table1_ref = client.dataset(source_dataset_id).table("table1") + table2_ref = client.dataset(source_dataset_id).table("table2") + dest_table_ref = client.dataset(dest_dataset_id).table("destination_table") job = client.copy_table( [table1_ref, table2_ref], dest_table_ref, # Location must match that of the source and destination tables. - location='US') # API request + location="US", + ) # API request job.result() # Waits for job to complete. 
- assert job.state == 'DONE' + assert job.state == "DONE" dest_table = client.get_table(dest_table_ref) # API request assert dest_table.num_rows > 0 # [END bigquery_copy_table_multiple_source] @@ -1845,9 +1893,9 @@ def test_copy_table_multiple_source(client, to_delete): def test_copy_table_cmek(client, to_delete): - dataset_id = 'copy_table_cmek_{}'.format(_millis()) + dataset_id = "copy_table_cmek_{}".format(_millis()) dest_dataset = bigquery.Dataset(client.dataset(dataset_id)) - dest_dataset.location = 'US' + dest_dataset.location = "US" dest_dataset = client.create_dataset(dest_dataset) to_delete.append(dest_dataset) @@ -1855,20 +1903,19 @@ def test_copy_table_cmek(client, to_delete): # from google.cloud import bigquery # client = bigquery.Client() - source_dataset = bigquery.DatasetReference( - 'bigquery-public-data', 'samples') - source_table_ref = source_dataset.table('shakespeare') + source_dataset = bigquery.DatasetReference("bigquery-public-data", "samples") + source_table_ref = source_dataset.table("shakespeare") # dataset_id = 'my_dataset' dest_dataset_ref = client.dataset(dataset_id) - dest_table_ref = dest_dataset_ref.table('destination_table') + dest_table_ref = dest_dataset_ref.table("destination_table") # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. - kms_key_name = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format( - 'cloud-samples-tests', 'us-central1', 'test', 'test') - encryption_config = bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name) + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + "cloud-samples-tests", "us-central1", "test", "test" + ) + encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config = bigquery.CopyJobConfig() job_config.destination_encryption_configuration = encryption_config @@ -1876,18 +1923,19 @@ def test_copy_table_cmek(client, to_delete): source_table_ref, dest_table_ref, # Location must match that of the source and destination tables. - location='US', - job_config=job_config) # API request + location="US", + job_config=job_config, + ) # API request job.result() # Waits for job to complete. - assert job.state == 'DONE' + assert job.state == "DONE" dest_table = client.get_table(dest_table_ref) assert dest_table.encryption_configuration.kms_key_name == kms_key_name # [END bigquery_copy_table_cmek] def test_extract_table(client, to_delete): - bucket_name = 'extract_shakespeare_{}'.format(_millis()) + bucket_name = "extract_shakespeare_{}".format(_millis()) storage_client = storage.Client() bucket = retry_storage_errors(storage_client.create_bucket)(bucket_name) to_delete.append(bucket) @@ -1896,11 +1944,11 @@ def test_extract_table(client, to_delete): # from google.cloud import bigquery # client = bigquery.Client() # bucket_name = 'my-bucket' - project = 'bigquery-public-data' - dataset_id = 'samples' - table_id = 'shakespeare' + project = "bigquery-public-data" + dataset_id = "samples" + table_id = "shakespeare" - destination_uri = 'gs://{}/{}'.format(bucket_name, 'shakespeare.csv') + destination_uri = "gs://{}/{}".format(bucket_name, "shakespeare.csv") dataset_ref = client.dataset(dataset_id, project=project) table_ref = dataset_ref.table(table_id) @@ -1908,21 +1956,23 @@ def test_extract_table(client, to_delete): table_ref, destination_uri, # Location must match that of the source table. 
- location='US') # API request + location="US", + ) # API request extract_job.result() # Waits for job to complete. - print('Exported {}:{}.{} to {}'.format( - project, dataset_id, table_id, destination_uri)) + print( + "Exported {}:{}.{} to {}".format(project, dataset_id, table_id, destination_uri) + ) # [END bigquery_extract_table] - blob = retry_storage_errors(bucket.get_blob)('shakespeare.csv') + blob = retry_storage_errors(bucket.get_blob)("shakespeare.csv") assert blob.exists assert blob.size > 0 to_delete.insert(0, blob) def test_extract_table_json(client, to_delete): - bucket_name = 'extract_shakespeare_json_{}'.format(_millis()) + bucket_name = "extract_shakespeare_json_{}".format(_millis()) storage_client = storage.Client() bucket = retry_storage_errors(storage_client.create_bucket)(bucket_name) to_delete.append(bucket) @@ -1932,30 +1982,30 @@ def test_extract_table_json(client, to_delete): # client = bigquery.Client() # bucket_name = 'my-bucket' - destination_uri = 'gs://{}/{}'.format(bucket_name, 'shakespeare.json') - dataset_ref = client.dataset('samples', project='bigquery-public-data') - table_ref = dataset_ref.table('shakespeare') + destination_uri = "gs://{}/{}".format(bucket_name, "shakespeare.json") + dataset_ref = client.dataset("samples", project="bigquery-public-data") + table_ref = dataset_ref.table("shakespeare") job_config = bigquery.job.ExtractJobConfig() - job_config.destination_format = ( - bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON) + job_config.destination_format = bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON extract_job = client.extract_table( table_ref, destination_uri, job_config=job_config, # Location must match that of the source table. - location='US') # API request + location="US", + ) # API request extract_job.result() # Waits for job to complete. # [END bigquery_extract_table_json] - blob = retry_storage_errors(bucket.get_blob)('shakespeare.json') + blob = retry_storage_errors(bucket.get_blob)("shakespeare.json") assert blob.exists assert blob.size > 0 to_delete.insert(0, blob) def test_extract_table_compressed(client, to_delete): - bucket_name = 'extract_shakespeare_compress_{}'.format(_millis()) + bucket_name = "extract_shakespeare_compress_{}".format(_millis()) storage_client = storage.Client() bucket = retry_storage_errors(storage_client.create_bucket)(bucket_name) to_delete.append(bucket) @@ -1965,9 +2015,9 @@ def test_extract_table_compressed(client, to_delete): # client = bigquery.Client() # bucket_name = 'my-bucket' - destination_uri = 'gs://{}/{}'.format(bucket_name, 'shakespeare.csv.gz') - dataset_ref = client.dataset('samples', project='bigquery-public-data') - table_ref = dataset_ref.table('shakespeare') + destination_uri = "gs://{}/{}".format(bucket_name, "shakespeare.csv.gz") + dataset_ref = client.dataset("samples", project="bigquery-public-data") + table_ref = dataset_ref.table("shakespeare") job_config = bigquery.job.ExtractJobConfig() job_config.compression = bigquery.Compression.GZIP @@ -1975,12 +2025,13 @@ def test_extract_table_compressed(client, to_delete): table_ref, destination_uri, # Location must match that of the source table. - location='US', - job_config=job_config) # API request + location="US", + job_config=job_config, + ) # API request extract_job.result() # Waits for job to complete. 
# [END bigquery_extract_table_compressed] - blob = retry_storage_errors(bucket.get_blob)('shakespeare.csv.gz') + blob = retry_storage_errors(bucket.get_blob)("shakespeare.csv.gz") assert blob.exists assert blob.size > 0 to_delete.insert(0, blob) @@ -1990,11 +2041,11 @@ def test_delete_table(client, to_delete): """Delete a table.""" from google.cloud.exceptions import NotFound - dataset_id = 'delete_table_dataset_{}'.format(_millis()) - table_id = 'delete_table_table_{}'.format(_millis()) + dataset_id = "delete_table_dataset_{}".format(_millis()) + table_id = "delete_table_table_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -2010,7 +2061,7 @@ def test_delete_table(client, to_delete): table_ref = client.dataset(dataset_id).table(table_id) client.delete_table(table_ref) # API request - print('Table {}:{} deleted.'.format(dataset_id, table_id)) + print("Table {}:{} deleted.".format(dataset_id, table_id)) # [END bigquery_delete_table] with pytest.raises(NotFound): @@ -2018,10 +2069,10 @@ def test_delete_table(client, to_delete): def test_undelete_table(client, to_delete): - dataset_id = 'undelete_table_dataset_{}'.format(_millis()) - table_id = 'undelete_table_table_{}'.format(_millis()) + dataset_id = "undelete_table_dataset_{}".format(_millis()) + table_id = "undelete_table_table_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset.location = 'US' + dataset.location = "US" dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -2057,11 +2108,11 @@ def test_undelete_table(client, to_delete): client.delete_table(table_ref) # API request # Construct the restore-from table ID using a snapshot decorator. - snapshot_table_id = '{}@{}'.format(table_id, snapshot_epoch) + snapshot_table_id = "{}@{}".format(table_id, snapshot_epoch) source_table_ref = client.dataset(dataset_id).table(snapshot_table_id) # Choose a new table ID for the recovered table data. - recovered_table_id = '{}_recovered'.format(table_id) + recovered_table_id = "{}_recovered".format(table_id) dest_table_ref = client.dataset(dataset_id).table(recovered_table_id) # Construct and run a copy job. @@ -2069,12 +2120,14 @@ def test_undelete_table(client, to_delete): source_table_ref, dest_table_ref, # Location must match that of the source and destination tables. - location='US') # API request + location="US", + ) # API request job.result() # Waits for job to complete. - print('Copied data from deleted table {} to {}'.format( - table_id, recovered_table_id)) + print( + "Copied data from deleted table {} to {}".format(table_id, recovered_table_id) + ) # [END bigquery_undelete_table] @@ -2086,17 +2139,19 @@ def test_client_query(client): # client = bigquery.Client() query = ( - 'SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` ' + "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` " 'WHERE state = "TX" ' - 'LIMIT 100') + "LIMIT 100" + ) query_job = client.query( query, # Location must match that of the dataset(s) referenced in the query. 
- location='US') # API request - starts the query + location="US", + ) # API request - starts the query for row in query_job: # API request - fetches results # Row values can be accessed by field name or index - assert row[0] == row.name == row['name'] + assert row[0] == row.name == row["name"] print(row) # [END bigquery_query] @@ -2108,9 +2163,10 @@ def test_client_query_legacy_sql(client): # client = bigquery.Client() query = ( - 'SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] ' + "SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] " 'WHERE state = "TX" ' - 'LIMIT 100') + "LIMIT 100" + ) # Set use_legacy_sql to True to use legacy SQL syntax. job_config = bigquery.QueryJobConfig() @@ -2119,8 +2175,9 @@ def test_client_query_legacy_sql(client): query_job = client.query( query, # Location must match that of the dataset(s) referenced in the query. - location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query # Print the results. for row in query_job: # API request - fetches results @@ -2134,7 +2191,7 @@ def test_manage_job(client): FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus; """ - location = 'us' + location = "us" job = client.query(sql, location=location) job_id = job.job_id @@ -2158,19 +2215,22 @@ def test_manage_job(client): job = client.get_job(job_id, location=location) # API request # Print selected job properties - print('Details for job {} running in {}:'.format(job_id, location)) - print('\tType: {}\n\tState: {}\n\tCreated: {}'.format( - job.job_type, job.state, job.created)) + print("Details for job {} running in {}:".format(job_id, location)) + print( + "\tType: {}\n\tState: {}\n\tCreated: {}".format( + job.job_type, job.state, job.created + ) + ) # [END bigquery_get_job] def test_client_query_destination_table(client, to_delete): """Run a query""" - dataset_id = 'query_destination_table_{}'.format(_millis()) + dataset_id = "query_destination_table_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) to_delete.append(dataset_ref) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" client.create_dataset(dataset) # [START bigquery_query_destination_table] @@ -2180,7 +2240,7 @@ def test_client_query_destination_table(client, to_delete): job_config = bigquery.QueryJobConfig() # Set the destination table - table_ref = client.dataset(dataset_id).table('your_table_id') + table_ref = client.dataset(dataset_id).table("your_table_id") job_config.destination = table_ref sql = """ SELECT corpus @@ -2193,20 +2253,21 @@ def test_client_query_destination_table(client, to_delete): sql, # Location must match that of the dataset(s) referenced in the query # and of the destination table. 
- location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query query_job.result() # Waits for the query to finish - print('Query results loaded to table {}'.format(table_ref.path)) + print("Query results loaded to table {}".format(table_ref.path)) # [END bigquery_query_destination_table] def test_client_query_destination_table_legacy(client, to_delete): - dataset_id = 'query_destination_table_legacy_{}'.format(_millis()) + dataset_id = "query_destination_table_legacy_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) to_delete.append(dataset_ref) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" client.create_dataset(dataset) # [START bigquery_query_legacy_large_results] @@ -2218,7 +2279,7 @@ def test_client_query_destination_table_legacy(client, to_delete): # Set use_legacy_sql to True to use legacy SQL syntax. job_config.use_legacy_sql = True # Set the destination table - table_ref = client.dataset(dataset_id).table('your_table_id') + table_ref = client.dataset(dataset_id).table("your_table_id") job_config.destination = table_ref job_config.allow_large_results = True sql = """ @@ -2231,21 +2292,22 @@ def test_client_query_destination_table_legacy(client, to_delete): sql, # Location must match that of the dataset(s) referenced in the query # and of the destination table. - location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query query_job.result() # Waits for the query to finish - print('Query results loaded to table {}'.format(table_ref.path)) + print("Query results loaded to table {}".format(table_ref.path)) # [END bigquery_query_legacy_large_results] def test_client_query_destination_table_cmek(client, to_delete): """Run a query""" - dataset_id = 'query_destination_table_{}'.format(_millis()) + dataset_id = "query_destination_table_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) to_delete.append(dataset_ref) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" client.create_dataset(dataset) # [START bigquery_query_destination_table_cmek] @@ -2256,24 +2318,25 @@ def test_client_query_destination_table_cmek(client, to_delete): # Set the destination table. Here, dataset_id is a string, such as: # dataset_id = 'your_dataset_id' - table_ref = client.dataset(dataset_id).table('your_table_id') + table_ref = client.dataset(dataset_id).table("your_table_id") job_config.destination = table_ref # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. - kms_key_name = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format( - 'cloud-samples-tests', 'us-central1', 'test', 'test') - encryption_config = bigquery.EncryptionConfiguration( - kms_key_name=kms_key_name) + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + "cloud-samples-tests", "us-central1", "test", "test" + ) + encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config.destination_encryption_configuration = encryption_config # Start the query, passing in the extra configuration. query_job = client.query( - 'SELECT 17 AS my_col;', + "SELECT 17 AS my_col;", # Location must match that of the dataset(s) referenced in the query # and of the destination table. 
- location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query query_job.result() # The destination table is written using the encryption configuration. @@ -2296,7 +2359,7 @@ def test_client_query_batch(client, to_delete): GROUP BY corpus; """ # Location must match that of the dataset(s) referenced in the query. - location = 'US' + location = "US" # API request - starts the query query_job = client.query(sql, location=location, job_config=job_config) @@ -2304,27 +2367,26 @@ def test_client_query_batch(client, to_delete): # Check on the progress by getting the job's updated state. Once the state # is `DONE`, the results are ready. query_job = client.get_job( - query_job.job_id, location=location) # API request - fetches job - print('Job {} is currently in state {}'.format( - query_job.job_id, query_job.state)) + query_job.job_id, location=location + ) # API request - fetches job + print("Job {} is currently in state {}".format(query_job.job_id, query_job.state)) # [END bigquery_query_batch] def test_client_query_relax_column(client, to_delete): - dataset_id = 'query_relax_column_{}'.format(_millis()) + dataset_id = "query_relax_column_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" dataset = client.create_dataset(dataset) to_delete.append(dataset) - table_ref = dataset_ref.table('my_table') + table_ref = dataset_ref.table("my_table") schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), ] - table = client.create_table( - bigquery.Table(table_ref, schema=schema)) + table = client.create_table(bigquery.Table(table_ref, schema=schema)) # [START bigquery_relax_column_query_append] # from google.cloud import bigquery @@ -2332,20 +2394,18 @@ def test_client_query_relax_column(client, to_delete): # dataset_ref = client.dataset('my_dataset') # Retrieves the destination table and checks the number of required fields - table_id = 'my_table' + table_id = "my_table" table_ref = dataset_ref.table(table_id) table = client.get_table(table_ref) - original_required_fields = sum( - field.mode == 'REQUIRED' for field in table.schema) + original_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) # In this example, the existing table has 2 required fields - print("{} fields in the schema are required.".format( - original_required_fields)) + print("{} fields in the schema are required.".format(original_required_fields)) # Configures the query to append the results to a destination table, # allowing field relaxation job_config = bigquery.QueryJobConfig() job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION, + bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION ] job_config.destination = table_ref job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND @@ -2356,8 +2416,8 @@ def test_client_query_relax_column(client, to_delete): 'SELECT "Beyonce" as full_name;', # Location must match that of the dataset(s) referenced in the query # and of the destination table. 
- location='US', - job_config=job_config + location="US", + job_config=job_config, ) # API request - starts the query query_job.result() # Waits for the query to finish @@ -2365,29 +2425,27 @@ def test_client_query_relax_column(client, to_delete): # Checks the updated number of required fields table = client.get_table(table) - current_required_fields = sum( - field.mode == 'REQUIRED' for field in table.schema) - print("{} fields in the schema are now required.".format( - current_required_fields)) + current_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) + print("{} fields in the schema are now required.".format(current_required_fields)) # [END bigquery_relax_column_query_append] assert original_required_fields - current_required_fields > 0 assert len(table.schema) == 2 - assert table.schema[1].mode == 'NULLABLE' + assert table.schema[1].mode == "NULLABLE" assert table.num_rows > 0 def test_client_query_add_column(client, to_delete): - dataset_id = 'query_add_column_{}'.format(_millis()) + dataset_id = "query_add_column_{}".format(_millis()) dataset_ref = client.dataset(dataset_id) dataset = bigquery.Dataset(dataset_ref) - dataset.location = 'US' + dataset.location = "US" dataset = client.create_dataset(dataset) to_delete.append(dataset) - table_ref = dataset_ref.table('my_table') + table_ref = dataset_ref.table("my_table") schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), ] table = client.create_table(bigquery.Table(table_ref, schema=schema)) @@ -2397,7 +2455,7 @@ def test_client_query_add_column(client, to_delete): # dataset_ref = client.dataset('my_dataset') # Retrieves the destination table and checks the length of the schema - table_id = 'my_table' + table_id = "my_table" table_ref = dataset_ref.table(table_id) table = client.get_table(table_ref) print("Table {} contains {} columns.".format(table_id, len(table.schema))) @@ -2406,7 +2464,7 @@ def test_client_query_add_column(client, to_delete): # allowing field addition job_config = bigquery.QueryJobConfig() job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION, + bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION ] job_config.destination = table_ref job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND @@ -2418,8 +2476,8 @@ def test_client_query_add_column(client, to_delete): 'SELECT "Timmy" as full_name, 85 as age, "Blue" as favorite_color;', # Location must match that of the dataset(s) referenced in the query # and of the destination table. 
- location='US', - job_config=job_config + location="US", + job_config=job_config, ) # API request - starts the query query_job.result() # Waits for the query to finish @@ -2427,8 +2485,7 @@ def test_client_query_add_column(client, to_delete): # Checks the updated length of the schema table = client.get_table(table) - print("Table {} now contains {} columns.".format( - table_id, len(table.schema))) + print("Table {} now contains {} columns.".format(table_id, len(table.schema))) # [END bigquery_add_column_query_append] assert len(table.schema) == 3 assert table.num_rows > 0 @@ -2449,26 +2506,27 @@ def test_client_query_w_named_params(client, capsys): ORDER BY word_count DESC; """ query_params = [ - bigquery.ScalarQueryParameter('corpus', 'STRING', 'romeoandjuliet'), - bigquery.ScalarQueryParameter('min_word_count', 'INT64', 250) + bigquery.ScalarQueryParameter("corpus", "STRING", "romeoandjuliet"), + bigquery.ScalarQueryParameter("min_word_count", "INT64", 250), ] job_config = bigquery.QueryJobConfig() job_config.query_parameters = query_params query_job = client.query( query, # Location must match that of the dataset(s) referenced in the query. - location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query # Print the results for row in query_job: - print('{}: \t{}'.format(row.word, row.word_count)) + print("{}: \t{}".format(row.word, row.word_count)) - assert query_job.state == 'DONE' + assert query_job.state == "DONE" # [END bigquery_query_params_named] out, _ = capsys.readouterr() - assert 'the' in out + assert "the" in out def test_client_query_w_positional_params(client, capsys): @@ -2488,26 +2546,27 @@ def test_client_query_w_positional_params(client, capsys): # Set the name to None to use positional parameters. # Note that you cannot mix named and positional parameters. query_params = [ - bigquery.ScalarQueryParameter(None, 'STRING', 'romeoandjuliet'), - bigquery.ScalarQueryParameter(None, 'INT64', 250) + bigquery.ScalarQueryParameter(None, "STRING", "romeoandjuliet"), + bigquery.ScalarQueryParameter(None, "INT64", 250), ] job_config = bigquery.QueryJobConfig() job_config.query_parameters = query_params query_job = client.query( query, # Location must match that of the dataset(s) referenced in the query. - location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query # Print the results for row in query_job: - print('{}: \t{}'.format(row.word, row.word_count)) + print("{}: \t{}".format(row.word, row.word_count)) - assert query_job.state == 'DONE' + assert query_job.state == "DONE" # [END bigquery_query_params_positional] out, _ = capsys.readouterr() - assert 'the' in out + assert "the" in out def test_client_query_w_timestamp_params(client, capsys): @@ -2520,30 +2579,32 @@ def test_client_query_w_timestamp_params(client, capsys): import datetime import pytz - query = 'SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);' + query = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);" query_params = [ bigquery.ScalarQueryParameter( - 'ts_value', - 'TIMESTAMP', - datetime.datetime(2016, 12, 7, 8, 0, tzinfo=pytz.UTC)) + "ts_value", + "TIMESTAMP", + datetime.datetime(2016, 12, 7, 8, 0, tzinfo=pytz.UTC), + ) ] job_config = bigquery.QueryJobConfig() job_config.query_parameters = query_params query_job = client.query( query, # Location must match that of the dataset(s) referenced in the query. 
- location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query # Print the results for row in query_job: print(row) - assert query_job.state == 'DONE' + assert query_job.state == "DONE" # [END bigquery_query_params_timestamps] out, _ = capsys.readouterr() - assert '2016, 12, 7, 9, 0' in out + assert "2016, 12, 7, 9, 0" in out def test_client_query_w_array_params(client, capsys): @@ -2562,27 +2623,27 @@ def test_client_query_w_array_params(client, capsys): LIMIT 10; """ query_params = [ - bigquery.ScalarQueryParameter('gender', 'STRING', 'M'), - bigquery.ArrayQueryParameter( - 'states', 'STRING', ['WA', 'WI', 'WV', 'WY']) + bigquery.ScalarQueryParameter("gender", "STRING", "M"), + bigquery.ArrayQueryParameter("states", "STRING", ["WA", "WI", "WV", "WY"]), ] job_config = bigquery.QueryJobConfig() job_config.query_parameters = query_params query_job = client.query( query, # Location must match that of the dataset(s) referenced in the query. - location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query # Print the results for row in query_job: - print('{}: \t{}'.format(row.name, row.count)) + print("{}: \t{}".format(row.name, row.count)) - assert query_job.state == 'DONE' + assert query_job.state == "DONE" # [END bigquery_query_params_arrays] out, _ = capsys.readouterr() - assert 'James' in out + assert "James" in out def test_client_query_w_struct_params(client, capsys): @@ -2591,12 +2652,12 @@ def test_client_query_w_struct_params(client, capsys): # from google.cloud import bigquery # client = bigquery.Client() - query = 'SELECT @struct_value AS s;' + query = "SELECT @struct_value AS s;" query_params = [ bigquery.StructQueryParameter( - 'struct_value', - bigquery.ScalarQueryParameter('x', 'INT64', 1), - bigquery.ScalarQueryParameter('y', 'STRING', 'foo') + "struct_value", + bigquery.ScalarQueryParameter("x", "INT64", 1), + bigquery.ScalarQueryParameter("y", "STRING", "foo"), ) ] job_config = bigquery.QueryJobConfig() @@ -2604,19 +2665,20 @@ def test_client_query_w_struct_params(client, capsys): query_job = client.query( query, # Location must match that of the dataset(s) referenced in the query. - location='US', - job_config=job_config) # API request - starts the query + location="US", + job_config=job_config, + ) # API request - starts the query # Print the results for row in query_job: print(row.s) - assert query_job.state == 'DONE' + assert query_job.state == "DONE" # [END bigquery_query_params_structs] out, _ = capsys.readouterr() - assert '1' in out - assert 'foo' in out + assert "1" in out + assert "foo" in out def test_client_query_dry_run(client): @@ -2630,20 +2692,22 @@ def test_client_query_dry_run(client): job_config.dry_run = True job_config.use_query_cache = False query_job = client.query( - ('SELECT name, COUNT(*) as name_count ' - 'FROM `bigquery-public-data.usa_names.usa_1910_2013` ' - "WHERE state = 'WA' " - 'GROUP BY name'), + ( + "SELECT name, COUNT(*) as name_count " + "FROM `bigquery-public-data.usa_names.usa_1910_2013` " + "WHERE state = 'WA' " + "GROUP BY name" + ), # Location must match that of the dataset(s) referenced in the query. - location='US', - job_config=job_config) # API request + location="US", + job_config=job_config, + ) # API request # A dry run query completes immediately. 
- assert query_job.state == 'DONE' + assert query_job.state == "DONE" assert query_job.dry_run - print("This query will process {} bytes.".format( - query_job.total_bytes_processed)) + print("This query will process {} bytes.".format(query_job.total_bytes_processed)) # [END bigquery_query_dry_run] assert query_job.total_bytes_processed > 0 @@ -2664,8 +2728,9 @@ def test_query_no_cache(client): query_job = client.query( sql, # Location must match that of the dataset(s) referenced in the query. - location='US', - job_config=job_config) # API request + location="US", + job_config=job_config, + ) # API request # Print the results. for row in query_job: # API request - fetches results @@ -2679,16 +2744,16 @@ def test_query_external_gcs_temporary_table(client): # client = bigquery.Client() # Configure the external data source and query job - external_config = bigquery.ExternalConfig('CSV') + external_config = bigquery.ExternalConfig("CSV") external_config.source_uris = [ - 'gs://cloud-samples-data/bigquery/us-states/us-states.csv', + "gs://cloud-samples-data/bigquery/us-states/us-states.csv" ] external_config.schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] external_config.options.skip_leading_rows = 1 # optionally skip header row - table_id = 'us_states' + table_id = "us_states" job_config = bigquery.QueryJobConfig() job_config.table_definitions = {table_id: external_config} @@ -2698,14 +2763,13 @@ def test_query_external_gcs_temporary_table(client): query_job = client.query(sql, job_config=job_config) # API request w_states = list(query_job) # Waits for query to finish - print('There are {} states with names starting with W.'.format( - len(w_states))) + print("There are {} states with names starting with W.".format(len(w_states))) # [END bigquery_query_external_gcs_temp] assert len(w_states) == 4 def test_query_external_gcs_permanent_table(client, to_delete): - dataset_id = 'query_external_gcs_{}'.format(_millis()) + dataset_id = "query_external_gcs_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -2717,15 +2781,15 @@ def test_query_external_gcs_permanent_table(client, to_delete): # Configure the external data source dataset_ref = client.dataset(dataset_id) - table_id = 'us_states' + table_id = "us_states" schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] table = bigquery.Table(dataset_ref.table(table_id), schema=schema) - external_config = bigquery.ExternalConfig('CSV') + external_config = bigquery.ExternalConfig("CSV") external_config.source_uris = [ - 'gs://cloud-samples-data/bigquery/us-states/us-states.csv', + "gs://cloud-samples-data/bigquery/us-states/us-states.csv" ] external_config.options.skip_leading_rows = 1 # optionally skip header row table.external_data_configuration = external_config @@ -2734,14 +2798,12 @@ def test_query_external_gcs_permanent_table(client, to_delete): table = client.create_table(table) # API request # Example query to find states starting with 'W' - sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format( - dataset_id, table_id) + sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) query_job = client.query(sql) # API request w_states = list(query_job) # Waits for 
query to finish - print('There are {} states with names starting with W.'.format( - len(w_states))) + print("There are {} states with names starting with W.".format(len(w_states))) # [END bigquery_query_external_gcs_perm] assert len(w_states) == 4 @@ -2750,31 +2812,35 @@ def test_query_external_sheets_temporary_table(client): # [START bigquery_query_external_sheets_temp] # [START bigquery_auth_drive_scope] import google.auth + # from google.cloud import bigquery # Create credentials with Drive & BigQuery API scopes # Both APIs must be enabled for your project before running this code - credentials, project = google.auth.default(scopes=[ - 'https://www.googleapis.com/auth/drive', - 'https://www.googleapis.com/auth/bigquery', - ]) + credentials, project = google.auth.default( + scopes=[ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/bigquery", + ] + ) client = bigquery.Client(credentials=credentials, project=project) # [END bigquery_auth_drive_scope] # Configure the external data source and query job - external_config = bigquery.ExternalConfig('GOOGLE_SHEETS') + external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") # Use a shareable link or grant viewing access to the email address you # used to authenticate with BigQuery (this example Sheet is public) sheet_url = ( - 'https://docs.google.com/spreadsheets' - '/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing') + "https://docs.google.com/spreadsheets" + "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" + ) external_config.source_uris = [sheet_url] external_config.schema = [ - bigquery.SchemaField('name', 'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] external_config.options.skip_leading_rows = 1 # optionally skip header row - table_id = 'us_states' + table_id = "us_states" job_config = bigquery.QueryJobConfig() job_config.table_definitions = {table_id: external_config} @@ -2784,45 +2850,48 @@ def test_query_external_sheets_temporary_table(client): query_job = client.query(sql, job_config=job_config) # API request w_states = list(query_job) # Waits for query to finish - print('There are {} states with names starting with W.'.format( - len(w_states))) + print("There are {} states with names starting with W.".format(len(w_states))) # [END bigquery_query_external_sheets_temp] assert len(w_states) == 4 def test_query_external_sheets_permanent_table(client, to_delete): - dataset_id = 'query_external_sheets_{}'.format(_millis()) + dataset_id = "query_external_sheets_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) # [START bigquery_query_external_sheets_perm] import google.auth + # from google.cloud import bigquery # dataset_id = 'my_dataset' # Create credentials with Drive & BigQuery API scopes # Both APIs must be enabled for your project before running this code - credentials, project = google.auth.default(scopes=[ - 'https://www.googleapis.com/auth/drive', - 'https://www.googleapis.com/auth/bigquery', - ]) + credentials, project = google.auth.default( + scopes=[ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/bigquery", + ] + ) client = bigquery.Client(credentials=credentials, project=project) # Configure the external data source dataset_ref = client.dataset(dataset_id) - table_id = 'us_states' + table_id = "us_states" schema = [ - bigquery.SchemaField('name', 
'STRING'), - bigquery.SchemaField('post_abbr', 'STRING') + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), ] table = bigquery.Table(dataset_ref.table(table_id), schema=schema) - external_config = bigquery.ExternalConfig('GOOGLE_SHEETS') + external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") # Use a shareable link or grant viewing access to the email address you # used to authenticate with BigQuery (this example Sheet is public) sheet_url = ( - 'https://docs.google.com/spreadsheets' - '/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing') + "https://docs.google.com/spreadsheets" + "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" + ) external_config.source_uris = [sheet_url] external_config.options.skip_leading_rows = 1 # optionally skip header row table.external_data_configuration = external_config @@ -2831,14 +2900,12 @@ def test_query_external_sheets_permanent_table(client, to_delete): table = client.create_table(table) # API request # Example query to find states starting with 'W' - sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format( - dataset_id, table_id) + sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) query_job = client.query(sql) # API request w_states = list(query_job) # Waits for query to finish - print('There are {} states with names starting with W.'.format( - len(w_states))) + print("There are {} states with names starting with W.".format(len(w_states))) # [END bigquery_query_external_sheets_perm] assert len(w_states) == 4 @@ -2846,8 +2913,8 @@ def test_query_external_sheets_permanent_table(client, to_delete): def test_ddl_create_view(client, to_delete, capsys): """Create a view via a DDL query.""" project = client.project - dataset_id = 'ddl_view_{}'.format(_millis()) - table_id = 'new_view' + dataset_id = "ddl_view_{}".format(_millis()) + table_id = "new_view" dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -2871,20 +2938,24 @@ def test_ddl_create_view(client, to_delete, capsys): AS SELECT name, state, year, number FROM `bigquery-public-data.usa_names.usa_1910_current` WHERE state LIKE 'W%' - """.format(project, dataset_id, table_id) + """.format( + project, dataset_id, table_id + ) job = client.query(sql) # API request. job.result() # Waits for the query to finish. - print('Created new view "{}.{}.{}".'.format( - job.destination.project, - job.destination.dataset_id, - job.destination.table_id)) + print( + 'Created new view "{}.{}.{}".'.format( + job.destination.project, + job.destination.dataset_id, + job.destination.table_id, + ) + ) # [END bigquery_ddl_create_view] out, _ = capsys.readouterr() - assert 'Created new view "{}.{}.{}".'.format( - project, dataset_id, table_id) in out + assert 'Created new view "{}.{}.{}".'.format(project, dataset_id, table_id) in out # Test that listing query result rows succeeds so that generic query # processing tools work with DDL statements. @@ -2927,12 +2998,12 @@ def test_client_list_jobs(client): # Use state_filter to filter by job state. 
print("Jobs currently running:") - for job in client.list_jobs(state_filter='RUNNING'): + for job in client.list_jobs(state_filter="RUNNING"): print(job.job_id) # [END bigquery_list_jobs] -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_query_results_as_dataframe(client): # [START bigquery_query_results_dataframe] # from google.cloud import bigquery @@ -2950,30 +3021,30 @@ def test_query_results_as_dataframe(client): # [END bigquery_query_results_dataframe] assert isinstance(df, pandas.DataFrame) assert len(list(df)) == 2 # verify the number of columns - assert len(df) == 10 # verify the number of rows + assert len(df) == 10 # verify the number of rows -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_list_rows_as_dataframe(client): # [START bigquery_list_rows_dataframe] # from google.cloud import bigquery # client = bigquery.Client() - dataset_ref = client.dataset('samples', project='bigquery-public-data') - table_ref = dataset_ref.table('shakespeare') + dataset_ref = client.dataset("samples", project="bigquery-public-data") + table_ref = dataset_ref.table("shakespeare") table = client.get_table(table_ref) df = client.list_rows(table).to_dataframe() # [END bigquery_list_rows_dataframe] assert isinstance(df, pandas.DataFrame) assert len(list(df)) == len(table.schema) # verify the number of columns - assert len(df) == table.num_rows # verify the number of rows + assert len(df) == table.num_rows # verify the number of rows -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') -@pytest.mark.skipif(pyarrow is None, reason='Requires `pyarrow`') +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") def test_load_table_from_dataframe(client, to_delete): - dataset_id = 'load_table_from_dataframe_{}'.format(_millis()) + dataset_id = "load_table_from_dataframe_{}".format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) @@ -2985,34 +3056,30 @@ def test_load_table_from_dataframe(client, to_delete): # dataset_id = 'my_dataset' dataset_ref = client.dataset(dataset_id) - table_ref = dataset_ref.table('monty_python') + table_ref = dataset_ref.table("monty_python") records = [ - {'title': 'The Meaning of Life', 'release_year': 1983}, - {'title': 'Monty Python and the Holy Grail', 'release_year': 1975}, - {'title': 'Life of Brian', 'release_year': 1979}, - { - 'title': 'And Now for Something Completely Different', - 'release_year': 1971 - }, + {"title": "The Meaning of Life", "release_year": 1983}, + {"title": "Monty Python and the Holy Grail", "release_year": 1975}, + {"title": "Life of Brian", "release_year": 1979}, + {"title": "And Now for Something Completely Different", "release_year": 1971}, ] # Optionally set explicit indices. # If indices are not specified, a column will be created for the default # indices created by pandas. 
- index = ['Q24980', 'Q25043', 'Q24953', 'Q16403'] - dataframe = pandas.DataFrame( - records, index=pandas.Index(index, name='wikidata_id')) + index = ["Q24980", "Q25043", "Q24953", "Q16403"] + dataframe = pandas.DataFrame(records, index=pandas.Index(index, name="wikidata_id")) - job = client.load_table_from_dataframe(dataframe, table_ref, location='US') + job = client.load_table_from_dataframe(dataframe, table_ref, location="US") job.result() # Waits for table load to complete. - assert job.state == 'DONE' + assert job.state == "DONE" table = client.get_table(table_ref) assert table.num_rows == 4 # [END bigquery_load_table_dataframe] column_names = [field.name for field in table.schema] - assert sorted(column_names) == ['release_year', 'title', 'wikidata_id'] + assert sorted(column_names) == ["release_year", "title", "wikidata_id"] -if __name__ == '__main__': +if __name__ == "__main__": pytest.main() diff --git a/bigquery/google/__init__.py b/bigquery/google/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/bigquery/google/__init__.py +++ b/bigquery/google/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery/google/cloud/__init__.py b/bigquery/google/cloud/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/bigquery/google/cloud/__init__.py +++ b/bigquery/google/cloud/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/bigquery/google/cloud/bigquery/__init__.py b/bigquery/google/cloud/bigquery/__init__.py index 65392214eeda..c3865d511b6c 100644 --- a/bigquery/google/cloud/bigquery/__init__.py +++ b/bigquery/google/cloud/bigquery/__init__.py @@ -29,7 +29,8 @@ from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-bigquery').version + +__version__ = get_distribution("google-cloud-bigquery").version from google.cloud.bigquery.client import Client from google.cloud.bigquery.dataset import AccessEntry @@ -73,52 +74,52 @@ from google.cloud.bigquery.table import TimePartitioning __all__ = [ - '__version__', - 'Client', + "__version__", + "Client", # Queries - 'QueryJob', - 'QueryJobConfig', - 'ArrayQueryParameter', - 'ScalarQueryParameter', - 'StructQueryParameter', + "QueryJob", + "QueryJobConfig", + "ArrayQueryParameter", + "ScalarQueryParameter", + "StructQueryParameter", # Datasets - 'Dataset', - 'DatasetReference', - 'AccessEntry', + "Dataset", + "DatasetReference", + "AccessEntry", # Tables - 'EncryptionConfiguration', - 'Table', - 'TableReference', - 'Row', - 'CopyJob', - 'CopyJobConfig', - 'ExtractJob', - 'ExtractJobConfig', - 'LoadJob', - 'LoadJobConfig', - 'UnknownJob', - 'TimePartitioningType', - 'TimePartitioning', + "EncryptionConfiguration", + "Table", + "TableReference", + "Row", + "CopyJob", + "CopyJobConfig", + "ExtractJob", + "ExtractJobConfig", + "LoadJob", + "LoadJobConfig", + "UnknownJob", + "TimePartitioningType", + "TimePartitioning", # Shared helpers - 'SchemaField', - 'UDFResource', - 'ExternalConfig', - 'BigtableOptions', - 'BigtableColumnFamily', - 'BigtableColumn', - 'CSVOptions', - 'GoogleSheetsOptions', - 'DEFAULT_RETRY', + "SchemaField", + "UDFResource", + "ExternalConfig", + "BigtableOptions", + "BigtableColumnFamily", + "BigtableColumn", + "CSVOptions", + "GoogleSheetsOptions", + "DEFAULT_RETRY", # Enum Constants - 
'Compression', - 'CreateDisposition', - 'DestinationFormat', - 'ExternalSourceFormat', - 'Encoding', - 'QueryPriority', - 'SchemaUpdateOption', - 'SourceFormat', - 'WriteDisposition' + "Compression", + "CreateDisposition", + "DestinationFormat", + "ExternalSourceFormat", + "Encoding", + "QueryPriority", + "SchemaUpdateOption", + "SourceFormat", + "WriteDisposition", ] @@ -127,4 +128,5 @@ def load_ipython_extension(ipython): from google.cloud.bigquery.magics import _cell_magic ipython.register_magic_function( - _cell_magic, magic_kind='cell', magic_name='bigquery') + _cell_magic, magic_kind="cell", magic_name="bigquery" + ) diff --git a/bigquery/google/cloud/bigquery/_helpers.py b/bigquery/google/cloud/bigquery/_helpers.py index c0a29b427b3b..6990fb3eaa69 100644 --- a/bigquery/google/cloud/bigquery/_helpers.py +++ b/bigquery/google/cloud/bigquery/_helpers.py @@ -25,14 +25,14 @@ from google.cloud._helpers import _RFC3339_NO_FRACTION from google.cloud._helpers import _to_bytes -_RFC3339_MICROS_NO_ZULU = '%Y-%m-%dT%H:%M:%S.%f' -_TIMEONLY_WO_MICROS = '%H:%M:%S' -_TIMEONLY_W_MICROS = '%H:%M:%S.%f' +_RFC3339_MICROS_NO_ZULU = "%Y-%m-%dT%H:%M:%S.%f" +_TIMEONLY_WO_MICROS = "%H:%M:%S" +_TIMEONLY_W_MICROS = "%H:%M:%S.%f" def _not_null(value, field): """Check whether 'value' should be coerced to 'field' type.""" - return value is not None or field.mode != 'NULLABLE' + return value is not None or field.mode != "NULLABLE" def _int_from_json(value, field): @@ -56,7 +56,7 @@ def _decimal_from_json(value, field): def _bool_from_json(value, field): """Coerce 'value' to a bool, if set or not nullable.""" if _not_null(value, field): - return value.lower() in ['t', 'true', '1'] + return value.lower() in ["t", "true", "1"] def _string_from_json(value, _): @@ -93,19 +93,21 @@ def _timestamp_query_param_from_json(value, field): # Canonical formats for timestamps in BigQuery are flexible. See: # g.co/cloud/bigquery/docs/reference/standard-sql/data-types#timestamp-type # The separator between the date and time can be 'T' or ' '. - value = value.replace(' ', 'T', 1) + value = value.replace(" ", "T", 1) # The UTC timezone may be formatted as Z or +00:00. - value = value.replace('Z', '') - value = value.replace('+00:00', '') + value = value.replace("Z", "") + value = value.replace("+00:00", "") - if '.' in value: + if "." in value: # YYYY-MM-DDTHH:MM:SS.ffffff - return datetime.datetime.strptime( - value, _RFC3339_MICROS_NO_ZULU).replace(tzinfo=UTC) + return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU).replace( + tzinfo=UTC + ) else: # YYYY-MM-DDTHH:MM:SS - return datetime.datetime.strptime( - value, _RFC3339_NO_FRACTION).replace(tzinfo=UTC) + return datetime.datetime.strptime(value, _RFC3339_NO_FRACTION).replace( + tzinfo=UTC + ) else: return None @@ -123,7 +125,7 @@ def _datetime_from_json(value, field): :data:`None`). """ if _not_null(value, field): - if '.' in value: + if "." 
in value: # YYYY-MM-DDTHH:MM:SS.ffffff return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU) else: @@ -156,37 +158,37 @@ def _record_from_json(value, field): """Coerce 'value' to a mapping, if set or not nullable.""" if _not_null(value, field): record = {} - record_iter = zip(field.fields, value['f']) + record_iter = zip(field.fields, value["f"]) for subfield, cell in record_iter: converter = _CELLDATA_FROM_JSON[subfield.field_type] - if subfield.mode == 'REPEATED': - value = [converter(item['v'], subfield) for item in cell['v']] + if subfield.mode == "REPEATED": + value = [converter(item["v"], subfield) for item in cell["v"]] else: - value = converter(cell['v'], subfield) + value = converter(cell["v"], subfield) record[subfield.name] = value return record _CELLDATA_FROM_JSON = { - 'INTEGER': _int_from_json, - 'INT64': _int_from_json, - 'FLOAT': _float_from_json, - 'FLOAT64': _float_from_json, - 'NUMERIC': _decimal_from_json, - 'BOOLEAN': _bool_from_json, - 'BOOL': _bool_from_json, - 'STRING': _string_from_json, - 'GEOGRAPHY': _string_from_json, - 'BYTES': _bytes_from_json, - 'TIMESTAMP': _timestamp_from_json, - 'DATETIME': _datetime_from_json, - 'DATE': _date_from_json, - 'TIME': _time_from_json, - 'RECORD': _record_from_json, + "INTEGER": _int_from_json, + "INT64": _int_from_json, + "FLOAT": _float_from_json, + "FLOAT64": _float_from_json, + "NUMERIC": _decimal_from_json, + "BOOLEAN": _bool_from_json, + "BOOL": _bool_from_json, + "STRING": _string_from_json, + "GEOGRAPHY": _string_from_json, + "BYTES": _bytes_from_json, + "TIMESTAMP": _timestamp_from_json, + "DATETIME": _datetime_from_json, + "DATE": _date_from_json, + "TIME": _time_from_json, + "RECORD": _record_from_json, } _QUERY_PARAMS_FROM_JSON = dict(_CELLDATA_FROM_JSON) -_QUERY_PARAMS_FROM_JSON['TIMESTAMP'] = _timestamp_query_param_from_json +_QUERY_PARAMS_FROM_JSON["TIMESTAMP"] = _timestamp_query_param_from_json def _field_to_index_mapping(schema): @@ -210,13 +212,12 @@ def _row_tuple_from_json(row, schema): :returns: A tuple of data converted to native types. """ row_data = [] - for field, cell in zip(schema, row['f']): + for field, cell in zip(schema, row["f"]): converter = _CELLDATA_FROM_JSON[field.field_type] - if field.mode == 'REPEATED': - row_data.append([converter(item['v'], field) - for item in cell['v']]) + if field.mode == "REPEATED": + row_data.append([converter(item["v"], field) for item in cell["v"]]) else: - row_data.append(converter(cell['v'], field)) + row_data.append(converter(cell["v"], field)) return tuple(row_data) @@ -226,8 +227,7 @@ def _rows_from_json(values, schema): from google.cloud.bigquery import Row field_to_index = _field_to_index_mapping(schema) - return [Row(_row_tuple_from_json(r, schema), field_to_index) - for r in values] + return [Row(_row_tuple_from_json(r, schema), field_to_index) for r in values] def _int_to_json(value): @@ -252,14 +252,14 @@ def _decimal_to_json(value): def _bool_to_json(value): """Coerce 'value' to an JSON-compatible representation.""" if isinstance(value, bool): - value = 'true' if value else 'false' + value = "true" if value else "false" return value def _bytes_to_json(value): """Coerce 'value' to an JSON-compatible representation.""" if isinstance(value, bytes): - value = base64.standard_b64encode(value).decode('ascii') + value = base64.standard_b64encode(value).decode("ascii") return value @@ -272,8 +272,7 @@ def _timestamp_to_json_parameter(value): if value.tzinfo not in (None, UTC): # Convert to UTC and remove the time zone info. 
value = value.replace(tzinfo=None) - value.utcoffset() - value = '%s %s+00:00' % ( - value.date().isoformat(), value.time().isoformat()) + value = "%s %s+00:00" % (value.date().isoformat(), value.time().isoformat()) return value @@ -310,30 +309,30 @@ def _time_to_json(value): # Converters used for scalar values marshalled as row data. _SCALAR_VALUE_TO_JSON_ROW = { - 'INTEGER': _int_to_json, - 'INT64': _int_to_json, - 'FLOAT': _float_to_json, - 'FLOAT64': _float_to_json, - 'NUMERIC': _decimal_to_json, - 'BOOLEAN': _bool_to_json, - 'BOOL': _bool_to_json, - 'BYTES': _bytes_to_json, - 'TIMESTAMP': _timestamp_to_json_row, - 'DATETIME': _datetime_to_json, - 'DATE': _date_to_json, - 'TIME': _time_to_json, + "INTEGER": _int_to_json, + "INT64": _int_to_json, + "FLOAT": _float_to_json, + "FLOAT64": _float_to_json, + "NUMERIC": _decimal_to_json, + "BOOLEAN": _bool_to_json, + "BOOL": _bool_to_json, + "BYTES": _bytes_to_json, + "TIMESTAMP": _timestamp_to_json_row, + "DATETIME": _datetime_to_json, + "DATE": _date_to_json, + "TIME": _time_to_json, } # Converters used for scalar values marshalled as query parameters. _SCALAR_VALUE_TO_JSON_PARAM = _SCALAR_VALUE_TO_JSON_ROW.copy() -_SCALAR_VALUE_TO_JSON_PARAM['TIMESTAMP'] = _timestamp_to_json_parameter +_SCALAR_VALUE_TO_JSON_PARAM["TIMESTAMP"] = _timestamp_to_json_parameter def _snake_to_camel_case(value): """Convert snake case string to camel case.""" - words = value.split('_') - return words[0] + ''.join(map(str.capitalize, words[1:])) + words = value.split("_") + return words[0] + "".join(map(str.capitalize, words[1:])) def _get_sub_prop(container, keys, default=None): diff --git a/bigquery/google/cloud/bigquery/_http.py b/bigquery/google/cloud/bigquery/_http.py index c2698cd80bc2..1dd7524542a5 100644 --- a/bigquery/google/cloud/bigquery/_http.py +++ b/bigquery/google/cloud/bigquery/_http.py @@ -29,15 +29,13 @@ class Connection(_http.JSONConnection): :param client: The client that owns the current connection. 
""" - API_BASE_URL = 'https://www.googleapis.com' + API_BASE_URL = "https://www.googleapis.com" """The base of the API call URL.""" - API_VERSION = 'v2' + API_VERSION = "v2" """The version of the API, used in building the API call's URL.""" - API_URL_TEMPLATE = '{api_base_url}/bigquery/{api_version}{path}' + API_URL_TEMPLATE = "{api_base_url}/bigquery/{api_version}{path}" """A template for the URL of a particular API call.""" - _EXTRA_HEADERS = { - _http.CLIENT_INFO_HEADER: _CLIENT_INFO, - } + _EXTRA_HEADERS = {_http.CLIENT_INFO_HEADER: _CLIENT_INFO} diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index 8b18da22c59b..12c0b57ad641 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -58,14 +58,15 @@ _MAX_MULTIPART_SIZE = 5 * 1024 * 1024 _DEFAULT_NUM_RETRIES = 6 _BASE_UPLOAD_TEMPLATE = ( - u'https://www.googleapis.com/upload/bigquery/v2/projects/' - u'{project}/jobs?uploadType=') -_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'multipart' -_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'resumable' -_GENERIC_CONTENT_TYPE = u'*/*' + u"https://www.googleapis.com/upload/bigquery/v2/projects/" + u"{project}/jobs?uploadType=" +) +_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart" +_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"resumable" +_GENERIC_CONTENT_TYPE = u"*/*" _READ_LESS_THAN_SIZE = ( - 'Size {:d} was specified but the file-like object only had ' - '{:d} bytes remaining.') + "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining." +) class Project(object): @@ -80,6 +81,7 @@ class Project(object): :type friendly_name: str :param friendly_name: Display name of the project """ + def __init__(self, project_id, numeric_id, friendly_name): self.project_id = project_id self.numeric_id = numeric_id @@ -88,8 +90,7 @@ def __init__(self, project_id, numeric_id, friendly_name): @classmethod def from_api_repr(cls, resource): """Factory: construct an instance from a resource dict.""" - return cls( - resource['id'], resource['numericId'], resource['friendlyName']) + return cls(resource["id"], resource["numericId"], resource["friendlyName"]) class Client(ClientWithProject): @@ -124,15 +125,23 @@ class Client(ClientWithProject): to acquire default credentials. 
""" - SCOPE = ('https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform') + SCOPE = ( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ) """The scopes required for authenticating as a BigQuery consumer.""" def __init__( - self, project=None, credentials=None, _http=None, - location=None, default_query_job_config=None): + self, + project=None, + credentials=None, + _http=None, + location=None, + default_query_job_config=None, + ): super(Client, self).__init__( - project=project, credentials=credentials, _http=_http) + project=project, credentials=credentials, _http=_http + ) self._connection = Connection(self) self._location = location self._default_query_job_config = default_query_job_config @@ -167,12 +176,11 @@ def get_service_account_email(self, project=None): """ if project is None: project = self.project - path = '/projects/%s/serviceAccount' % (project,) - api_response = self._connection.api_request(method='GET', path=path) - return api_response['email'] + path = "/projects/%s/serviceAccount" % (project,) + api_response = self._connection.api_request(method="GET", path=path) + return api_response["email"] - def list_projects(self, max_results=None, page_token=None, - retry=DEFAULT_RETRY): + def list_projects(self, max_results=None, page_token=None, retry=DEFAULT_RETRY): """List projects for the project associated with this client. See @@ -201,15 +209,22 @@ def list_projects(self, max_results=None, page_token=None, return page_iterator.HTTPIterator( client=self, api_request=functools.partial(self._call_api, retry), - path='/projects', + path="/projects", item_to_value=_item_to_project, - items_key='projects', + items_key="projects", page_token=page_token, - max_results=max_results) + max_results=max_results, + ) def list_datasets( - self, project=None, include_all=False, filter=None, - max_results=None, page_token=None, retry=DEFAULT_RETRY): + self, + project=None, + include_all=False, + filter=None, + max_results=None, + page_token=None, + retry=DEFAULT_RETRY, + ): """List datasets for the project associated with this client. See @@ -248,21 +263,22 @@ def list_datasets( if project is None: project = self.project if include_all: - extra_params['all'] = True + extra_params["all"] = True if filter: # TODO: consider supporting a dict of label -> value for filter, # and converting it into a string here. - extra_params['filter'] = filter - path = '/projects/%s/datasets' % (project,) + extra_params["filter"] = filter + path = "/projects/%s/datasets" % (project,) return page_iterator.HTTPIterator( client=self, api_request=functools.partial(self._call_api, retry), path=path, item_to_value=_item_to_dataset, - items_key='datasets', + items_key="datasets", page_token=page_token, max_results=max_results, - extra_params=extra_params) + extra_params=extra_params, + ) def dataset(self, dataset_id, project=None): """Construct a reference to a dataset. 
@@ -312,18 +328,18 @@ def create_dataset(self, dataset): """ if isinstance(dataset, str): dataset = DatasetReference.from_string( - dataset, default_project=self.project) + dataset, default_project=self.project + ) if isinstance(dataset, DatasetReference): dataset = Dataset(dataset) - path = '/projects/%s/datasets' % (dataset.project,) + path = "/projects/%s/datasets" % (dataset.project,) data = dataset.to_api_repr() - if data.get('location') is None and self.location is not None: - data['location'] = self.location + if data.get("location") is None and self.location is not None: + data["location"] = self.location - api_response = self._connection.api_request( - method='POST', path=path, data=data) + api_response = self._connection.api_request(method="POST", path=path, data=data) return Dataset.from_api_repr(api_response) @@ -349,15 +365,14 @@ def create_table(self, table): A new ``Table`` returned from the service. """ if isinstance(table, str): - table = TableReference.from_string( - table, default_project=self.project) + table = TableReference.from_string(table, default_project=self.project) if isinstance(table, TableReference): table = Table(table) - path = '/projects/%s/datasets/%s/tables' % ( - table.project, table.dataset_id) + path = "/projects/%s/datasets/%s/tables" % (table.project, table.dataset_id) api_response = self._connection.api_request( - method='POST', path=path, data=table.to_api_repr()) + method="POST", path=path, data=table.to_api_repr() + ) return Table.from_api_repr(api_response) def _call_api(self, retry, **kwargs): @@ -387,10 +402,10 @@ def get_dataset(self, dataset_ref, retry=DEFAULT_RETRY): """ if isinstance(dataset_ref, str): dataset_ref = DatasetReference.from_string( - dataset_ref, default_project=self.project) + dataset_ref, default_project=self.project + ) - api_response = self._call_api( - retry, method='GET', path=dataset_ref.path) + api_response = self._call_api(retry, method="GET", path=dataset_ref.path) return Dataset.from_api_repr(api_response) def get_table(self, table_ref, retry=DEFAULT_RETRY): @@ -414,9 +429,10 @@ def get_table(self, table_ref, retry=DEFAULT_RETRY): """ if isinstance(table_ref, str): table_ref = TableReference.from_string( - table_ref, default_project=self.project) + table_ref, default_project=self.project + ) - api_response = self._call_api(retry, method='GET', path=table_ref.path) + api_response = self._call_api(retry, method="GET", path=table_ref.path) return Table.from_api_repr(api_response) def update_dataset(self, dataset, fields, retry=DEFAULT_RETRY): @@ -447,15 +463,12 @@ def update_dataset(self, dataset, fields, retry=DEFAULT_RETRY): """ partial = dataset._build_resource(fields) if dataset.etag is not None: - headers = {'If-Match': dataset.etag} + headers = {"If-Match": dataset.etag} else: headers = None api_response = self._call_api( - retry, - method='PATCH', - path=dataset.path, - data=partial, - headers=headers) + retry, method="PATCH", path=dataset.path, data=partial, headers=headers + ) return Dataset.from_api_repr(api_response) def update_table(self, table, fields, retry=DEFAULT_RETRY): @@ -485,16 +498,17 @@ def update_table(self, table, fields, retry=DEFAULT_RETRY): """ partial = table._build_resource(fields) if table.etag is not None: - headers = {'If-Match': table.etag} + headers = {"If-Match": table.etag} else: headers = None api_response = self._call_api( - retry, - method='PATCH', path=table.path, data=partial, headers=headers) + retry, method="PATCH", path=table.path, data=partial, headers=headers + ) return 
Table.from_api_repr(api_response) - def list_tables(self, dataset, max_results=None, page_token=None, - retry=DEFAULT_RETRY): + def list_tables( + self, dataset, max_results=None, page_token=None, retry=DEFAULT_RETRY + ): """List tables in the dataset. See @@ -531,26 +545,26 @@ def list_tables(self, dataset, max_results=None, page_token=None, """ if isinstance(dataset, str): dataset = DatasetReference.from_string( - dataset, default_project=self.project) + dataset, default_project=self.project + ) if not isinstance(dataset, (Dataset, DatasetReference)): - raise TypeError( - 'dataset must be a Dataset, DatasetReference, or string') + raise TypeError("dataset must be a Dataset, DatasetReference, or string") - path = '%s/tables' % dataset.path + path = "%s/tables" % dataset.path result = page_iterator.HTTPIterator( client=self, api_request=functools.partial(self._call_api, retry), path=path, item_to_value=_item_to_table, - items_key='tables', + items_key="tables", page_token=page_token, - max_results=max_results) + max_results=max_results, + ) result.dataset = dataset return result - def delete_dataset(self, dataset, delete_contents=False, - retry=DEFAULT_RETRY): + def delete_dataset(self, dataset, delete_contents=False, retry=DEFAULT_RETRY): """Delete a dataset. See @@ -575,19 +589,17 @@ def delete_dataset(self, dataset, delete_contents=False, """ if isinstance(dataset, str): dataset = DatasetReference.from_string( - dataset, default_project=self.project) + dataset, default_project=self.project + ) if not isinstance(dataset, (Dataset, DatasetReference)): - raise TypeError('dataset must be a Dataset or a DatasetReference') + raise TypeError("dataset must be a Dataset or a DatasetReference") params = {} if delete_contents: - params['deleteContents'] = 'true' + params["deleteContents"] = "true" - self._call_api(retry, - method='DELETE', - path=dataset.path, - query_params=params) + self._call_api(retry, method="DELETE", path=dataset.path, query_params=params) def delete_table(self, table, retry=DEFAULT_RETRY): """Delete a table @@ -609,15 +621,15 @@ def delete_table(self, table, retry=DEFAULT_RETRY): (Optional) How to retry the RPC. """ if isinstance(table, str): - table = TableReference.from_string( - table, default_project=self.project) + table = TableReference.from_string(table, default_project=self.project) if not isinstance(table, (Table, TableReference)): - raise TypeError('table must be a Table or a TableReference') - self._call_api(retry, method='DELETE', path=table.path) + raise TypeError("table must be a Table or a TableReference") + self._call_api(retry, method="DELETE", path=table.path) def _get_query_results( - self, job_id, retry, project=None, timeout_ms=None, location=None): + self, job_id, retry, project=None, timeout_ms=None, location=None + ): """Get the query results object for a query job. Arguments: @@ -637,27 +649,28 @@ def _get_query_results( A new ``_QueryResults`` instance. 
""" - extra_params = {'maxResults': 0} + extra_params = {"maxResults": 0} if project is None: project = self.project if timeout_ms is not None: - extra_params['timeoutMs'] = timeout_ms + extra_params["timeoutMs"] = timeout_ms if location is None: location = self.location if location is not None: - extra_params['location'] = location + extra_params["location"] = location - path = '/projects/{}/queries/{}'.format(project, job_id) + path = "/projects/{}/queries/{}".format(project, job_id) # This call is typically made in a polling loop that checks whether the # job is complete (from QueryJob.done(), called ultimately from # QueryJob.result()). So we don't need to poll here. resource = self._call_api( - retry, method='GET', path=path, query_params=extra_params) + retry, method="GET", path=path, query_params=extra_params + ) return _QueryResults.from_api_repr(resource) def job_from_resource(self, resource): @@ -673,19 +686,18 @@ def job_from_resource(self, resource): or :class:`google.cloud.bigquery.job.QueryJob` :returns: the job instance, constructed via the resource """ - config = resource.get('configuration', {}) - if 'load' in config: + config = resource.get("configuration", {}) + if "load" in config: return job.LoadJob.from_api_repr(resource, self) - elif 'copy' in config: + elif "copy" in config: return job.CopyJob.from_api_repr(resource, self) - elif 'extract' in config: + elif "extract" in config: return job.ExtractJob.from_api_repr(resource, self) - elif 'query' in config: + elif "query" in config: return job.QueryJob.from_api_repr(resource, self) return job.UnknownJob.from_api_repr(resource, self) - def get_job( - self, job_id, project=None, location=None, retry=DEFAULT_RETRY): + def get_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): """Fetch a job for the project associated with this client. See @@ -709,7 +721,7 @@ def get_job( google.cloud.bigquery.job.QueryJob]: Job instance, based on the resource returned by the API. """ - extra_params = {'projection': 'full'} + extra_params = {"projection": "full"} if project is None: project = self.project @@ -718,17 +730,17 @@ def get_job( location = self.location if location is not None: - extra_params['location'] = location + extra_params["location"] = location - path = '/projects/{}/jobs/{}'.format(project, job_id) + path = "/projects/{}/jobs/{}".format(project, job_id) resource = self._call_api( - retry, method='GET', path=path, query_params=extra_params) + retry, method="GET", path=path, query_params=extra_params + ) return self.job_from_resource(resource) - def cancel_job( - self, job_id, project=None, location=None, retry=DEFAULT_RETRY): + def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): """Attempt to cancel a job from a job ID. See @@ -752,7 +764,7 @@ def cancel_job( google.cloud.bigquery.job.QueryJob]: Job instance, based on the resource returned by the API. 
""" - extra_params = {'projection': 'full'} + extra_params = {"projection": "full"} if project is None: project = self.project @@ -761,19 +773,27 @@ def cancel_job( location = self.location if location is not None: - extra_params['location'] = location + extra_params["location"] = location - path = '/projects/{}/jobs/{}/cancel'.format(project, job_id) + path = "/projects/{}/jobs/{}/cancel".format(project, job_id) resource = self._call_api( - retry, method='POST', path=path, query_params=extra_params) + retry, method="POST", path=path, query_params=extra_params + ) - return self.job_from_resource(resource['job']) + return self.job_from_resource(resource["job"]) def list_jobs( - self, project=None, max_results=None, page_token=None, - all_users=None, state_filter=None, retry=DEFAULT_RETRY, - min_creation_time=None, max_creation_time=None): + self, + project=None, + max_results=None, + page_token=None, + all_users=None, + state_filter=None, + retry=DEFAULT_RETRY, + min_creation_time=None, + max_creation_time=None, + ): """List jobs for the project associated with this client. See @@ -816,42 +836,47 @@ def list_jobs( Iterable of job instances. """ extra_params = { - 'allUsers': all_users, - 'stateFilter': state_filter, - 'minCreationTime': _str_or_none( - google.cloud._helpers._millis_from_datetime( - min_creation_time)), - 'maxCreationTime': _str_or_none( - google.cloud._helpers._millis_from_datetime( - max_creation_time)), - 'projection': 'full' + "allUsers": all_users, + "stateFilter": state_filter, + "minCreationTime": _str_or_none( + google.cloud._helpers._millis_from_datetime(min_creation_time) + ), + "maxCreationTime": _str_or_none( + google.cloud._helpers._millis_from_datetime(max_creation_time) + ), + "projection": "full", } - extra_params = {param: value for param, value in extra_params.items() - if value is not None} + extra_params = { + param: value for param, value in extra_params.items() if value is not None + } if project is None: project = self.project - path = '/projects/%s/jobs' % (project,) + path = "/projects/%s/jobs" % (project,) return page_iterator.HTTPIterator( client=self, api_request=functools.partial(self._call_api, retry), path=path, item_to_value=_item_to_job, - items_key='jobs', + items_key="jobs", page_token=page_token, max_results=max_results, - extra_params=extra_params) + extra_params=extra_params, + ) def load_table_from_uri( - self, source_uris, destination, - job_id=None, - job_id_prefix=None, - location=None, - project=None, - job_config=None, - retry=DEFAULT_RETRY): + self, + source_uris, + destination, + job_id=None, + job_id_prefix=None, + location=None, + project=None, + job_config=None, + retry=DEFAULT_RETRY, + ): """Starts a job for loading data into a table from CloudStorage. 
See @@ -905,19 +930,27 @@ def load_table_from_uri( if isinstance(destination, str): destination = TableReference.from_string( - destination, default_project=self.project) + destination, default_project=self.project + ) - load_job = job.LoadJob( - job_ref, source_uris, destination, self, job_config) + load_job = job.LoadJob(job_ref, source_uris, destination, self, job_config) load_job._begin(retry=retry) return load_job def load_table_from_file( - self, file_obj, destination, rewind=False, size=None, - num_retries=_DEFAULT_NUM_RETRIES, job_id=None, - job_id_prefix=None, location=None, project=None, - job_config=None): + self, + file_obj, + destination, + rewind=False, + size=None, + num_retries=_DEFAULT_NUM_RETRIES, + job_id=None, + job_id_prefix=None, + location=None, + project=None, + job_config=None, + ): """Upload the contents of this table from a file-like object. Similar to :meth:`load_table_from_uri`, this method creates, starts and @@ -976,7 +1009,8 @@ def load_table_from_file( if isinstance(destination, str): destination = TableReference.from_string( - destination, default_project=self.project) + destination, default_project=self.project + ) job_ref = job._JobReference(job_id, project=project, location=location) load_job = job.LoadJob(job_ref, None, destination, self, job_config) @@ -990,20 +1024,28 @@ def load_table_from_file( try: if size is None or size >= _MAX_MULTIPART_SIZE: response = self._do_resumable_upload( - file_obj, job_resource, num_retries) + file_obj, job_resource, num_retries + ) else: response = self._do_multipart_upload( - file_obj, job_resource, size, num_retries) + file_obj, job_resource, size, num_retries + ) except resumable_media.InvalidResponse as exc: raise exceptions.from_http_response(exc.response) return self.job_from_resource(response.json()) - def load_table_from_dataframe(self, dataframe, destination, - num_retries=_DEFAULT_NUM_RETRIES, - job_id=None, job_id_prefix=None, - location=None, project=None, - job_config=None): + def load_table_from_dataframe( + self, + dataframe, + destination, + num_retries=_DEFAULT_NUM_RETRIES, + job_id=None, + job_id_prefix=None, + location=None, + project=None, + job_config=None, + ): """Upload the contents of a table from a pandas DataFrame. Similar to :meth:`load_table_from_uri`, this method creates, starts and @@ -1058,7 +1100,8 @@ def load_table_from_dataframe(self, dataframe, destination, location = self.location return self.load_table_from_file( - buffer, destination, + buffer, + destination, num_retries=num_retries, rewind=True, job_id=job_id, @@ -1086,7 +1129,8 @@ def _do_resumable_upload(self, stream, metadata, num_retries): is uploaded. 
""" upload, transport = self._initiate_resumable_upload( - stream, metadata, num_retries) + stream, metadata, num_retries + ) while not upload.finished: response = upload.transmit_next_chunk(transport) @@ -1124,11 +1168,12 @@ def _initiate_resumable_upload(self, stream, metadata, num_retries): if num_retries is not None: upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries) + max_retries=num_retries + ) upload.initiate( - transport, stream, metadata, _GENERIC_CONTENT_TYPE, - stream_final=False) + transport, stream, metadata, _GENERIC_CONTENT_TYPE, stream_final=False + ) return upload, transport @@ -1168,17 +1213,24 @@ def _do_multipart_upload(self, stream, metadata, size, num_retries): if num_retries is not None: upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries) + max_retries=num_retries + ) - response = upload.transmit( - self._http, data, metadata, _GENERIC_CONTENT_TYPE) + response = upload.transmit(self._http, data, metadata, _GENERIC_CONTENT_TYPE) return response def copy_table( - self, sources, destination, job_id=None, job_id_prefix=None, - location=None, project=None, job_config=None, - retry=DEFAULT_RETRY): + self, + sources, + destination, + job_id=None, + job_id_prefix=None, + location=None, + project=None, + job_config=None, + retry=DEFAULT_RETRY, + ): """Copy one or more tables to another table. See @@ -1229,27 +1281,34 @@ def copy_table( job_ref = job._JobReference(job_id, project=project, location=location) if isinstance(sources, str): - sources = TableReference.from_string( - sources, default_project=self.project) + sources = TableReference.from_string(sources, default_project=self.project) if isinstance(destination, str): destination = TableReference.from_string( - destination, default_project=self.project) + destination, default_project=self.project + ) if not isinstance(sources, collections_abc.Sequence): sources = [sources] copy_job = job.CopyJob( - job_ref, sources, destination, client=self, - job_config=job_config) + job_ref, sources, destination, client=self, job_config=job_config + ) copy_job._begin(retry=retry) return copy_job def extract_table( - self, source, destination_uris, job_id=None, job_id_prefix=None, - location=None, project=None, job_config=None, - retry=DEFAULT_RETRY): + self, + source, + destination_uris, + job_id=None, + job_id_prefix=None, + location=None, + project=None, + job_config=None, + retry=DEFAULT_RETRY, + ): """Start a job to extract a table into Cloud Storage files. See @@ -1300,24 +1359,28 @@ def extract_table( job_ref = job._JobReference(job_id, project=project, location=location) if isinstance(source, str): - source = TableReference.from_string( - source, default_project=self.project) + source = TableReference.from_string(source, default_project=self.project) if isinstance(destination_uris, six.string_types): destination_uris = [destination_uris] extract_job = job.ExtractJob( - job_ref, source, destination_uris, client=self, - job_config=job_config) + job_ref, source, destination_uris, client=self, job_config=job_config + ) extract_job._begin(retry=retry) return extract_job def query( - self, query, - job_config=None, - job_id=None, job_id_prefix=None, - location=None, project=None, retry=DEFAULT_RETRY): + self, + query, + job_config=None, + job_id=None, + job_id_prefix=None, + location=None, + project=None, + retry=DEFAULT_RETRY, + ): """Run a SQL query. 
See @@ -1366,13 +1429,13 @@ def query( # should be filled in with the default # the incoming therefore has precedence job_config = job_config._fill_from_default( - self._default_query_job_config) + self._default_query_job_config + ) else: job_config = self._default_query_job_config job_ref = job._JobReference(job_id, project=project, location=location) - query_job = job.QueryJob( - job_ref, query, client=self, job_config=job_config) + query_job = job.QueryJob(job_ref, query, client=self, job_config=job_config) query_job._begin(retry=retry) return query_job @@ -1419,19 +1482,18 @@ def insert_rows(self, table, rows, selected_fields=None, **kwargs): ValueError: if table's schema is not set """ if isinstance(table, str): - table = TableReference.from_string( - table, default_project=self.project) + table = TableReference.from_string(table, default_project=self.project) if selected_fields is not None: schema = selected_fields elif isinstance(table, TableReference): - raise ValueError('need selected_fields with TableReference') + raise ValueError("need selected_fields with TableReference") elif isinstance(table, Table): if len(table.schema) == 0: raise ValueError(_TABLE_HAS_NO_SCHEMA) schema = table.schema else: - raise TypeError('table should be Table or TableReference') + raise TypeError("table should be Table or TableReference") json_rows = [] @@ -1450,9 +1512,16 @@ def insert_rows(self, table, rows, selected_fields=None, **kwargs): return self.insert_rows_json(table, json_rows, **kwargs) - def insert_rows_json(self, table, json_rows, row_ids=None, - skip_invalid_rows=None, ignore_unknown_values=None, - template_suffix=None, retry=DEFAULT_RETRY): + def insert_rows_json( + self, + table, + json_rows, + row_ids=None, + skip_invalid_rows=None, + ignore_unknown_values=None, + template_suffix=None, + retry=DEFAULT_RETRY, + ): """Insert rows into a table without applying local type conversions. See @@ -1493,40 +1562,36 @@ def insert_rows_json(self, table, json_rows, row_ids=None, the mappings describing one or more problems with the row. """ if isinstance(table, str): - table = TableReference.from_string( - table, default_project=self.project) + table = TableReference.from_string(table, default_project=self.project) rows_info = [] - data = {'rows': rows_info} + data = {"rows": rows_info} for index, row in enumerate(json_rows): - info = {'json': row} + info = {"json": row} if row_ids is not None: - info['insertId'] = row_ids[index] + info["insertId"] = row_ids[index] else: - info['insertId'] = str(uuid.uuid4()) + info["insertId"] = str(uuid.uuid4()) rows_info.append(info) if skip_invalid_rows is not None: - data['skipInvalidRows'] = skip_invalid_rows + data["skipInvalidRows"] = skip_invalid_rows if ignore_unknown_values is not None: - data['ignoreUnknownValues'] = ignore_unknown_values + data["ignoreUnknownValues"] = ignore_unknown_values if template_suffix is not None: - data['templateSuffix'] = template_suffix + data["templateSuffix"] = template_suffix # We can always retry, because every row has an insert ID. 
response = self._call_api( - retry, - method='POST', - path='%s/insertAll' % table.path, - data=data) + retry, method="POST", path="%s/insertAll" % table.path, data=data + ) errors = [] - for error in response.get('insertErrors', ()): - errors.append({'index': int(error['index']), - 'errors': error['errors']}) + for error in response.get("insertErrors", ()): + errors.append({"index": int(error["index"]), "errors": error["errors"]}) return errors @@ -1548,23 +1613,31 @@ def list_partitions(self, table, retry=DEFAULT_RETRY): A list of the partition ids present in the partitioned table """ if isinstance(table, str): - table = TableReference.from_string( - table, default_project=self.project) + table = TableReference.from_string(table, default_project=self.project) meta_table = self.get_table( TableReference( self.dataset(table.dataset_id, project=table.project), - '%s$__PARTITIONS_SUMMARY__' % table.table_id)) - - subset = [col for col in - meta_table.schema if col.name == 'partition_id'] - return [row[0] for row in self.list_rows(meta_table, - selected_fields=subset, - retry=retry)] + "%s$__PARTITIONS_SUMMARY__" % table.table_id, + ) + ) - def list_rows(self, table, selected_fields=None, max_results=None, - page_token=None, start_index=None, page_size=None, - retry=DEFAULT_RETRY): + subset = [col for col in meta_table.schema if col.name == "partition_id"] + return [ + row[0] + for row in self.list_rows(meta_table, selected_fields=subset, retry=retry) + ] + + def list_rows( + self, + table, + selected_fields=None, + max_results=None, + page_token=None, + start_index=None, + page_size=None, + retry=DEFAULT_RETRY, + ): """List the rows of the table. See @@ -1616,36 +1689,35 @@ def list_rows(self, table, selected_fields=None, max_results=None, current page: ``iterator.page.num_items``). """ if isinstance(table, str): - table = TableReference.from_string( - table, default_project=self.project) + table = TableReference.from_string(table, default_project=self.project) if selected_fields is not None: schema = selected_fields elif isinstance(table, TableReference): - raise ValueError('need selected_fields with TableReference') + raise ValueError("need selected_fields with TableReference") elif isinstance(table, Table): if len(table.schema) == 0 and table.created is None: raise ValueError(_TABLE_HAS_NO_SCHEMA) schema = table.schema else: - raise TypeError('table should be Table or TableReference') + raise TypeError("table should be Table or TableReference") params = {} if selected_fields is not None: - params['selectedFields'] = ','.join( - field.name for field in selected_fields) + params["selectedFields"] = ",".join(field.name for field in selected_fields) if start_index is not None: - params['startIndex'] = start_index + params["startIndex"] = start_index row_iterator = RowIterator( client=self, api_request=functools.partial(self._call_api, retry), - path='%s/data' % (table.path,), + path="%s/data" % (table.path,), schema=schema, page_token=page_token, max_results=max_results, page_size=page_size, - extra_params=params) + extra_params=params, + ) return row_iterator @@ -1663,6 +1735,8 @@ def _item_to_project(iterator, resource): :returns: The next project in the page. """ return Project.from_api_repr(resource) + + # pylint: enable=unused-argument @@ -1740,18 +1814,20 @@ def _check_mode(stream): :raises: :exc:`ValueError` if the ``stream.mode`` is a valid attribute and is not among ``rb``, ``r+b`` or ``rb+``. 
""" - mode = getattr(stream, 'mode', None) + mode = getattr(stream, "mode", None) if isinstance(stream, gzip.GzipFile): if mode != gzip.READ: raise ValueError( "Cannot upload gzip files opened in write mode: use " - "gzip.GzipFile(filename, mode='rb')") + "gzip.GzipFile(filename, mode='rb')" + ) else: - if mode is not None and mode not in ('rb', 'r+b', 'rb+'): + if mode is not None and mode not in ("rb", "r+b", "rb+"): raise ValueError( "Cannot upload files opened in text mode: use " - "open(filename, mode='rb') or open(filename, mode='r+b')") + "open(filename, mode='rb') or open(filename, mode='r+b')" + ) def _get_upload_headers(user_agent): @@ -1764,8 +1840,8 @@ def _get_upload_headers(user_agent): :returns: The headers to be used for the request. """ return { - 'Accept': 'application/json', - 'Accept-Encoding': 'gzip, deflate', - 'User-Agent': user_agent, - 'content-type': 'application/json', + "Accept": "application/json", + "Accept-Encoding": "gzip, deflate", + "User-Agent": user_agent, + "content-type": "application/json", } diff --git a/bigquery/google/cloud/bigquery/dataset.py b/bigquery/google/cloud/bigquery/dataset.py index 82d9b432b6d2..c4e8e839497c 100644 --- a/bigquery/google/cloud/bigquery/dataset.py +++ b/bigquery/google/cloud/bigquery/dataset.py @@ -72,23 +72,28 @@ class AccessEntry(object): >>> entry = AccessEntry(None, 'view', view) """ - ENTITY_TYPES = frozenset(['userByEmail', 'groupByEmail', 'domain', - 'specialGroup', 'view']) + ENTITY_TYPES = frozenset( + ["userByEmail", "groupByEmail", "domain", "specialGroup", "view"] + ) """Allowed entity types.""" def __init__(self, role, entity_type, entity_id): if entity_type not in self.ENTITY_TYPES: - message = 'Entity type %r not among: %s' % ( - entity_type, ', '.join(self.ENTITY_TYPES)) + message = "Entity type %r not among: %s" % ( + entity_type, + ", ".join(self.ENTITY_TYPES), + ) raise ValueError(message) - if entity_type == 'view': + if entity_type == "view": if role is not None: - raise ValueError('Role must be None for a view. Received ' - 'role: %r' % (role,)) + raise ValueError( + "Role must be None for a view. Received " "role: %r" % (role,) + ) else: if role is None: - raise ValueError('Role must be set for entity ' - 'type %r' % (entity_type,)) + raise ValueError( + "Role must be set for entity " "type %r" % (entity_type,) + ) self.role = role self.entity_type = entity_type @@ -100,14 +105,18 @@ def __eq__(self, other): return ( self.role == other.role and self.entity_type == other.entity_type - and self.entity_id == other.entity_id) + and self.entity_id == other.entity_id + ) def __ne__(self, other): return not self == other def __repr__(self): - return '' % ( - self.role, self.entity_type, self.entity_id) + return "" % ( + self.role, + self.entity_type, + self.entity_id, + ) def to_api_repr(self): """Construct the API resource representation of this access entry @@ -117,7 +126,7 @@ def to_api_repr(self): """ resource = {self.entity_type: self.entity_id} if self.role is not None: - resource['role'] = self.role + resource["role"] = self.role return resource @classmethod @@ -138,10 +147,10 @@ def from_api_repr(cls, resource): key. 
""" entry = resource.copy() - role = entry.pop('role', None) + role = entry.pop("role", None) entity_type, entity_id = entry.popitem() if len(entry) != 0: - raise ValueError('Entry has unexpected keys remaining.', entry) + raise ValueError("Entry has unexpected keys remaining.", entry) return cls(role, entity_type, entity_id) @@ -180,7 +189,7 @@ def dataset_id(self): @property def path(self): """str: URL path for the dataset based on project and dataset ID.""" - return '/projects/%s/datasets/%s' % (self.project, self.dataset_id) + return "/projects/%s/datasets/%s" % (self.project, self.dataset_id) def table(self, table_id): """Constructs a TableReference. @@ -206,8 +215,8 @@ def from_api_repr(cls, resource): google.cloud.bigquery.dataset.DatasetReference: Dataset reference parsed from ``resource``. """ - project = resource['projectId'] - dataset_id = resource['datasetId'] + project = resource["projectId"] + dataset_id = resource["datasetId"] return cls(project, dataset_id) @classmethod @@ -238,20 +247,22 @@ def from_string(cls, dataset_id, default_project=None): """ output_dataset_id = dataset_id output_project_id = default_project - parts = dataset_id.split('.') + parts = dataset_id.split(".") if len(parts) == 1 and not default_project: raise ValueError( - 'When default_project is not set, dataset_id must be a ' - 'fully-qualified dataset ID in standard SQL format. ' - 'e.g. "project.dataset_id", got {}'.format(dataset_id)) + "When default_project is not set, dataset_id must be a " + "fully-qualified dataset ID in standard SQL format. " + 'e.g. "project.dataset_id", got {}'.format(dataset_id) + ) elif len(parts) == 2: output_project_id, output_dataset_id = parts elif len(parts) > 2: raise ValueError( - 'Too many parts in dataset_id. Expected a fully-qualified ' - 'dataset ID in standard SQL format. e.g. ' - '"project.dataset_id", got {}'.format(dataset_id)) + "Too many parts in dataset_id. Expected a fully-qualified " + "dataset ID in standard SQL format. e.g. " + '"project.dataset_id", got {}'.format(dataset_id) + ) return cls(output_project_id, output_dataset_id) @@ -261,10 +272,7 @@ def to_api_repr(self): Returns: Dict[str, str]: dataset reference represented as an API resource """ - return { - 'projectId': self._project, - 'datasetId': self._dataset_id, - } + return {"projectId": self._project, "datasetId": self._dataset_id} def _key(self): """A tuple key that uniquely describes this field. @@ -274,10 +282,7 @@ def _key(self): Returns: Tuple[str]: The contents of this :class:`.DatasetReference`. 
""" - return ( - self._project, - self._dataset_id, - ) + return (self._project, self._dataset_id) def __eq__(self, other): if not isinstance(other, DatasetReference): @@ -291,7 +296,7 @@ def __hash__(self): return hash(self._key()) def __repr__(self): - return 'DatasetReference{}'.format(self._key()) + return "DatasetReference{}".format(self._key()) class Dataset(object): @@ -306,27 +311,24 @@ class Dataset(object): """ _PROPERTY_TO_API_FIELD = { - 'access_entries': 'access', - 'created': 'creationTime', - 'default_table_expiration_ms': 'defaultTableExpirationMs', - 'friendly_name': 'friendlyName', + "access_entries": "access", + "created": "creationTime", + "default_table_expiration_ms": "defaultTableExpirationMs", + "friendly_name": "friendlyName", } def __init__(self, dataset_ref): - self._properties = { - 'datasetReference': dataset_ref.to_api_repr(), - 'labels': {}, - } + self._properties = {"datasetReference": dataset_ref.to_api_repr(), "labels": {}} @property def project(self): """str: Project ID of the project bound to the dataset.""" - return self._properties['datasetReference']['projectId'] + return self._properties["datasetReference"]["projectId"] @property def path(self): """str: URL path for the dataset based on project and dataset ID.""" - return '/projects/%s/datasets/%s' % (self.project, self.dataset_id) + return "/projects/%s/datasets/%s" % (self.project, self.dataset_id) @property def access_entries(self): @@ -342,31 +344,32 @@ def access_entries(self): If any item in the sequence is not an :class:`~google.cloud.bigquery.dataset.AccessEntry`. """ - entries = self._properties.get('access', []) + entries = self._properties.get("access", []) return [AccessEntry.from_api_repr(entry) for entry in entries] @access_entries.setter def access_entries(self, value): if not all(isinstance(field, AccessEntry) for field in value): - raise ValueError('Values must be AccessEntry instances') + raise ValueError("Values must be AccessEntry instances") entries = [entry.to_api_repr() for entry in value] - self._properties['access'] = entries + self._properties["access"] = entries @property def created(self): """Union[datetime.datetime, None]: Datetime at which the dataset was created (:data:`None` until set from the server). """ - creation_time = self._properties.get('creationTime') + creation_time = self._properties.get("creationTime") if creation_time is not None: # creation_time will be in milliseconds. return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(creation_time)) + 1000.0 * float(creation_time) + ) @property def dataset_id(self): """str: Dataset ID.""" - return self._properties['datasetReference']['datasetId'] + return self._properties["datasetReference"]["datasetId"] @property def full_dataset_id(self): @@ -375,7 +378,7 @@ def full_dataset_id(self): In the format ``project_id:dataset_id``. """ - return self._properties.get('id') + return self._properties.get("id") @property def reference(self): @@ -389,25 +392,26 @@ def etag(self): """Union[str, None]: ETag for the dataset resource (:data:`None` until set from the server). """ - return self._properties.get('etag') + return self._properties.get("etag") @property def modified(self): """Union[datetime.datetime, None]: Datetime at which the dataset was last modified (:data:`None` until set from the server). """ - modified_time = self._properties.get('lastModifiedTime') + modified_time = self._properties.get("lastModifiedTime") if modified_time is not None: # modified_time will be in milliseconds. 
return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(modified_time)) + 1000.0 * float(modified_time) + ) @property def self_link(self): """Union[str, None]: URL for the dataset resource (:data:`None` until set from the server). """ - return self._properties.get('selfLink') + return self._properties.get("selfLink") @property def default_table_expiration_ms(self): @@ -417,15 +421,13 @@ def default_table_expiration_ms(self): Raises: ValueError: For invalid value types. """ - return _helpers._int_or_none( - self._properties.get('defaultTableExpirationMs')) + return _helpers._int_or_none(self._properties.get("defaultTableExpirationMs")) @default_table_expiration_ms.setter def default_table_expiration_ms(self, value): if not isinstance(value, six.integer_types) and value is not None: raise ValueError("Pass an integer, or None") - self._properties['defaultTableExpirationMs'] = _helpers._str_or_none( - value) + self._properties["defaultTableExpirationMs"] = _helpers._str_or_none(value) @property def description(self): @@ -435,13 +437,13 @@ def description(self): Raises: ValueError: for invalid value types. """ - return self._properties.get('description') + return self._properties.get("description") @description.setter def description(self, value): if not isinstance(value, six.string_types) and value is not None: raise ValueError("Pass a string, or None") - self._properties['description'] = value + self._properties["description"] = value @property def friendly_name(self): @@ -451,13 +453,13 @@ def friendly_name(self): Raises: ValueError: for invalid value types. """ - return self._properties.get('friendlyName') + return self._properties.get("friendlyName") @friendly_name.setter def friendly_name(self, value): if not isinstance(value, six.string_types) and value is not None: raise ValueError("Pass a string, or None") - self._properties['friendlyName'] = value + self._properties["friendlyName"] = value @property def location(self): @@ -467,13 +469,13 @@ def location(self): Raises: ValueError: for invalid value types. """ - return self._properties.get('location') + return self._properties.get("location") @location.setter def location(self, value): if not isinstance(value, six.string_types) and value is not None: raise ValueError("Pass a string, or None") - self._properties['location'] = value + self._properties["location"] = value @property def labels(self): @@ -487,13 +489,13 @@ def labels(self): Raises: ValueError: for invalid value types. """ - return self._properties.setdefault('labels', {}) + return self._properties.setdefault("labels", {}) @labels.setter def labels(self, value): if not isinstance(value, dict): raise ValueError("Pass a dict") - self._properties['labels'] = value + self._properties["labels"] = value @classmethod def from_string(cls, full_dataset_id): @@ -531,12 +533,16 @@ def from_api_repr(cls, resource): google.cloud.bigquery.dataset.Dataset: Dataset parsed from ``resource``. 
""" - if ('datasetReference' not in resource - or 'datasetId' not in resource['datasetReference']): - raise KeyError('Resource lacks required identity information:' - '["datasetReference"]["datasetId"]') - project_id = resource['datasetReference']['projectId'] - dataset_id = resource['datasetReference']['datasetId'] + if ( + "datasetReference" not in resource + or "datasetId" not in resource["datasetReference"] + ): + raise KeyError( + "Resource lacks required identity information:" + '["datasetReference"]["datasetId"]' + ) + project_id = resource["datasetReference"]["projectId"] + dataset_id = resource["datasetReference"]["datasetId"] dataset = cls(DatasetReference(project_id, dataset_id)) dataset._properties = copy.deepcopy(resource) return dataset @@ -555,7 +561,7 @@ def _build_resource(self, filter_fields): for filter_field in filter_fields: api_field = self._PROPERTY_TO_API_FIELD.get(filter_field) if api_field is None and filter_field not in self._properties: - raise ValueError('No Dataset property %s' % filter_field) + raise ValueError("No Dataset property %s" % filter_field) elif api_field is not None: partial[api_field] = self._properties.get(api_field) else: @@ -578,7 +584,7 @@ def table(self, table_id): return TableReference(self.reference, table_id) def __repr__(self): - return 'Dataset({})'.format(repr(self.reference)) + return "Dataset({})".format(repr(self.reference)) class DatasetListItem(object): @@ -605,25 +611,27 @@ class DatasetListItem(object): """ def __init__(self, resource): - if 'datasetReference' not in resource: - raise ValueError('resource must contain a datasetReference value') - if 'projectId' not in resource['datasetReference']: + if "datasetReference" not in resource: + raise ValueError("resource must contain a datasetReference value") + if "projectId" not in resource["datasetReference"]: raise ValueError( - "resource['datasetReference'] must contain a projectId value") - if 'datasetId' not in resource['datasetReference']: + "resource['datasetReference'] must contain a projectId value" + ) + if "datasetId" not in resource["datasetReference"]: raise ValueError( - "resource['datasetReference'] must contain a datasetId value") + "resource['datasetReference'] must contain a datasetId value" + ) self._properties = resource @property def project(self): """str: Project bound to the dataset.""" - return self._properties['datasetReference']['projectId'] + return self._properties["datasetReference"]["projectId"] @property def dataset_id(self): """str: Dataset ID.""" - return self._properties['datasetReference']['datasetId'] + return self._properties["datasetReference"]["datasetId"] @property def full_dataset_id(self): @@ -632,19 +640,19 @@ def full_dataset_id(self): In the format ``project_id:dataset_id``. """ - return self._properties.get('id') + return self._properties.get("id") @property def friendly_name(self): """Union[str, None]: Title of the dataset as set by the user (defaults to :data:`None`). 
""" - return self._properties.get('friendlyName') + return self._properties.get("friendlyName") @property def labels(self): """Dict[str, str]: Labels for the dataset.""" - return self._properties.setdefault('labels', {}) + return self._properties.setdefault("labels", {}) @property def reference(self): diff --git a/bigquery/google/cloud/bigquery/dbapi/__init__.py b/bigquery/google/cloud/bigquery/dbapi/__init__.py index de34d5553315..d1a723949b10 100644 --- a/bigquery/google/cloud/bigquery/dbapi/__init__.py +++ b/bigquery/google/cloud/bigquery/dbapi/__init__.py @@ -48,18 +48,40 @@ from google.cloud.bigquery.dbapi.types import STRING -apilevel = '2.0' +apilevel = "2.0" # Threads may share the module and connections, but not cursors. threadsafety = 2 -paramstyle = 'pyformat' +paramstyle = "pyformat" __all__ = [ - 'apilevel', 'threadsafety', 'paramstyle', 'connect', 'Connection', - 'Cursor', 'Warning', 'Error', 'InterfaceError', 'DatabaseError', - 'DataError', 'OperationalError', 'IntegrityError', 'InternalError', - 'ProgrammingError', 'NotSupportedError', 'Binary', 'Date', 'DateFromTicks', - 'Time', 'TimeFromTicks', 'Timestamp', 'TimestampFromTicks', 'BINARY', - 'DATETIME', 'NUMBER', 'ROWID', 'STRING', + "apilevel", + "threadsafety", + "paramstyle", + "connect", + "Connection", + "Cursor", + "Warning", + "Error", + "InterfaceError", + "DatabaseError", + "DataError", + "OperationalError", + "IntegrityError", + "InternalError", + "ProgrammingError", + "NotSupportedError", + "Binary", + "Date", + "DateFromTicks", + "Time", + "TimeFromTicks", + "Timestamp", + "TimestampFromTicks", + "BINARY", + "DATETIME", + "NUMBER", + "ROWID", + "STRING", ] diff --git a/bigquery/google/cloud/bigquery/dbapi/_helpers.py b/bigquery/google/cloud/bigquery/dbapi/_helpers.py index ee9198cbada4..6e7f58bd4944 100644 --- a/bigquery/google/cloud/bigquery/dbapi/_helpers.py +++ b/bigquery/google/cloud/bigquery/dbapi/_helpers.py @@ -46,27 +46,29 @@ def scalar_to_query_parameter(value, name=None): parameter_type = None if isinstance(value, bool): - parameter_type = 'BOOL' + parameter_type = "BOOL" elif isinstance(value, numbers.Integral): - parameter_type = 'INT64' + parameter_type = "INT64" elif isinstance(value, numbers.Real): - parameter_type = 'FLOAT64' + parameter_type = "FLOAT64" elif isinstance(value, decimal.Decimal): - parameter_type = 'NUMERIC' + parameter_type = "NUMERIC" elif isinstance(value, six.text_type): - parameter_type = 'STRING' + parameter_type = "STRING" elif isinstance(value, six.binary_type): - parameter_type = 'BYTES' + parameter_type = "BYTES" elif isinstance(value, datetime.datetime): - parameter_type = 'DATETIME' if value.tzinfo is None else 'TIMESTAMP' + parameter_type = "DATETIME" if value.tzinfo is None else "TIMESTAMP" elif isinstance(value, datetime.date): - parameter_type = 'DATE' + parameter_type = "DATE" elif isinstance(value, datetime.time): - parameter_type = 'TIME' + parameter_type = "TIME" else: raise exceptions.ProgrammingError( - 'encountered parameter {} with value {} of unexpected type'.format( - name, value)) + "encountered parameter {} with value {} of unexpected type".format( + name, value + ) + ) return bigquery.ScalarQueryParameter(name, parameter_type, value) @@ -93,8 +95,8 @@ def to_query_parameters_dict(parameters): """ return [ scalar_to_query_parameter(value, name=name) - for name, value - in six.iteritems(parameters)] + for name, value in six.iteritems(parameters) + ] def to_query_parameters(parameters): diff --git a/bigquery/google/cloud/bigquery/dbapi/connection.py 
b/bigquery/google/cloud/bigquery/dbapi/connection.py index 5f962df97412..0dbc9143b255 100644 --- a/bigquery/google/cloud/bigquery/dbapi/connection.py +++ b/bigquery/google/cloud/bigquery/dbapi/connection.py @@ -24,6 +24,7 @@ class Connection(object): :type client: :class:`~google.cloud.bigquery.Client` :param client: A client used to connect to BigQuery. """ + def __init__(self, client): self._client = client diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py index e56a343c362d..1fbd9fb10cc4 100644 --- a/bigquery/google/cloud/bigquery/dbapi/cursor.py +++ b/bigquery/google/cloud/bigquery/dbapi/cursor.py @@ -33,11 +33,17 @@ # five are optional and are set to None if no meaningful values can be # provided. Column = collections.namedtuple( - 'Column', + "Column", [ - 'name', 'type_code', 'display_size', 'internal_size', 'precision', - 'scale', 'null_ok', - ]) + "name", + "type_code", + "display_size", + "internal_size", + "precision", + "scale", + "null_ok", + ], +) class Cursor(object): @@ -46,6 +52,7 @@ class Cursor(object): :type connection: :class:`~google.cloud.bigquery.dbapi.Connection` :param connection: A DB-API connection to Google BigQuery. """ + def __init__(self, connection): self.connection = connection self.description = None @@ -72,16 +79,20 @@ def _set_description(self, schema): self.description = None return - self.description = tuple([ - Column( - name=field.name, - type_code=field.field_type, - display_size=None, - internal_size=None, - precision=None, - scale=None, - null_ok=field.is_nullable) - for field in schema]) + self.description = tuple( + [ + Column( + name=field.name, + type_code=field.field_type, + display_size=None, + internal_size=None, + precision=None, + scale=None, + null_ok=field.is_nullable, + ) + for field in schema + ] + ) def _set_rowcount(self, query_results): """Set the rowcount from query results. @@ -97,8 +108,7 @@ def _set_rowcount(self, query_results): total_rows = 0 num_dml_affected_rows = query_results.num_dml_affected_rows - if (query_results.total_rows is not None - and query_results.total_rows > 0): + if query_results.total_rows is not None and query_results.total_rows > 0: total_rows = query_results.total_rows if num_dml_affected_rows is not None and num_dml_affected_rows > 0: total_rows = num_dml_affected_rows @@ -145,15 +155,15 @@ def execute(self, operation, parameters=None, job_id=None): # query parameters was not one of the standard options. Convert both # the query and the parameters to the format expected by the client # libraries. - formatted_operation = _format_operation( - operation, parameters=parameters) + formatted_operation = _format_operation(operation, parameters=parameters) query_parameters = _helpers.to_query_parameters(parameters) config = job.QueryJobConfig() config.query_parameters = query_parameters config.use_legacy_sql = False self._query_job = client.query( - formatted_operation, job_config=config, job_id=job_id) + formatted_operation, job_config=config, job_id=job_id + ) # Wait for the query to finish. try: @@ -184,11 +194,13 @@ def _try_fetch(self, size=None): """ if self._query_job is None: raise exceptions.InterfaceError( - 'No query results: execute() must be called before fetch.') + "No query results: execute() must be called before fetch." 
+ ) is_dml = ( self._query_job.statement_type - and self._query_job.statement_type.upper() != 'SELECT') + and self._query_job.statement_type.upper() != "SELECT" + ) if is_dml: self._query_data = iter([]) return @@ -198,7 +210,7 @@ def _try_fetch(self, size=None): rows_iter = client.list_rows( self._query_job.destination, selected_fields=self._query_job._query_results.schema, - page_size=self.arraysize + page_size=self.arraysize, ) self._query_data = iter(rows_iter) @@ -285,7 +297,7 @@ def _format_operation_list(operation, parameters): if a parameter used in the operation is not found in the ``parameters`` argument. """ - formatted_params = ['?' for _ in parameters] + formatted_params = ["?" for _ in parameters] try: return operation % tuple(formatted_params) @@ -313,8 +325,8 @@ def _format_operation_dict(operation, parameters): """ formatted_params = {} for name in parameters: - escaped_name = name.replace('`', r'\`') - formatted_params[name] = '@`{}`'.format(escaped_name) + escaped_name = name.replace("`", r"\`") + formatted_params[name] = "@`{}`".format(escaped_name) try: return operation % formatted_params diff --git a/bigquery/google/cloud/bigquery/dbapi/types.py b/bigquery/google/cloud/bigquery/dbapi/types.py index feb3e320bcca..3c8c454a011a 100644 --- a/bigquery/google/cloud/bigquery/dbapi/types.py +++ b/bigquery/google/cloud/bigquery/dbapi/types.py @@ -39,7 +39,7 @@ def Binary(string): :rtype: bytes :returns: The UTF-8 encoded bytes representing the string. """ - return string.encode('utf-8') + return string.encode("utf-8") def TimeFromTicks(ticks, tz=None): @@ -76,9 +76,10 @@ def __eq__(self, other): return other in self.values -STRING = 'STRING' -BINARY = _DBAPITypeObject('BYTES', 'RECORD', 'STRUCT') +STRING = "STRING" +BINARY = _DBAPITypeObject("BYTES", "RECORD", "STRUCT") NUMBER = _DBAPITypeObject( - 'INTEGER', 'INT64', 'FLOAT', 'FLOAT64', 'NUMERIC', 'BOOLEAN', 'BOOL') -DATETIME = _DBAPITypeObject('TIMESTAMP', 'DATE', 'TIME', 'DATETIME') -ROWID = 'ROWID' + "INTEGER", "INT64", "FLOAT", "FLOAT64", "NUMERIC", "BOOLEAN", "BOOL" +) +DATETIME = _DBAPITypeObject("TIMESTAMP", "DATE", "TIME", "DATETIME") +ROWID = "ROWID" diff --git a/bigquery/google/cloud/bigquery/external_config.py b/bigquery/google/cloud/bigquery/external_config.py index 1c7b055e2953..048c2178a654 100644 --- a/bigquery/google/cloud/bigquery/external_config.py +++ b/bigquery/google/cloud/bigquery/external_config.py @@ -37,22 +37,22 @@ class ExternalSourceFormat(object): :class:`~google.cloud.bigquery.job.SourceFormat`). 
""" - CSV = 'CSV' + CSV = "CSV" """Specifies CSV format.""" - GOOGLE_SHEETS = 'GOOGLE_SHEETS' + GOOGLE_SHEETS = "GOOGLE_SHEETS" """Specifies Google Sheets format.""" - NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON' + NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON" """Specifies newline delimited JSON format.""" - AVRO = 'AVRO' + AVRO = "AVRO" """Specifies Avro format.""" - DATASTORE_BACKUP = 'DATASTORE_BACKUP' + DATASTORE_BACKUP = "DATASTORE_BACKUP" """Specifies datastore backup format""" - BIGTABLE = 'BIGTABLE' + BIGTABLE = "BIGTABLE" """Specifies Bigtable format.""" @@ -70,11 +70,11 @@ def encoding(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.%28key%29.bigtableOptions.columnFamilies.columns.encoding https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.columns.encoding """ - return self._properties.get('encoding') + return self._properties.get("encoding") @encoding.setter def encoding(self, value): - self._properties['encoding'] = value + self._properties["encoding"] = value @property def field_name(self): @@ -85,11 +85,11 @@ def field_name(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.%28key%29.bigtableOptions.columnFamilies.columns.fieldName https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.columns.fieldName """ - return self._properties.get('fieldName') + return self._properties.get("fieldName") @field_name.setter def field_name(self, value): - self._properties['fieldName'] = value + self._properties["fieldName"] = value @property def only_read_latest(self): @@ -100,11 +100,11 @@ def only_read_latest(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.%28key%29.bigtableOptions.columnFamilies.columns.onlyReadLatest https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.columns.onlyReadLatest """ - return self._properties.get('onlyReadLatest') + return self._properties.get("onlyReadLatest") @only_read_latest.setter def only_read_latest(self, value): - self._properties['onlyReadLatest'] = value + self._properties["onlyReadLatest"] = value @property def qualifier_encoded(self): @@ -117,14 +117,14 @@ def qualifier_encoded(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.%28key%29.bigtableOptions.columnFamilies.columns.qualifierEncoded https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.columns.qualifierEncoded """ - prop = self._properties.get('qualifierEncoded') + prop = self._properties.get("qualifierEncoded") if prop is None: return None return base64.standard_b64decode(_to_bytes(prop)) @qualifier_encoded.setter def qualifier_encoded(self, value): - self._properties['qualifierEncoded'] = _bytes_to_json(value) + self._properties["qualifierEncoded"] = _bytes_to_json(value) @property def qualifier_string(self): @@ -134,11 +134,11 @@ def qualifier_string(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.%28key%29.bigtableOptions.columnFamilies.columns.qualifierEncoded https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.columns.qualifierEncoded """ - return 
self._properties.get('qualifierString') + return self._properties.get("qualifierString") @qualifier_string.setter def qualifier_string(self, value): - self._properties['qualifierString'] = value + self._properties["qualifierString"] = value @property def type_(self): @@ -148,11 +148,11 @@ def type_(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.%28key%29.bigtableOptions.columnFamilies.columns.type https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.columns.type """ - return self._properties.get('type') + return self._properties.get("type") @type_.setter def type_(self, value): - self._properties['type'] = value + self._properties["type"] = value def to_api_repr(self): """Build an API representation of this object. @@ -197,11 +197,11 @@ def encoding(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.columnFamilies.encoding https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.encoding """ - return self._properties.get('encoding') + return self._properties.get("encoding") @encoding.setter def encoding(self, value): - self._properties['encoding'] = value + self._properties["encoding"] = value @property def family_id(self): @@ -211,11 +211,11 @@ def family_id(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.columnFamilies.familyId https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.familyId """ - return self._properties.get('familyId') + return self._properties.get("familyId") @family_id.setter def family_id(self, value): - self._properties['familyId'] = value + self._properties["familyId"] = value @property def only_read_latest(self): @@ -226,11 +226,11 @@ def only_read_latest(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.columnFamilies.onlyReadLatest https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.onlyReadLatest """ - return self._properties.get('onlyReadLatest') + return self._properties.get("onlyReadLatest") @only_read_latest.setter def only_read_latest(self, value): - self._properties['onlyReadLatest'] = value + self._properties["onlyReadLatest"] = value @property def type_(self): @@ -240,11 +240,11 @@ def type_(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.columnFamilies.type https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.type """ - return self._properties.get('type') + return self._properties.get("type") @type_.setter def type_(self, value): - self._properties['type'] = value + self._properties["type"] = value @property def columns(self): @@ -255,12 +255,12 @@ def columns(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.columnFamilies.columns https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies.columns """ - prop = self._properties.get('columns', []) + prop = self._properties.get("columns", []) return 
[BigtableColumn.from_api_repr(col) for col in prop] @columns.setter def columns(self, value): - self._properties['columns'] = [col.to_api_repr() for col in value] + self._properties["columns"] = [col.to_api_repr() for col in value] def to_api_repr(self): """Build an API representation of this object. @@ -295,8 +295,8 @@ class BigtableOptions(object): """Options that describe how to treat Bigtable tables as BigQuery tables. """ - _SOURCE_FORMAT = 'BIGTABLE' - _RESOURCE_NAME = 'bigtableOptions' + _SOURCE_FORMAT = "BIGTABLE" + _RESOURCE_NAME = "bigtableOptions" def __init__(self): self._properties = {} @@ -310,11 +310,11 @@ def ignore_unspecified_column_families(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.ignoreUnspecifiedColumnFamilies https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.ignoreUnspecifiedColumnFamilies """ - return self._properties.get('ignoreUnspecifiedColumnFamilies') + return self._properties.get("ignoreUnspecifiedColumnFamilies") @ignore_unspecified_column_families.setter def ignore_unspecified_column_families(self, value): - self._properties['ignoreUnspecifiedColumnFamilies'] = value + self._properties["ignoreUnspecifiedColumnFamilies"] = value @property def read_rowkey_as_string(self): @@ -325,11 +325,11 @@ def read_rowkey_as_string(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.readRowkeyAsString https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.readRowkeyAsString """ - return self._properties.get('readRowkeyAsString') + return self._properties.get("readRowkeyAsString") @read_rowkey_as_string.setter def read_rowkey_as_string(self, value): - self._properties['readRowkeyAsString'] = value + self._properties["readRowkeyAsString"] = value @property def column_families(self): @@ -340,12 +340,12 @@ def column_families(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).bigtableOptions.columnFamilies https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.bigtableOptions.columnFamilies """ - prop = self._properties.get('columnFamilies', []) + prop = self._properties.get("columnFamilies", []) return [BigtableColumnFamily.from_api_repr(cf) for cf in prop] @column_families.setter def column_families(self, value): - self._properties['columnFamilies'] = [cf.to_api_repr() for cf in value] + self._properties["columnFamilies"] = [cf.to_api_repr() for cf in value] def to_api_repr(self): """Build an API representation of this object. 
@@ -379,8 +379,8 @@ def from_api_repr(cls, resource): class CSVOptions(object): """Options that describe how to treat CSV files as BigQuery tables.""" - _SOURCE_FORMAT = 'CSV' - _RESOURCE_NAME = 'csvOptions' + _SOURCE_FORMAT = "CSV" + _RESOURCE_NAME = "csvOptions" def __init__(self): self._properties = {} @@ -394,11 +394,11 @@ def allow_jagged_rows(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).csvOptions.allowJaggedRows https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.csvOptions.allowJaggedRows """ - return self._properties.get('allowJaggedRows') + return self._properties.get("allowJaggedRows") @allow_jagged_rows.setter def allow_jagged_rows(self, value): - self._properties['allowJaggedRows'] = value + self._properties["allowJaggedRows"] = value @property def allow_quoted_newlines(self): @@ -409,11 +409,11 @@ def allow_quoted_newlines(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).csvOptions.allowQuotedNewlines https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.csvOptions.allowQuotedNewlines """ - return self._properties.get('allowQuotedNewlines') + return self._properties.get("allowQuotedNewlines") @allow_quoted_newlines.setter def allow_quoted_newlines(self, value): - self._properties['allowQuotedNewlines'] = value + self._properties["allowQuotedNewlines"] = value @property def encoding(self): @@ -423,11 +423,11 @@ def encoding(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).csvOptions.encoding https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.csvOptions.encoding """ - return self._properties.get('encoding') + return self._properties.get("encoding") @encoding.setter def encoding(self, value): - self._properties['encoding'] = value + self._properties["encoding"] = value @property def field_delimiter(self): @@ -437,11 +437,11 @@ def field_delimiter(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).csvOptions.fieldDelimiter https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.csvOptions.fieldDelimiter """ - return self._properties.get('fieldDelimiter') + return self._properties.get("fieldDelimiter") @field_delimiter.setter def field_delimiter(self, value): - self._properties['fieldDelimiter'] = value + self._properties["fieldDelimiter"] = value @property def quote_character(self): @@ -451,11 +451,11 @@ def quote_character(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).csvOptions.quote https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.csvOptions.quote """ - return self._properties.get('quote') + return self._properties.get("quote") @quote_character.setter def quote_character(self, value): - self._properties['quote'] = value + self._properties["quote"] = value @property def skip_leading_rows(self): @@ -465,11 +465,11 @@ def skip_leading_rows(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).csvOptions.skipLeadingRows https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.csvOptions.skipLeadingRows """ - return _int_or_none(self._properties.get('skipLeadingRows')) + return 
_int_or_none(self._properties.get("skipLeadingRows")) @skip_leading_rows.setter def skip_leading_rows(self, value): - self._properties['skipLeadingRows'] = str(value) + self._properties["skipLeadingRows"] = str(value) def to_api_repr(self): """Build an API representation of this object. @@ -503,8 +503,8 @@ def from_api_repr(cls, resource): class GoogleSheetsOptions(object): """Options that describe how to treat Google Sheets as BigQuery tables.""" - _SOURCE_FORMAT = 'GOOGLE_SHEETS' - _RESOURCE_NAME = 'googleSheetsOptions' + _SOURCE_FORMAT = "GOOGLE_SHEETS" + _RESOURCE_NAME = "googleSheetsOptions" def __init__(self): self._properties = {} @@ -518,11 +518,11 @@ def skip_leading_rows(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).googleSheetsOptions.skipLeadingRows https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.googleSheetsOptions.skipLeadingRows """ - return _int_or_none(self._properties.get('skipLeadingRows')) + return _int_or_none(self._properties.get("skipLeadingRows")) @skip_leading_rows.setter def skip_leading_rows(self, value): - self._properties['skipLeadingRows'] = str(value) + self._properties["skipLeadingRows"] = str(value) def to_api_repr(self): """Build an API representation of this object. @@ -565,7 +565,7 @@ class ExternalConfig(object): """ def __init__(self, source_format): - self._properties = {'sourceFormat': source_format} + self._properties = {"sourceFormat": source_format} self._options = None for optcls in _OPTION_CLASSES: if source_format == optcls._SOURCE_FORMAT: @@ -580,7 +580,7 @@ def source_format(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.sourceFormat """ - return self._properties['sourceFormat'] + return self._properties["sourceFormat"] @property def options(self): @@ -596,11 +596,11 @@ def autodetect(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).autodetect https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.autodetect """ - return self._properties.get('autodetect') + return self._properties.get("autodetect") @autodetect.setter def autodetect(self, value): - self._properties['autodetect'] = value + self._properties["autodetect"] = value @property def compression(self): @@ -610,11 +610,11 @@ def compression(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).compression https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.compression """ - return self._properties.get('compression') + return self._properties.get("compression") @compression.setter def compression(self, value): - self._properties['compression'] = value + self._properties["compression"] = value @property def ignore_unknown_values(self): @@ -625,11 +625,11 @@ def ignore_unknown_values(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).ignoreUnknownValues https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.ignoreUnknownValues """ - return self._properties.get('ignoreUnknownValues') + return self._properties.get("ignoreUnknownValues") @ignore_unknown_values.setter def ignore_unknown_values(self, value): - self._properties['ignoreUnknownValues'] = value + self._properties["ignoreUnknownValues"] = value @property def max_bad_records(self): @@ 
-640,11 +640,11 @@ def max_bad_records(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).maxBadRecords https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.maxBadRecords """ - return self._properties.get('maxBadRecords') + return self._properties.get("maxBadRecords") @max_bad_records.setter def max_bad_records(self, value): - self._properties['maxBadRecords'] = value + self._properties["maxBadRecords"] = value @property def source_uris(self): @@ -654,11 +654,11 @@ def source_uris(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).sourceUris https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.sourceUris """ - return self._properties.get('sourceUris', []) + return self._properties.get("sourceUris", []) @source_uris.setter def source_uris(self, value): - self._properties['sourceUris'] = value + self._properties["sourceUris"] = value @property def schema(self): @@ -669,16 +669,15 @@ def schema(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).schema https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externalDataConfiguration.schema """ - prop = self._properties.get('schema', {}) - return [SchemaField.from_api_repr(field) - for field in prop.get('fields', [])] + prop = self._properties.get("schema", {}) + return [SchemaField.from_api_repr(field) for field in prop.get("fields", [])] @schema.setter def schema(self, value): prop = value if value is not None: - prop = {'fields': [field.to_api_repr() for field in value]} - self._properties['schema'] = prop + prop = {"fields": [field.to_api_repr() for field in value]} + self._properties["schema"] = prop def to_api_repr(self): """Build an API representation of this object. @@ -709,7 +708,7 @@ def from_api_repr(cls, resource): :class:`~.external_config.ExternalConfig`: Configuration parsed from ``resource``. 
""" - config = cls(resource['sourceFormat']) + config = cls(resource["sourceFormat"]) for optcls in _OPTION_CLASSES: opts = resource.get(optcls._RESOURCE_NAME) if opts is not None: diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index b625dbf51f76..cdb275ed5f83 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -38,29 +38,29 @@ from google.cloud.bigquery.table import TimePartitioning from google.cloud.bigquery import _helpers -_DONE_STATE = 'DONE' -_STOPPED_REASON = 'stopped' +_DONE_STATE = "DONE" +_STOPPED_REASON = "stopped" _TIMEOUT_BUFFER_SECS = 0.1 _ERROR_REASON_TO_EXCEPTION = { - 'accessDenied': http_client.FORBIDDEN, - 'backendError': http_client.INTERNAL_SERVER_ERROR, - 'billingNotEnabled': http_client.FORBIDDEN, - 'billingTierLimitExceeded': http_client.BAD_REQUEST, - 'blocked': http_client.FORBIDDEN, - 'duplicate': http_client.CONFLICT, - 'internalError': http_client.INTERNAL_SERVER_ERROR, - 'invalid': http_client.BAD_REQUEST, - 'invalidQuery': http_client.BAD_REQUEST, - 'notFound': http_client.NOT_FOUND, - 'notImplemented': http_client.NOT_IMPLEMENTED, - 'quotaExceeded': http_client.FORBIDDEN, - 'rateLimitExceeded': http_client.FORBIDDEN, - 'resourceInUse': http_client.BAD_REQUEST, - 'resourcesExceeded': http_client.BAD_REQUEST, - 'responseTooLarge': http_client.FORBIDDEN, - 'stopped': http_client.OK, - 'tableUnavailable': http_client.BAD_REQUEST, + "accessDenied": http_client.FORBIDDEN, + "backendError": http_client.INTERNAL_SERVER_ERROR, + "billingNotEnabled": http_client.FORBIDDEN, + "billingTierLimitExceeded": http_client.BAD_REQUEST, + "blocked": http_client.FORBIDDEN, + "duplicate": http_client.CONFLICT, + "internalError": http_client.INTERNAL_SERVER_ERROR, + "invalid": http_client.BAD_REQUEST, + "invalidQuery": http_client.BAD_REQUEST, + "notFound": http_client.NOT_FOUND, + "notImplemented": http_client.NOT_IMPLEMENTED, + "quotaExceeded": http_client.FORBIDDEN, + "rateLimitExceeded": http_client.FORBIDDEN, + "resourceInUse": http_client.BAD_REQUEST, + "resourcesExceeded": http_client.BAD_REQUEST, + "responseTooLarge": http_client.FORBIDDEN, + "stopped": http_client.OK, + "tableUnavailable": http_client.BAD_REQUEST, } @@ -79,12 +79,12 @@ def _error_result_to_exception(error_result): :rtype google.cloud.exceptions.GoogleCloudError: :returns: The mapped exception. """ - reason = error_result.get('reason') + reason = error_result.get("reason") status_code = _ERROR_REASON_TO_EXCEPTION.get( reason, http_client.INTERNAL_SERVER_ERROR ) return exceptions.from_http_status( - status_code, error_result.get('message', ''), errors=[error_result] + status_code, error_result.get("message", ""), errors=[error_result] ) @@ -96,16 +96,16 @@ class Compression(object): only supported for Avro. """ - GZIP = 'GZIP' + GZIP = "GZIP" """Specifies GZIP format.""" - DEFLATE = 'DEFLATE' + DEFLATE = "DEFLATE" """Specifies DEFLATE format.""" - SNAPPY = 'SNAPPY' + SNAPPY = "SNAPPY" """Specifies SNAPPY format.""" - NONE = 'NONE' + NONE = "NONE" """Specifies no compression.""" @@ -117,10 +117,10 @@ class CreateDisposition(object): upon job completion. """ - CREATE_IF_NEEDED = 'CREATE_IF_NEEDED' + CREATE_IF_NEEDED = "CREATE_IF_NEEDED" """If the table does not exist, BigQuery creates the table.""" - CREATE_NEVER = 'CREATE_NEVER' + CREATE_NEVER = "CREATE_NEVER" """The table must already exist. 
If it does not, a 'notFound' error is returned in the job result.""" @@ -131,13 +131,13 @@ class DestinationFormat(object): Tables with nested or repeated fields cannot be exported as CSV. """ - CSV = 'CSV' + CSV = "CSV" """Specifies CSV format.""" - NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON' + NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON" """Specifies newline delimited JSON format.""" - AVRO = 'AVRO' + AVRO = "AVRO" """Specifies Avro format.""" @@ -148,10 +148,10 @@ class Encoding(object): split using the values of the quote and fieldDelimiter properties. """ - UTF_8 = 'UTF-8' + UTF_8 = "UTF-8" """Specifies UTF-8 encoding.""" - ISO_8859_1 = 'ISO-8859-1' + ISO_8859_1 = "ISO-8859-1" """Specifies ISO-8859-1 encoding.""" @@ -160,10 +160,10 @@ class QueryPriority(object): :attr:`INTERACTIVE`. """ - INTERACTIVE = 'INTERACTIVE' + INTERACTIVE = "INTERACTIVE" """Specifies interactive priority.""" - BATCH = 'BATCH' + BATCH = "BATCH" """Specifies batch priority.""" @@ -175,22 +175,22 @@ class SourceFormat(object): :class:`~google.cloud.bigquery.external_config.ExternalSourceFormat`). """ - CSV = 'CSV' + CSV = "CSV" """Specifies CSV format.""" - DATASTORE_BACKUP = 'DATASTORE_BACKUP' + DATASTORE_BACKUP = "DATASTORE_BACKUP" """Specifies datastore backup format""" - NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON' + NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON" """Specifies newline delimited JSON format.""" - AVRO = 'AVRO' + AVRO = "AVRO" """Specifies Avro format.""" - PARQUET = 'PARQUET' + PARQUET = "PARQUET" """Specifies Parquet format.""" - ORC = 'ORC' + ORC = "ORC" """Specifies Orc format.""" @@ -204,13 +204,13 @@ class WriteDisposition(object): atomic update upon job completion. """ - WRITE_APPEND = 'WRITE_APPEND' + WRITE_APPEND = "WRITE_APPEND" """If the table already exists, BigQuery appends the data to the table.""" - WRITE_TRUNCATE = 'WRITE_TRUNCATE' + WRITE_TRUNCATE = "WRITE_TRUNCATE" """If the table already exists, BigQuery overwrites the table data.""" - WRITE_EMPTY = 'WRITE_EMPTY' + WRITE_EMPTY = "WRITE_EMPTY" """If the table already exists and contains data, a 'duplicate' error is returned in the job result.""" @@ -220,10 +220,10 @@ class SchemaUpdateOption(object): a load job. """ - ALLOW_FIELD_ADDITION = 'ALLOW_FIELD_ADDITION' + ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION" """Allow adding a nullable field to the schema.""" - ALLOW_FIELD_RELAXATION = 'ALLOW_FIELD_RELAXATION' + ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION" """Allow relaxing a required field in the original schema to nullable.""" @@ -237,25 +237,25 @@ class _JobReference(object): """ def __init__(self, job_id, project, location): - self._properties = {'jobId': job_id, 'projectId': project} + self._properties = {"jobId": job_id, "projectId": project} # The location field must not be populated if it is None. 
if location: - self._properties['location'] = location + self._properties["location"] = location @property def job_id(self): """str: ID of the job.""" - return self._properties.get('jobId') + return self._properties.get("jobId") @property def project(self): """str: ID of the project where the job runs.""" - return self._properties.get('projectId') + return self._properties.get("projectId") @property def location(self): """str: Location where the job runs.""" - return self._properties.get('location') + return self._properties.get("location") def _to_api_repr(self): """Returns the API resource representation of the job reference.""" @@ -264,9 +264,9 @@ def _to_api_repr(self): @classmethod def _from_api_repr(cls, resource): """Returns a job reference for an API resource representation.""" - job_id = resource.get('jobId') - project = resource.get('projectId') - location = resource.get('location') + job_id = resource.get("jobId") + project = resource.get("projectId") + location = resource.get("location") job_ref = cls(job_id, project, location) return job_ref @@ -291,7 +291,7 @@ def __init__(self, job_id, client): job_ref = job_id if not isinstance(job_id, _JobReference): job_ref = _JobReference(job_id, client.project, None) - self._properties = {'jobReference': job_ref._to_api_repr()} + self._properties = {"jobReference": job_ref._to_api_repr()} self._client = client self._result_set = False @@ -300,7 +300,7 @@ def __init__(self, job_id, client): @property def job_id(self): """str: ID of the job.""" - return _helpers._get_sub_prop(self._properties, ['jobReference', 'jobId']) + return _helpers._get_sub_prop(self._properties, ["jobReference", "jobId"]) @property def project(self): @@ -309,12 +309,12 @@ def project(self): :rtype: str :returns: the project (derived from the client). """ - return _helpers._get_sub_prop(self._properties, ['jobReference', 'projectId']) + return _helpers._get_sub_prop(self._properties, ["jobReference", "projectId"]) @property def location(self): """str: Location where the job runs.""" - return _helpers._get_sub_prop(self._properties, ['jobReference', 'location']) + return _helpers._get_sub_prop(self._properties, ["jobReference", "location"]) def _require_client(self, client): """Check client or verify over-ride. @@ -347,12 +347,12 @@ def path(self): :rtype: str :returns: the path based on project and job ID. """ - return '/projects/%s/jobs/%s' % (self.project, self.job_id) + return "/projects/%s/jobs/%s" % (self.project, self.job_id) @property def labels(self): """Dict[str, str]: Labels for the job.""" - return self._properties.setdefault('labels', {}) + return self._properties.setdefault("labels", {}) @property def etag(self): @@ -361,7 +361,7 @@ def etag(self): :rtype: str, or ``NoneType`` :returns: the ETag (None until set from the server). """ - return self._properties.get('etag') + return self._properties.get("etag") @property def self_link(self): @@ -370,7 +370,7 @@ def self_link(self): :rtype: str, or ``NoneType`` :returns: the URL (None until set from the server). """ - return self._properties.get('selfLink') + return self._properties.get("selfLink") @property def user_email(self): @@ -379,7 +379,7 @@ def user_email(self): :rtype: str, or ``NoneType`` :returns: the URL (None until set from the server). 
""" - return self._properties.get('user_email') + return self._properties.get("user_email") @property def created(self): @@ -388,9 +388,9 @@ def created(self): :rtype: ``datetime.datetime``, or ``NoneType`` :returns: the creation time (None until set from the server). """ - statistics = self._properties.get('statistics') + statistics = self._properties.get("statistics") if statistics is not None: - millis = statistics.get('creationTime') + millis = statistics.get("creationTime") if millis is not None: return _helpers._datetime_from_microseconds(millis * 1000.0) @@ -401,9 +401,9 @@ def started(self): :rtype: ``datetime.datetime``, or ``NoneType`` :returns: the start time (None until set from the server). """ - statistics = self._properties.get('statistics') + statistics = self._properties.get("statistics") if statistics is not None: - millis = statistics.get('startTime') + millis = statistics.get("startTime") if millis is not None: return _helpers._datetime_from_microseconds(millis * 1000.0) @@ -414,15 +414,15 @@ def ended(self): :rtype: ``datetime.datetime``, or ``NoneType`` :returns: the end time (None until set from the server). """ - statistics = self._properties.get('statistics') + statistics = self._properties.get("statistics") if statistics is not None: - millis = statistics.get('endTime') + millis = statistics.get("endTime") if millis is not None: return _helpers._datetime_from_microseconds(millis * 1000.0) def _job_statistics(self): """Helper for job-type specific statistics-based properties.""" - statistics = self._properties.get('statistics', {}) + statistics = self._properties.get("statistics", {}) return statistics.get(self._JOB_TYPE, {}) @property @@ -432,9 +432,9 @@ def error_result(self): :rtype: mapping, or ``NoneType`` :returns: the error information (None until set from the server). """ - status = self._properties.get('status') + status = self._properties.get("status") if status is not None: - return status.get('errorResult') + return status.get("errorResult") @property def errors(self): @@ -443,9 +443,9 @@ def errors(self): :rtype: list of mappings, or ``NoneType`` :returns: the error information (None until set from the server). """ - status = self._properties.get('status') + status = self._properties.get("status") if status is not None: - return status.get('errors') + return status.get("errors") @property def state(self): @@ -454,9 +454,9 @@ def state(self): :rtype: str, or ``NoneType`` :returns: the state (None until set from the server). 
""" - status = self._properties.get('status') + status = self._properties.get("status") if status is not None: - return status.get('state') + return status.get("state") def _scrub_local_properties(self, cleaned): """Helper: handle subclass properties in cleaned.""" @@ -475,17 +475,17 @@ def _set_properties(self, api_response): cleaned = api_response.copy() self._scrub_local_properties(cleaned) - statistics = cleaned.get('statistics', {}) - if 'creationTime' in statistics: - statistics['creationTime'] = float(statistics['creationTime']) - if 'startTime' in statistics: - statistics['startTime'] = float(statistics['startTime']) - if 'endTime' in statistics: - statistics['endTime'] = float(statistics['endTime']) + statistics = cleaned.get("statistics", {}) + if "creationTime" in statistics: + statistics["creationTime"] = float(statistics["creationTime"]) + if "startTime" in statistics: + statistics["startTime"] = float(statistics["startTime"]) + if "endTime" in statistics: + statistics["endTime"] = float(statistics["endTime"]) self._properties.clear() self._properties.update(cleaned) - self._copy_configuration_properties(cleaned.get('configuration', {})) + self._copy_configuration_properties(cleaned.get("configuration", {})) # For Future interface self._set_future_result() @@ -503,21 +503,21 @@ def _get_resource_config(cls, resource): :raises: :class:`KeyError` if the resource has no identifier, or is missing the appropriate configuration. """ - if 'jobReference' not in resource or 'jobId' not in resource['jobReference']: + if "jobReference" not in resource or "jobId" not in resource["jobReference"]: raise KeyError( - 'Resource lacks required identity information: ' + "Resource lacks required identity information: " '["jobReference"]["jobId"]' ) - job_id = resource['jobReference']['jobId'] + job_id = resource["jobReference"]["jobId"] if ( - 'configuration' not in resource - or cls._JOB_TYPE not in resource['configuration'] + "configuration" not in resource + or cls._JOB_TYPE not in resource["configuration"] ): raise KeyError( - 'Resource lacks required configuration: ' + "Resource lacks required configuration: " '["configuration"]["%s"]' % cls._JOB_TYPE ) - return job_id, resource['configuration'] + return job_id, resource["configuration"] def to_api_repr(self): """Generate a resource for the job.""" @@ -545,12 +545,12 @@ def _begin(self, client=None, retry=DEFAULT_RETRY): raise ValueError("Job already begun.") client = self._require_client(client) - path = '/projects/%s/jobs' % (self.project,) + path = "/projects/%s/jobs" % (self.project,) # jobs.insert is idempotent because we ensure that every new # job has an ID. 
api_response = client._call_api( - retry, method='POST', path=path, data=self.to_api_repr() + retry, method="POST", path=path, data=self.to_api_repr() ) self._set_properties(api_response) @@ -573,13 +573,13 @@ def exists(self, client=None, retry=DEFAULT_RETRY): """ client = self._require_client(client) - extra_params = {'fields': 'id'} + extra_params = {"fields": "id"} if self.location: - extra_params['location'] = self.location + extra_params["location"] = self.location try: client._call_api( - retry, method='GET', path=self.path, query_params=extra_params + retry, method="GET", path=self.path, query_params=extra_params ) except NotFound: return False @@ -604,10 +604,10 @@ def reload(self, client=None, retry=DEFAULT_RETRY): extra_params = {} if self.location: - extra_params['location'] = self.location + extra_params["location"] = self.location api_response = client._call_api( - retry, method='GET', path=self.path, query_params=extra_params + retry, method="GET", path=self.path, query_params=extra_params ) self._set_properties(api_response) @@ -629,12 +629,12 @@ def cancel(self, client=None): extra_params = {} if self.location: - extra_params['location'] = self.location + extra_params["location"] = self.location api_response = client._connection.api_request( - method='POST', path='%s/cancel' % (self.path,), query_params=extra_params + method="POST", path="%s/cancel" % (self.path,), query_params=extra_params ) - self._set_properties(api_response['job']) + self._set_properties(api_response["job"]) # The Future interface requires that we return True if the *attempt* # to cancel was successful. return True @@ -711,7 +711,7 @@ def cancelled(self): """ return ( self.error_result is not None - and self.error_result.get('reason') == _STOPPED_REASON + and self.error_result.get("reason") == _STOPPED_REASON ) @@ -739,13 +739,13 @@ def labels(self): Raises: ValueError: If ``value`` type is invalid. """ - return self._properties.setdefault('labels', {}) + return self._properties.setdefault("labels", {}) @labels.setter def labels(self, value): if not isinstance(value, dict): raise ValueError("Pass a dict") - self._properties['labels'] = value + self._properties["labels"] = value def _get_sub_prop(self, key, default=None): """Get a value in the ``self._properties[self._job_type]`` dictionary. 
@@ -845,7 +845,7 @@ def _fill_from_default(self, default_job_config): raise TypeError( "attempted to merge two incompatible job types: " + repr(self._job_type) - + ', ' + + ", " + repr(default_job_config._job_type) ) @@ -887,7 +887,7 @@ class LoadJobConfig(_JobConfig): """ def __init__(self, **kwargs): - super(LoadJobConfig, self).__init__('load', **kwargs) + super(LoadJobConfig, self).__init__("load", **kwargs) @property def allow_jagged_rows(self): @@ -896,11 +896,11 @@ def allow_jagged_rows(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowJaggedRows """ - return self._get_sub_prop('allowJaggedRows') + return self._get_sub_prop("allowJaggedRows") @allow_jagged_rows.setter def allow_jagged_rows(self, value): - self._set_sub_prop('allowJaggedRows', value) + self._set_sub_prop("allowJaggedRows", value) @property def allow_quoted_newlines(self): @@ -909,11 +909,11 @@ def allow_quoted_newlines(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowQuotedNewlines """ - return self._get_sub_prop('allowQuotedNewlines') + return self._get_sub_prop("allowQuotedNewlines") @allow_quoted_newlines.setter def allow_quoted_newlines(self, value): - self._set_sub_prop('allowQuotedNewlines', value) + self._set_sub_prop("allowQuotedNewlines", value) @property def autodetect(self): @@ -922,11 +922,11 @@ def autodetect(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.autodetect """ - return self._get_sub_prop('autodetect') + return self._get_sub_prop("autodetect") @autodetect.setter def autodetect(self, value): - self._set_sub_prop('autodetect', value) + self._set_sub_prop("autodetect", value) @property def clustering_fields(self): @@ -941,9 +941,9 @@ def clustering_fields(self): As of 2018-06-29, clustering fields cannot be set on a table which does not also have time partioning defined. """ - prop = self._get_sub_prop('clustering') + prop = self._get_sub_prop("clustering") if prop is not None: - return list(prop.get('fields', ())) + return list(prop.get("fields", ())) @clustering_fields.setter def clustering_fields(self, value): @@ -952,9 +952,9 @@ def clustering_fields(self, value): (Defaults to :data:`None`). 
""" if value is not None: - self._set_sub_prop('clustering', {'fields': value}) + self._set_sub_prop("clustering", {"fields": value}) else: - self._del_sub_prop('clustering') + self._del_sub_prop("clustering") @property def create_disposition(self): @@ -964,11 +964,11 @@ def create_disposition(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.createDisposition """ - return self._get_sub_prop('createDisposition') + return self._get_sub_prop("createDisposition") @create_disposition.setter def create_disposition(self, value): - self._set_sub_prop('createDisposition', value) + self._set_sub_prop("createDisposition", value) @property def destination_encryption_configuration(self): @@ -981,7 +981,7 @@ def destination_encryption_configuration(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationEncryptionConfiguration """ - prop = self._get_sub_prop('destinationEncryptionConfiguration') + prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: prop = EncryptionConfiguration.from_api_repr(prop) return prop @@ -991,9 +991,9 @@ def destination_encryption_configuration(self, value): api_repr = value if value is not None: api_repr = value.to_api_repr() - self._set_sub_prop('destinationEncryptionConfiguration', api_repr) + self._set_sub_prop("destinationEncryptionConfiguration", api_repr) else: - self._del_sub_prop('destinationEncryptionConfiguration') + self._del_sub_prop("destinationEncryptionConfiguration") @property def destination_table_description(self): @@ -1002,13 +1002,13 @@ def destination_table_description(self): See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.description """ - prop = self._get_sub_prop('destinationTableProperties') + prop = self._get_sub_prop("destinationTableProperties") if prop is not None: - return prop['description'] + return prop["description"] @destination_table_description.setter def destination_table_description(self, value): - keys = [self._job_type, 'destinationTableProperties', 'description'] + keys = [self._job_type, "destinationTableProperties", "description"] if value is not None: _helpers._set_sub_prop(self._properties, keys, value) else: @@ -1021,13 +1021,13 @@ def destination_table_friendly_name(self): See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.friendlyName """ - prop = self._get_sub_prop('destinationTableProperties') + prop = self._get_sub_prop("destinationTableProperties") if prop is not None: - return prop['friendlyName'] + return prop["friendlyName"] @destination_table_friendly_name.setter def destination_table_friendly_name(self, value): - keys = [self._job_type, 'destinationTableProperties', 'friendlyName'] + keys = [self._job_type, "destinationTableProperties", "friendlyName"] if value is not None: _helpers._set_sub_prop(self._properties, keys, value) else: @@ -1041,11 +1041,11 @@ def encoding(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.encoding """ - return self._get_sub_prop('encoding') + return self._get_sub_prop("encoding") @encoding.setter def encoding(self, value): - self._set_sub_prop('encoding', value) + self._set_sub_prop("encoding", value) @property def field_delimiter(self): @@ -1054,11 +1054,11 @@ def field_delimiter(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.fieldDelimiter """ - return 
self._get_sub_prop('fieldDelimiter') + return self._get_sub_prop("fieldDelimiter") @field_delimiter.setter def field_delimiter(self, value): - self._set_sub_prop('fieldDelimiter', value) + self._set_sub_prop("fieldDelimiter", value) @property def ignore_unknown_values(self): @@ -1067,11 +1067,11 @@ def ignore_unknown_values(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.ignoreUnknownValues """ - return self._get_sub_prop('ignoreUnknownValues') + return self._get_sub_prop("ignoreUnknownValues") @ignore_unknown_values.setter def ignore_unknown_values(self, value): - self._set_sub_prop('ignoreUnknownValues', value) + self._set_sub_prop("ignoreUnknownValues", value) @property def max_bad_records(self): @@ -1080,11 +1080,11 @@ def max_bad_records(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.maxBadRecords """ - return _helpers._int_or_none(self._get_sub_prop('maxBadRecords')) + return _helpers._int_or_none(self._get_sub_prop("maxBadRecords")) @max_bad_records.setter def max_bad_records(self, value): - self._set_sub_prop('maxBadRecords', value) + self._set_sub_prop("maxBadRecords", value) @property def null_marker(self): @@ -1093,11 +1093,11 @@ def null_marker(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.nullMarker """ - return self._get_sub_prop('nullMarker') + return self._get_sub_prop("nullMarker") @null_marker.setter def null_marker(self, value): - self._set_sub_prop('nullMarker', value) + self._set_sub_prop("nullMarker", value) @property def quote_character(self): @@ -1106,11 +1106,11 @@ def quote_character(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.quote """ - return self._get_sub_prop('quote') + return self._get_sub_prop("quote") @quote_character.setter def quote_character(self, value): - self._set_sub_prop('quote', value) + self._set_sub_prop("quote", value) @property def schema(self): @@ -1120,18 +1120,18 @@ def schema(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema """ - schema = _helpers._get_sub_prop(self._properties, ['load', 'schema', 'fields']) + schema = _helpers._get_sub_prop(self._properties, ["load", "schema", "fields"]) if schema is None: return return [SchemaField.from_api_repr(field) for field in schema] @schema.setter def schema(self, value): - if not all(hasattr(field, 'to_api_repr') for field in value): - raise ValueError('Schema items must be fields') + if not all(hasattr(field, "to_api_repr") for field in value): + raise ValueError("Schema items must be fields") _helpers._set_sub_prop( self._properties, - ['load', 'schema', 'fields'], + ["load", "schema", "fields"], [field.to_api_repr() for field in value], ) @@ -1141,11 +1141,11 @@ def schema_update_options(self): updates to the destination table schema to allow as a side effect of the load job. 
""" - return self._get_sub_prop('schemaUpdateOptions') + return self._get_sub_prop("schemaUpdateOptions") @schema_update_options.setter def schema_update_options(self, values): - self._set_sub_prop('schemaUpdateOptions', values) + self._set_sub_prop("schemaUpdateOptions", values) @property def skip_leading_rows(self): @@ -1154,11 +1154,11 @@ def skip_leading_rows(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.skipLeadingRows """ - return _helpers._int_or_none(self._get_sub_prop('skipLeadingRows')) + return _helpers._int_or_none(self._get_sub_prop("skipLeadingRows")) @skip_leading_rows.setter def skip_leading_rows(self, value): - self._set_sub_prop('skipLeadingRows', str(value)) + self._set_sub_prop("skipLeadingRows", str(value)) @property def source_format(self): @@ -1167,18 +1167,18 @@ def source_format(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceFormat """ - return self._get_sub_prop('sourceFormat') + return self._get_sub_prop("sourceFormat") @source_format.setter def source_format(self, value): - self._set_sub_prop('sourceFormat', value) + self._set_sub_prop("sourceFormat", value) @property def time_partitioning(self): """google.cloud.bigquery.table.TimePartitioning: Specifies time-based partitioning for the destination table. """ - prop = self._get_sub_prop('timePartitioning') + prop = self._get_sub_prop("timePartitioning") if prop is not None: prop = TimePartitioning.from_api_repr(prop) return prop @@ -1188,9 +1188,9 @@ def time_partitioning(self, value): api_repr = value if value is not None: api_repr = value.to_api_repr() - self._set_sub_prop('timePartitioning', api_repr) + self._set_sub_prop("timePartitioning", api_repr) else: - self._del_sub_prop('timePartitioning') + self._del_sub_prop("timePartitioning") @property def write_disposition(self): @@ -1200,11 +1200,11 @@ def write_disposition(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.writeDisposition """ - return self._get_sub_prop('writeDisposition') + return self._get_sub_prop("writeDisposition") @write_disposition.setter def write_disposition(self, value): - self._set_sub_prop('writeDisposition', value) + self._set_sub_prop("writeDisposition", value) class LoadJob(_AsyncJob): @@ -1229,7 +1229,7 @@ class LoadJob(_AsyncJob): for the dataset (which requires a project). 
""" - _JOB_TYPE = 'load' + _JOB_TYPE = "load" def __init__(self, job_id, source_uris, destination, client, job_config=None): super(LoadJob, self).__init__(job_id, client) @@ -1383,7 +1383,7 @@ def input_file_bytes(self): """ return _helpers._int_or_none( _helpers._get_sub_prop( - self._properties, ['statistics', 'load', 'inputFileBytes'] + self._properties, ["statistics", "load", "inputFileBytes"] ) ) @@ -1396,7 +1396,7 @@ def input_files(self): """ return _helpers._int_or_none( _helpers._get_sub_prop( - self._properties, ['statistics', 'load', 'inputFiles'] + self._properties, ["statistics", "load", "inputFiles"] ) ) @@ -1409,7 +1409,7 @@ def output_bytes(self): """ return _helpers._int_or_none( _helpers._get_sub_prop( - self._properties, ['statistics', 'load', 'outputBytes'] + self._properties, ["statistics", "load", "outputBytes"] ) ) @@ -1422,7 +1422,7 @@ def output_rows(self): """ return _helpers._int_or_none( _helpers._get_sub_prop( - self._properties, ['statistics', 'load', 'outputRows'] + self._properties, ["statistics", "load", "outputRows"] ) ) @@ -1431,15 +1431,15 @@ def to_api_repr(self): configuration = self._configuration.to_api_repr() if self.source_uris is not None: _helpers._set_sub_prop( - configuration, ['load', 'sourceUris'], self.source_uris + configuration, ["load", "sourceUris"], self.source_uris ) _helpers._set_sub_prop( - configuration, ['load', 'destinationTable'], self.destination.to_api_repr() + configuration, ["load", "destinationTable"], self.destination.to_api_repr() ) return { - 'jobReference': self._properties['jobReference'], - 'configuration': configuration, + "jobReference": self._properties["jobReference"], + "configuration": configuration, } def _copy_configuration_properties(self, configuration): @@ -1465,15 +1465,15 @@ def from_api_repr(cls, resource, client): :rtype: :class:`google.cloud.bigquery.job.LoadJob` :returns: Job parsed from ``resource``. """ - config_resource = resource.get('configuration', {}) + config_resource = resource.get("configuration", {}) config = LoadJobConfig.from_api_repr(config_resource) # A load job requires a destination table. - dest_config = config_resource['load']['destinationTable'] - ds_ref = DatasetReference(dest_config['projectId'], dest_config['datasetId']) - destination = TableReference(ds_ref, dest_config['tableId']) + dest_config = config_resource["load"]["destinationTable"] + ds_ref = DatasetReference(dest_config["projectId"], dest_config["datasetId"]) + destination = TableReference(ds_ref, dest_config["tableId"]) # sourceUris will be absent if this is a file upload. 
- source_uris = _helpers._get_sub_prop(config_resource, ['load', 'sourceUris']) - job_ref = _JobReference._from_api_repr(resource['jobReference']) + source_uris = _helpers._get_sub_prop(config_resource, ["load", "sourceUris"]) + job_ref = _JobReference._from_api_repr(resource["jobReference"]) job = cls(job_ref, source_uris, destination, client, config) job._set_properties(resource) return job @@ -1488,7 +1488,7 @@ class CopyJobConfig(_JobConfig): """ def __init__(self, **kwargs): - super(CopyJobConfig, self).__init__('copy', **kwargs) + super(CopyJobConfig, self).__init__("copy", **kwargs) @property def create_disposition(self): @@ -1498,11 +1498,11 @@ def create_disposition(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.createDisposition """ - return self._get_sub_prop('createDisposition') + return self._get_sub_prop("createDisposition") @create_disposition.setter def create_disposition(self, value): - self._set_sub_prop('createDisposition', value) + self._set_sub_prop("createDisposition", value) @property def write_disposition(self): @@ -1512,11 +1512,11 @@ def write_disposition(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.writeDisposition """ - return self._get_sub_prop('writeDisposition') + return self._get_sub_prop("writeDisposition") @write_disposition.setter def write_disposition(self, value): - self._set_sub_prop('writeDisposition', value) + self._set_sub_prop("writeDisposition", value) @property def destination_encryption_configuration(self): @@ -1529,7 +1529,7 @@ def destination_encryption_configuration(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.destinationEncryptionConfiguration """ - prop = self._get_sub_prop('destinationEncryptionConfiguration') + prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: prop = EncryptionConfiguration.from_api_repr(prop) return prop @@ -1539,7 +1539,7 @@ def destination_encryption_configuration(self, value): api_repr = value if value is not None: api_repr = value.to_api_repr() - self._set_sub_prop('destinationEncryptionConfiguration', api_repr) + self._set_sub_prop("destinationEncryptionConfiguration", api_repr) class CopyJob(_AsyncJob): @@ -1563,7 +1563,7 @@ class CopyJob(_AsyncJob): (Optional) Extra configuration options for the copy job. 
""" - _JOB_TYPE = 'copy' + _JOB_TYPE = "copy" def __init__(self, job_id, sources, destination, client, job_config=None): super(CopyJob, self).__init__(job_id, client) @@ -1607,28 +1607,28 @@ def to_api_repr(self): source_refs = [ { - 'projectId': table.project, - 'datasetId': table.dataset_id, - 'tableId': table.table_id, + "projectId": table.project, + "datasetId": table.dataset_id, + "tableId": table.table_id, } for table in self.sources ] configuration = self._configuration.to_api_repr() - _helpers._set_sub_prop(configuration, ['copy', 'sourceTables'], source_refs) + _helpers._set_sub_prop(configuration, ["copy", "sourceTables"], source_refs) _helpers._set_sub_prop( configuration, - ['copy', 'destinationTable'], + ["copy", "destinationTable"], { - 'projectId': self.destination.project, - 'datasetId': self.destination.dataset_id, - 'tableId': self.destination.table_id, + "projectId": self.destination.project, + "datasetId": self.destination.dataset_id, + "tableId": self.destination.table_id, }, ) return { - 'jobReference': self._properties['jobReference'], - 'configuration': configuration, + "jobReference": self._properties["jobReference"], + "configuration": configuration, } def _copy_configuration_properties(self, configuration): @@ -1657,12 +1657,12 @@ def from_api_repr(cls, resource, client): job_id, config_resource = cls._get_resource_config(resource) config = CopyJobConfig.from_api_repr(config_resource) # Copy required fields to the job. - copy_resource = config_resource['copy'] - destination = TableReference.from_api_repr(copy_resource['destinationTable']) + copy_resource = config_resource["copy"] + destination = TableReference.from_api_repr(copy_resource["destinationTable"]) sources = [] - source_configs = copy_resource.get('sourceTables') + source_configs = copy_resource.get("sourceTables") if source_configs is None: - single = copy_resource.get('sourceTable') + single = copy_resource.get("sourceTable") if single is None: raise KeyError("Resource missing 'sourceTables' / 'sourceTable'") source_configs = [single] @@ -1683,7 +1683,7 @@ class ExtractJobConfig(_JobConfig): """ def __init__(self, **kwargs): - super(ExtractJobConfig, self).__init__('extract', **kwargs) + super(ExtractJobConfig, self).__init__("extract", **kwargs) @property def compression(self): @@ -1693,11 +1693,11 @@ def compression(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.compression """ - return self._get_sub_prop('compression') + return self._get_sub_prop("compression") @compression.setter def compression(self, value): - self._set_sub_prop('compression', value) + self._set_sub_prop("compression", value) @property def destination_format(self): @@ -1706,11 +1706,11 @@ def destination_format(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.destinationFormat """ - return self._get_sub_prop('destinationFormat') + return self._get_sub_prop("destinationFormat") @destination_format.setter def destination_format(self, value): - self._set_sub_prop('destinationFormat', value) + self._set_sub_prop("destinationFormat", value) @property def field_delimiter(self): @@ -1719,11 +1719,11 @@ def field_delimiter(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.fieldDelimiter """ - return self._get_sub_prop('fieldDelimiter') + return self._get_sub_prop("fieldDelimiter") @field_delimiter.setter def field_delimiter(self, value): - self._set_sub_prop('fieldDelimiter', value) + 
self._set_sub_prop("fieldDelimiter", value) @property def print_header(self): @@ -1732,11 +1732,11 @@ def print_header(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.printHeader """ - return self._get_sub_prop('printHeader') + return self._get_sub_prop("printHeader") @print_header.setter def print_header(self, value): - self._set_sub_prop('printHeader', value) + self._set_sub_prop("printHeader", value) class ExtractJob(_AsyncJob): @@ -1762,7 +1762,7 @@ class ExtractJob(_AsyncJob): (Optional) Extra configuration options for the extract job. """ - _JOB_TYPE = 'extract' + _JOB_TYPE = "extract" def __init__(self, job_id, source, destination_uris, client, job_config=None): super(ExtractJob, self).__init__(job_id, client) @@ -1816,7 +1816,7 @@ def destination_uri_file_counts(self): specified in the 'destinationUris' field. Returns None if job is not yet complete. """ - counts = self._job_statistics().get('destinationUriFileCounts') + counts = self._job_statistics().get("destinationUriFileCounts") if counts is not None: return [int(count) for count in counts] return None @@ -1825,20 +1825,20 @@ def to_api_repr(self): """Generate a resource for :meth:`_begin`.""" source_ref = { - 'projectId': self.source.project, - 'datasetId': self.source.dataset_id, - 'tableId': self.source.table_id, + "projectId": self.source.project, + "datasetId": self.source.dataset_id, + "tableId": self.source.table_id, } configuration = self._configuration.to_api_repr() - _helpers._set_sub_prop(configuration, ['extract', 'sourceTable'], source_ref) + _helpers._set_sub_prop(configuration, ["extract", "sourceTable"], source_ref) _helpers._set_sub_prop( - configuration, ['extract', 'destinationUris'], self.destination_uris + configuration, ["extract", "destinationUris"], self.destination_uris ) return { - 'jobReference': self._properties['jobReference'], - 'configuration': configuration, + "jobReference": self._properties["jobReference"], + "configuration": configuration, } def _copy_configuration_properties(self, configuration): @@ -1867,14 +1867,14 @@ def from_api_repr(cls, resource, client): job_id, config_resource = cls._get_resource_config(resource) config = ExtractJobConfig.from_api_repr(config_resource) source_config = _helpers._get_sub_prop( - config_resource, ['extract', 'sourceTable'] + config_resource, ["extract", "sourceTable"] ) dataset = DatasetReference( - source_config['projectId'], source_config['datasetId'] + source_config["projectId"], source_config["datasetId"] ) - source = dataset.table(source_config['tableId']) + source = dataset.table(source_config["tableId"]) destination_uris = _helpers._get_sub_prop( - config_resource, ['extract', 'destinationUris'] + config_resource, ["extract", "destinationUris"] ) job = cls(job_id, source, destination_uris, client=client, job_config=config) @@ -1919,7 +1919,7 @@ class QueryJobConfig(_JobConfig): """ def __init__(self, **kwargs): - super(QueryJobConfig, self).__init__('query', **kwargs) + super(QueryJobConfig, self).__init__("query", **kwargs) @property def destination_encryption_configuration(self): @@ -1932,7 +1932,7 @@ def destination_encryption_configuration(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationEncryptionConfiguration """ - prop = self._get_sub_prop('destinationEncryptionConfiguration') + prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: prop = EncryptionConfiguration.from_api_repr(prop) return prop @@ -1942,7 
+1942,7 @@ def destination_encryption_configuration(self, value): api_repr = value if value is not None: api_repr = value.to_api_repr() - self._set_sub_prop('destinationEncryptionConfiguration', api_repr) + self._set_sub_prop("destinationEncryptionConfiguration", api_repr) @property def allow_large_results(self): @@ -1951,11 +1951,11 @@ def allow_large_results(self): See https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.allowLargeResults """ - return self._get_sub_prop('allowLargeResults') + return self._get_sub_prop("allowLargeResults") @allow_large_results.setter def allow_large_results(self, value): - self._set_sub_prop('allowLargeResults', value) + self._set_sub_prop("allowLargeResults", value) @property def create_disposition(self): @@ -1965,11 +1965,11 @@ def create_disposition(self): See https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.createDisposition """ - return self._get_sub_prop('createDisposition') + return self._get_sub_prop("createDisposition") @create_disposition.setter def create_disposition(self, value): - self._set_sub_prop('createDisposition', value) + self._set_sub_prop("createDisposition", value) @property def default_dataset(self): @@ -1980,7 +1980,7 @@ def default_dataset(self): See https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.defaultDataset """ - prop = self._get_sub_prop('defaultDataset') + prop = self._get_sub_prop("defaultDataset") if prop is not None: prop = DatasetReference.from_api_repr(prop) return prop @@ -1990,7 +1990,7 @@ def default_dataset(self, value): resource = None if value is not None: resource = value.to_api_repr() - self._set_sub_prop('defaultDataset', resource) + self._set_sub_prop("defaultDataset", resource) @property def destination(self): @@ -2000,7 +2000,7 @@ def destination(self): See https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationTable """ - prop = self._get_sub_prop('destinationTable') + prop = self._get_sub_prop("destinationTable") if prop is not None: prop = TableReference.from_api_repr(prop) return prop @@ -2010,7 +2010,7 @@ def destination(self, value): resource = None if value is not None: resource = value.to_api_repr() - self._set_sub_prop('destinationTable', resource) + self._set_sub_prop("destinationTable", resource) @property def dry_run(self): @@ -2020,11 +2020,11 @@ def dry_run(self): See https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.dryRun """ - return self._properties.get('dryRun') + return self._properties.get("dryRun") @dry_run.setter def dry_run(self, value): - self._properties['dryRun'] = value + self._properties["dryRun"] = value @property def flatten_results(self): @@ -2033,11 +2033,11 @@ def flatten_results(self): See https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.flattenResults """ - return self._get_sub_prop('flattenResults') + return self._get_sub_prop("flattenResults") @flatten_results.setter def flatten_results(self, value): - self._set_sub_prop('flattenResults', value) + self._set_sub_prop("flattenResults", value) @property def maximum_billing_tier(self): @@ -2047,11 +2047,11 @@ def maximum_billing_tier(self): See https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBillingTier """ - return self._get_sub_prop('maximumBillingTier') + return self._get_sub_prop("maximumBillingTier") @maximum_billing_tier.setter def maximum_billing_tier(self, value): - self._set_sub_prop('maximumBillingTier', value) + 
self._set_sub_prop("maximumBillingTier", value) @property def maximum_bytes_billed(self): @@ -2060,11 +2060,11 @@ def maximum_bytes_billed(self): See https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBytesBilled """ - return _helpers._int_or_none(self._get_sub_prop('maximumBytesBilled')) + return _helpers._int_or_none(self._get_sub_prop("maximumBytesBilled")) @maximum_bytes_billed.setter def maximum_bytes_billed(self, value): - self._set_sub_prop('maximumBytesBilled', str(value)) + self._set_sub_prop("maximumBytesBilled", str(value)) @property def priority(self): @@ -2073,11 +2073,11 @@ def priority(self): See https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.priority """ - return self._get_sub_prop('priority') + return self._get_sub_prop("priority") @priority.setter def priority(self, value): - self._set_sub_prop('priority', value) + self._set_sub_prop("priority", value) @property def query_parameters(self): @@ -2089,12 +2089,12 @@ def query_parameters(self): See: https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.queryParameters """ - prop = self._get_sub_prop('queryParameters', default=[]) + prop = self._get_sub_prop("queryParameters", default=[]) return _from_api_repr_query_parameters(prop) @query_parameters.setter def query_parameters(self, values): - self._set_sub_prop('queryParameters', _to_api_repr_query_parameters(values)) + self._set_sub_prop("queryParameters", _to_api_repr_query_parameters(values)) @property def udf_resources(self): @@ -2104,13 +2104,13 @@ def udf_resources(self): See: https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.userDefinedFunctionResources """ - prop = self._get_sub_prop('userDefinedFunctionResources', default=[]) + prop = self._get_sub_prop("userDefinedFunctionResources", default=[]) return _from_api_repr_udf_resources(prop) @udf_resources.setter def udf_resources(self, values): self._set_sub_prop( - 'userDefinedFunctionResources', _to_api_repr_udf_resources(values) + "userDefinedFunctionResources", _to_api_repr_udf_resources(values) ) @property @@ -2120,11 +2120,11 @@ def use_legacy_sql(self): See https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.useLegacySql """ - return self._get_sub_prop('useLegacySql') + return self._get_sub_prop("useLegacySql") @use_legacy_sql.setter def use_legacy_sql(self, value): - self._set_sub_prop('useLegacySql', value) + self._set_sub_prop("useLegacySql", value) @property def use_query_cache(self): @@ -2133,11 +2133,11 @@ def use_query_cache(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.useQueryCache """ - return self._get_sub_prop('useQueryCache') + return self._get_sub_prop("useQueryCache") @use_query_cache.setter def use_query_cache(self, value): - self._set_sub_prop('useQueryCache', value) + self._set_sub_prop("useQueryCache", value) @property def write_disposition(self): @@ -2147,11 +2147,11 @@ def write_disposition(self): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.writeDisposition """ - return self._get_sub_prop('writeDisposition') + return self._get_sub_prop("writeDisposition") @write_disposition.setter def write_disposition(self, value): - self._set_sub_prop('writeDisposition', value) + self._set_sub_prop("writeDisposition", value) @property def table_definitions(self): @@ -2161,21 +2161,21 @@ def table_definitions(self): See 
https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions """ - prop = self._get_sub_prop('tableDefinitions') + prop = self._get_sub_prop("tableDefinitions") if prop is not None: prop = _from_api_repr_table_defs(prop) return prop @table_definitions.setter def table_definitions(self, values): - self._set_sub_prop('tableDefinitions', _to_api_repr_table_defs(values)) + self._set_sub_prop("tableDefinitions", _to_api_repr_table_defs(values)) @property def time_partitioning(self): """google.cloud.bigquery.table.TimePartitioning: Specifies time-based partitioning for the destination table. """ - prop = self._get_sub_prop('timePartitioning') + prop = self._get_sub_prop("timePartitioning") if prop is not None: prop = TimePartitioning.from_api_repr(prop) return prop @@ -2185,7 +2185,7 @@ def time_partitioning(self, value): api_repr = value if value is not None: api_repr = value.to_api_repr() - self._set_sub_prop('timePartitioning', api_repr) + self._set_sub_prop("timePartitioning", api_repr) @property def clustering_fields(self): @@ -2200,9 +2200,9 @@ def clustering_fields(self): As of 2018-06-29, clustering fields cannot be set on a table which does not also have time partioning defined. """ - prop = self._get_sub_prop('clustering') + prop = self._get_sub_prop("clustering") if prop is not None: - return list(prop.get('fields', ())) + return list(prop.get("fields", ())) @clustering_fields.setter def clustering_fields(self, value): @@ -2211,9 +2211,9 @@ def clustering_fields(self, value): (Defaults to :data:`None`). """ if value is not None: - self._set_sub_prop('clustering', {'fields': value}) + self._set_sub_prop("clustering", {"fields": value}) else: - self._del_sub_prop('clustering') + self._del_sub_prop("clustering") @property def schema_update_options(self): @@ -2221,11 +2221,11 @@ def schema_update_options(self): updates to the destination table schema to allow as a side effect of the query job. """ - return self._get_sub_prop('schemaUpdateOptions') + return self._get_sub_prop("schemaUpdateOptions") @schema_update_options.setter def schema_update_options(self, values): - self._set_sub_prop('schemaUpdateOptions', values) + self._set_sub_prop("schemaUpdateOptions", values) def to_api_repr(self): """Build an API representation of the query job config. @@ -2237,12 +2237,12 @@ def to_api_repr(self): # Query parameters have an addition property associated with them # to indicate if the query is using named or positional parameters. - query_parameters = resource['query'].get('queryParameters') + query_parameters = resource["query"].get("queryParameters") if query_parameters: - if query_parameters[0].get('name') is None: - resource['query']['parameterMode'] = 'POSITIONAL' + if query_parameters[0].get("name") is None: + resource["query"]["parameterMode"] = "POSITIONAL" else: - resource['query']['parameterMode'] = 'NAMED' + resource["query"]["parameterMode"] = "NAMED" return resource @@ -2265,8 +2265,8 @@ class QueryJob(_AsyncJob): (Optional) Extra configuration options for the query job. 
""" - _JOB_TYPE = 'query' - _UDF_KEY = 'userDefinedFunctionResources' + _JOB_TYPE = "query" + _UDF_KEY = "userDefinedFunctionResources" def __init__(self, job_id, query, client, job_config=None): super(QueryJob, self).__init__(job_id, client) @@ -2425,17 +2425,17 @@ def to_api_repr(self): configuration = self._configuration.to_api_repr() resource = { - 'jobReference': self._properties['jobReference'], - 'configuration': configuration, + "jobReference": self._properties["jobReference"], + "configuration": configuration, } - configuration['query']['query'] = self.query + configuration["query"]["query"] = self.query return resource def _copy_configuration_properties(self, configuration): """Helper: assign subclass configuration properties in cleaned.""" self._configuration._properties = copy.deepcopy(configuration) - self.query = _helpers._get_sub_prop(configuration, ['query', 'query']) + self.query = _helpers._get_sub_prop(configuration, ["query", "query"]) @classmethod def from_api_repr(cls, resource, client): @@ -2452,7 +2452,7 @@ def from_api_repr(cls, resource, client): :returns: Job parsed from ``resource``. """ job_id, config = cls._get_resource_config(resource) - query = config['query']['query'] + query = config["query"]["query"] job = cls(job_id, query, client=client) job._set_properties(resource) return job @@ -2468,7 +2468,7 @@ def query_plan(self): :returns: mappings describing the query plan, or an empty list if the query has not yet completed. """ - plan_entries = self._job_statistics().get('queryPlan', ()) + plan_entries = self._job_statistics().get("queryPlan", ()) return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries] @property @@ -2476,7 +2476,7 @@ def timeline(self): """List(TimelineEntry): Return the query execution timeline from job statistics. """ - raw = self._job_statistics().get('timeline', ()) + raw = self._job_statistics().get("timeline", ()) return [TimelineEntry.from_api_repr(entry) for entry in raw] @property @@ -2490,7 +2490,7 @@ def total_bytes_processed(self): :returns: total bytes processed by the job, or None if job is not yet complete. """ - result = self._job_statistics().get('totalBytesProcessed') + result = self._job_statistics().get("totalBytesProcessed") if result is not None: result = int(result) return result @@ -2506,7 +2506,7 @@ def total_bytes_billed(self): :returns: total bytes processed by the job, or None if job is not yet complete. """ - result = self._job_statistics().get('totalBytesBilled') + result = self._job_statistics().get("totalBytesBilled") if result is not None: result = int(result) return result @@ -2522,7 +2522,7 @@ def billing_tier(self): :returns: billing tier used by the job, or None if job is not yet complete. """ - return self._job_statistics().get('billingTier') + return self._job_statistics().get("billingTier") @property def cache_hit(self): @@ -2535,7 +2535,7 @@ def cache_hit(self): :returns: whether the query results were returned from cache, or None if job is not yet complete. 
""" - return self._job_statistics().get('cacheHit') + return self._job_statistics().get("cacheHit") @property def ddl_operation_performed(self): @@ -2545,7 +2545,7 @@ def ddl_operation_performed(self): https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlOperationPerformed """ - return self._job_statistics().get('ddlOperationPerformed') + return self._job_statistics().get("ddlOperationPerformed") @property def ddl_target_table(self): @@ -2555,7 +2555,7 @@ def ddl_target_table(self): See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlTargetTable """ - prop = self._job_statistics().get('ddlTargetTable') + prop = self._job_statistics().get("ddlTargetTable") if prop is not None: prop = TableReference.from_api_repr(prop) return prop @@ -2571,7 +2571,7 @@ def num_dml_affected_rows(self): :returns: number of DML rows affected by the job, or None if job is not yet complete. """ - result = self._job_statistics().get('numDmlAffectedRows') + result = self._job_statistics().get("numDmlAffectedRows") if result is not None: result = int(result) return result @@ -2579,7 +2579,7 @@ def num_dml_affected_rows(self): @property def slot_millis(self): """Union[int, None]: Slot-milliseconds used by this query job.""" - return _helpers._int_or_none(self._job_statistics().get('totalSlotMs')) + return _helpers._int_or_none(self._job_statistics().get("totalSlotMs")) @property def statement_type(self): @@ -2592,7 +2592,7 @@ def statement_type(self): :returns: type of statement used by the job, or None if job is not yet complete. """ - return self._job_statistics().get('statementType') + return self._job_statistics().get("statementType") @property def referenced_tables(self): @@ -2608,17 +2608,17 @@ def referenced_tables(self): tables = [] datasets_by_project_name = {} - for table in self._job_statistics().get('referencedTables', ()): + for table in self._job_statistics().get("referencedTables", ()): - t_project = table['projectId'] + t_project = table["projectId"] - ds_id = table['datasetId'] + ds_id = table["datasetId"] t_dataset = datasets_by_project_name.get((t_project, ds_id)) if t_dataset is None: t_dataset = DatasetReference(t_project, ds_id) datasets_by_project_name[(t_project, ds_id)] = t_dataset - t_name = table['tableId'] + t_name = table["tableId"] tables.append(t_dataset.table(t_name)) return tables @@ -2639,14 +2639,14 @@ def undeclared_query_parameters(self): not yet completed. """ parameters = [] - undeclared = self._job_statistics().get('undeclaredQueryParameters', ()) + undeclared = self._job_statistics().get("undeclaredQueryParameters", ()) for parameter in undeclared: - p_type = parameter['parameterType'] + p_type = parameter["parameterType"] - if 'arrayType' in p_type: + if "arrayType" in p_type: klass = ArrayQueryParameter - elif 'structTypes' in p_type: + elif "structTypes" in p_type: klass = StructQueryParameter else: klass = ScalarQueryParameter @@ -2666,7 +2666,7 @@ def estimated_bytes_processed(self): :returns: number of DML rows affected by the job, or None if job is not yet complete. 
""" - result = self._job_statistics().get('estimatedBytesProcessed') + result = self._job_statistics().get("estimatedBytesProcessed") if result is not None: result = int(result) return result @@ -2797,7 +2797,7 @@ def from_api_repr(cls, resource): :rtype: :class:`QueryPlanEntryStep` :return: new instance built from the resource """ - return cls(kind=resource.get('kind'), substeps=resource.get('substeps', ())) + return cls(kind=resource.get("kind"), substeps=resource.get("substeps", ())) def __eq__(self, other): if not isinstance(other, self.__class__): @@ -2836,39 +2836,39 @@ def from_api_repr(cls, resource): @property def name(self): """Union[str, None]: Human-readable name of the stage.""" - return self._properties.get('name') + return self._properties.get("name") @property def entry_id(self): """Union[str, None]: Unique ID for the stage within the plan.""" - return self._properties.get('id') + return self._properties.get("id") @property def start(self): """Union[Datetime, None]: Datetime when the stage started.""" - if self._properties.get('startMs') is None: + if self._properties.get("startMs") is None: return None return _helpers._datetime_from_microseconds( - int(self._properties.get('startMs')) * 1000.0 + int(self._properties.get("startMs")) * 1000.0 ) @property def end(self): """Union[Datetime, None]: Datetime when the stage ended.""" - if self._properties.get('endMs') is None: + if self._properties.get("endMs") is None: return None return _helpers._datetime_from_microseconds( - int(self._properties.get('endMs')) * 1000.0 + int(self._properties.get("endMs")) * 1000.0 ) @property def input_stages(self): """List(int): Entry IDs for stages that were inputs for this stage.""" - if self._properties.get('inputStages') is None: + if self._properties.get("inputStages") is None: return [] return [ _helpers._int_or_none(entry) - for entry in self._properties.get('inputStages') + for entry in self._properties.get("inputStages") ] @property @@ -2876,26 +2876,26 @@ def parallel_inputs(self): """Union[int, None]: Number of parallel input segments within the stage. """ - return _helpers._int_or_none(self._properties.get('parallelInputs')) + return _helpers._int_or_none(self._properties.get("parallelInputs")) @property def completed_parallel_inputs(self): """Union[int, None]: Number of parallel input segments completed.""" - return _helpers._int_or_none(self._properties.get('completedParallelInputs')) + return _helpers._int_or_none(self._properties.get("completedParallelInputs")) @property def wait_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent waiting to be scheduled. """ - return _helpers._int_or_none(self._properties.get('waitMsAvg')) + return _helpers._int_or_none(self._properties.get("waitMsAvg")) @property def wait_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent waiting to be scheduled. """ - return _helpers._int_or_none(self._properties.get('waitMsMax')) + return _helpers._int_or_none(self._properties.get("waitMsMax")) @property def wait_ratio_avg(self): @@ -2903,7 +2903,7 @@ def wait_ratio_avg(self): to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ - return self._properties.get('waitRatioAvg') + return self._properties.get("waitRatioAvg") @property def wait_ratio_max(self): @@ -2911,21 +2911,21 @@ def wait_ratio_max(self): to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. 
""" - return self._properties.get('waitRatioMax') + return self._properties.get("waitRatioMax") @property def read_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent reading input. """ - return _helpers._int_or_none(self._properties.get('readMsAvg')) + return _helpers._int_or_none(self._properties.get("readMsAvg")) @property def read_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent reading input. """ - return _helpers._int_or_none(self._properties.get('readMsMax')) + return _helpers._int_or_none(self._properties.get("readMsMax")) @property def read_ratio_avg(self): @@ -2933,7 +2933,7 @@ def read_ratio_avg(self): input, relative to the longest time spent by any worker in any stage of the overall plan. """ - return self._properties.get('readRatioAvg') + return self._properties.get("readRatioAvg") @property def read_ratio_max(self): @@ -2941,21 +2941,21 @@ def read_ratio_max(self): to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ - return self._properties.get('readRatioMax') + return self._properties.get("readRatioMax") @property def compute_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent on CPU-bound processing. """ - return _helpers._int_or_none(self._properties.get('computeMsAvg')) + return _helpers._int_or_none(self._properties.get("computeMsAvg")) @property def compute_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent on CPU-bound processing. """ - return _helpers._int_or_none(self._properties.get('computeMsMax')) + return _helpers._int_or_none(self._properties.get("computeMsMax")) @property def compute_ratio_avg(self): @@ -2963,7 +2963,7 @@ def compute_ratio_avg(self): CPU-bound processing, relative to the longest time spent by any worker in any stage of the overall plan. """ - return self._properties.get('computeRatioAvg') + return self._properties.get("computeRatioAvg") @property def compute_ratio_max(self): @@ -2971,21 +2971,21 @@ def compute_ratio_max(self): CPU-bound processing, relative to the longest time spent by any worker in any stage of the overall plan. """ - return self._properties.get('computeRatioMax') + return self._properties.get("computeRatioMax") @property def write_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent writing output data. """ - return _helpers._int_or_none(self._properties.get('writeMsAvg')) + return _helpers._int_or_none(self._properties.get("writeMsAvg")) @property def write_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent writing output data. """ - return _helpers._int_or_none(self._properties.get('writeMsMax')) + return _helpers._int_or_none(self._properties.get("writeMsMax")) @property def write_ratio_avg(self): @@ -2993,7 +2993,7 @@ def write_ratio_avg(self): output data, relative to the longest time spent by any worker in any stage of the overall plan. """ - return self._properties.get('writeRatioAvg') + return self._properties.get("writeRatioAvg") @property def write_ratio_max(self): @@ -3001,36 +3001,36 @@ def write_ratio_max(self): output data, relative to the longest time spent by any worker in any stage of the overall plan. 
""" - return self._properties.get('writeRatioMax') + return self._properties.get("writeRatioMax") @property def records_read(self): """Union[int, None]: Number of records read by this stage.""" - return _helpers._int_or_none(self._properties.get('recordsRead')) + return _helpers._int_or_none(self._properties.get("recordsRead")) @property def records_written(self): """Union[int, None]: Number of records written by this stage.""" - return _helpers._int_or_none(self._properties.get('recordsWritten')) + return _helpers._int_or_none(self._properties.get("recordsWritten")) @property def status(self): """Union[str, None]: status of this stage.""" - return self._properties.get('status') + return self._properties.get("status") @property def shuffle_output_bytes(self): """Union[int, None]: Number of bytes written by this stage to intermediate shuffle. """ - return _helpers._int_or_none(self._properties.get('shuffleOutputBytes')) + return _helpers._int_or_none(self._properties.get("shuffleOutputBytes")) @property def shuffle_output_bytes_spilled(self): """Union[int, None]: Number of bytes written by this stage to intermediate shuffle and spilled to disk. """ - return _helpers._int_or_none(self._properties.get('shuffleOutputBytesSpilled')) + return _helpers._int_or_none(self._properties.get("shuffleOutputBytesSpilled")) @property def steps(self): @@ -3039,7 +3039,7 @@ def steps(self): """ return [ QueryPlanEntryStep.from_api_repr(step) - for step in self._properties.get('steps', []) + for step in self._properties.get("steps", []) ] @@ -3076,31 +3076,31 @@ def from_api_repr(cls, resource): def elapsed_ms(self): """Union[int, None]: Milliseconds elapsed since start of query execution.""" - return _helpers._int_or_none(self._properties.get('elapsedMs')) + return _helpers._int_or_none(self._properties.get("elapsedMs")) @property def active_units(self): """Union[int, None]: Current number of input units being processed by workers, reported as largest value since the last sample.""" - return _helpers._int_or_none(self._properties.get('activeUnits')) + return _helpers._int_or_none(self._properties.get("activeUnits")) @property def pending_units(self): """Union[int, None]: Current number of input units remaining for query stages active at this sample time.""" - return _helpers._int_or_none(self._properties.get('pendingUnits')) + return _helpers._int_or_none(self._properties.get("pendingUnits")) @property def completed_units(self): """Union[int, None]: Current number of input units completed by this query.""" - return _helpers._int_or_none(self._properties.get('completedUnits')) + return _helpers._int_or_none(self._properties.get("completedUnits")) @property def slot_millis(self): """Union[int, None]: Cumulative slot-milliseconds consumed by this query.""" - return _helpers._int_or_none(self._properties.get('totalSlotMs')) + return _helpers._int_or_none(self._properties.get("totalSlotMs")) class UnknownJob(_AsyncJob): @@ -3118,11 +3118,11 @@ def from_api_repr(cls, resource, client): Returns: UnknownJob: Job corresponding to the resource. """ - job_ref_properties = resource.get('jobReference', {'projectId': client.project}) + job_ref_properties = resource.get("jobReference", {"projectId": client.project}) job_ref = _JobReference._from_api_repr(job_ref_properties) job = cls(job_ref, client) # Populate the job reference with the project, even if it has been # redacted, because we know it should equal that of the request. 
- resource['jobReference'] = job_ref_properties + resource["jobReference"] = job_ref_properties job._properties = resource return job diff --git a/bigquery/google/cloud/bigquery/magics.py b/bigquery/google/cloud/bigquery/magics.py index 05e8e52c7ffa..1b8f1f2ee923 100644 --- a/bigquery/google/cloud/bigquery/magics.py +++ b/bigquery/google/cloud/bigquery/magics.py @@ -138,7 +138,7 @@ from IPython import display from IPython.core import magic_arguments except ImportError: # pragma: NO COVER - raise ImportError('This module can only be loaded in IPython.') + raise ImportError("This module can only be loaded in IPython.") import google.auth from google.cloud import bigquery @@ -151,6 +151,7 @@ class Context(object): A Context object is initialized when the ``magics`` module is imported, and can be found at ``google.cloud.bigquery.magics.context``. """ + def __init__(self): self._credentials = None self._project = None @@ -244,52 +245,68 @@ def _run_query(client, query, job_config=None): """ start_time = time.time() query_job = client.query(query, job_config=job_config) - print('Executing query with job ID: {}'.format(query_job.job_id)) + print("Executing query with job ID: {}".format(query_job.job_id)) while True: - print('\rQuery executing: {:0.2f}s'.format( - time.time() - start_time), end='') + print("\rQuery executing: {:0.2f}s".format(time.time() - start_time), end="") try: query_job.result(timeout=0.5) break except futures.TimeoutError: continue - print('\nQuery complete after {:0.2f}s'.format(time.time() - start_time)) + print("\nQuery complete after {:0.2f}s".format(time.time() - start_time)) return query_job @magic_arguments.magic_arguments() @magic_arguments.argument( - 'destination_var', - nargs='?', - help=('If provided, save the output to this variable in addition ' - 'to displaying it.')) + "destination_var", + nargs="?", + help=( + "If provided, save the output to this variable in addition " "to displaying it." + ), +) @magic_arguments.argument( - '--project', + "--project", type=str, default=None, - help=('Project to use for executing this query. Defaults to the context ' - 'project.')) + help=( + "Project to use for executing this query. Defaults to the context " "project." + ), +) @magic_arguments.argument( - '--use_legacy_sql', action='store_true', default=False, - help=('Sets query to use Legacy SQL instead of Standard SQL. Defaults to ' - 'Standard SQL if this argument is not used.')) + "--use_legacy_sql", + action="store_true", + default=False, + help=( + "Sets query to use Legacy SQL instead of Standard SQL. Defaults to " + "Standard SQL if this argument is not used." + ), +) @magic_arguments.argument( - '--verbose', action='store_true', default=False, - help=('If set, print verbose output, including the query job ID and the ' - 'amount of time for the query to finish. By default, this ' - 'information will be displayed as the query runs, but will be ' - 'cleared after the query is finished.')) + "--verbose", + action="store_true", + default=False, + help=( + "If set, print verbose output, including the query job ID and the " + "amount of time for the query to finish. By default, this " + "information will be displayed as the query runs, but will be " + "cleared after the query is finished." + ), +) @magic_arguments.argument( - '--params', - nargs='+', + "--params", + nargs="+", default=None, - help=('Parameters to format the query string. 
If present, the --params ' - 'flag should be followed by a string representation of a dictionary ' - 'in the format {\'param_name\': \'param_value\'} (ex. {"num": 17}), ' - 'or a reference to a dictionary in the same format. The dictionary ' - 'reference can be made by including a \'$\' before the variable ' - 'name (ex. $my_dict_var).')) + help=( + "Parameters to format the query string. If present, the --params " + "flag should be followed by a string representation of a dictionary " + "in the format {'param_name': 'param_value'} (ex. {\"num\": 17}), " + "or a reference to a dictionary in the same format. The dictionary " + "reference can be made by including a '$' before the variable " + "name (ex. $my_dict_var)." + ), +) def _cell_magic(line, query): """Underlying function for bigquery cell magic @@ -310,11 +327,13 @@ def _cell_magic(line, query): if args.params is not None: try: params = _helpers.to_query_parameters( - ast.literal_eval(''.join(args.params))) + ast.literal_eval("".join(args.params)) + ) except Exception: raise SyntaxError( - '--params is not a correctly formatted JSON string or a JSON ' - 'serializable dictionary') + "--params is not a correctly formatted JSON string or a JSON " + "serializable dictionary" + ) project = args.project or context.project client = bigquery.Client(project=project, credentials=context.credentials) diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py index fb22c680e2b1..685d83cf9c7f 100644 --- a/bigquery/google/cloud/bigquery/query.py +++ b/bigquery/google/cloud/bigquery/query.py @@ -35,6 +35,7 @@ class UDFResource(object): See https://cloud.google.com/bigquery/user-defined-functions#api """ + def __init__(self, udf_type, value): self.udf_type = udf_type self.value = value @@ -42,9 +43,7 @@ def __init__(self, udf_type, value): def __eq__(self, other): if not isinstance(other, UDFResource): return NotImplemented - return( - self.udf_type == other.udf_type - and self.value == other.value) + return self.udf_type == other.udf_type and self.value == other.value def __ne__(self, other): return not self == other @@ -53,6 +52,7 @@ def __ne__(self, other): class _AbstractQueryParameter(object): """Base class for named / positional query parameters. """ + @classmethod def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. @@ -88,6 +88,7 @@ class ScalarQueryParameter(_AbstractQueryParameter): :class:`datetime.datetime`, or :class:`datetime.date`. :param value: the scalar parameter value. 
""" + def __init__(self, name, type_, value): self.name = name self.type_ = type_ @@ -123,9 +124,9 @@ def from_api_repr(cls, resource): :rtype: :class:`~google.cloud.bigquery.query.ScalarQueryParameter` :returns: instance """ - name = resource.get('name') - type_ = resource['parameterType']['type'] - value = resource['parameterValue']['value'] + name = resource.get("name") + type_ = resource["parameterType"]["type"] + value = resource["parameterValue"]["value"] converted = _QUERY_PARAMS_FROM_JSON[type_](value, None) return cls(name, type_, converted) @@ -140,15 +141,11 @@ def to_api_repr(self): if converter is not None: value = converter(value) resource = { - 'parameterType': { - 'type': self.type_, - }, - 'parameterValue': { - 'value': value, - }, + "parameterType": {"type": self.type_}, + "parameterValue": {"value": value}, } if self.name is not None: - resource['name'] = self.name + resource["name"] = self.name return resource def _key(self): @@ -160,11 +157,7 @@ def _key(self): tuple: The contents of this :class:`~google.cloud.bigquery.query.ScalarQueryParameter`. """ - return ( - self.name, - self.type_.upper(), - self.value, - ) + return (self.name, self.type_.upper(), self.value) def __eq__(self, other): if not isinstance(other, ScalarQueryParameter): @@ -175,7 +168,7 @@ def __ne__(self, other): return not self == other def __repr__(self): - return 'ScalarQueryParameter{}'.format(self._key()) + return "ScalarQueryParameter{}".format(self._key()) class ArrayQueryParameter(_AbstractQueryParameter): @@ -193,6 +186,7 @@ class ArrayQueryParameter(_AbstractQueryParameter): :type values: list of appropriate scalar type. :param values: the parameter array values. """ + def __init__(self, name, array_type, values): self.name = name self.array_type = array_type @@ -217,32 +211,28 @@ def positional(cls, array_type, values): @classmethod def _from_api_repr_struct(cls, resource): - name = resource.get('name') + name = resource.get("name") converted = [] # We need to flatten the array to use the StructQueryParameter # parse code. 
resource_template = { # The arrayType includes all the types of the fields of the STRUCT - 'parameterType': resource['parameterType']['arrayType'] + "parameterType": resource["parameterType"]["arrayType"] } - for array_value in resource['parameterValue']['arrayValues']: + for array_value in resource["parameterValue"]["arrayValues"]: struct_resource = copy.deepcopy(resource_template) - struct_resource['parameterValue'] = array_value + struct_resource["parameterValue"] = array_value struct_value = StructQueryParameter.from_api_repr(struct_resource) converted.append(struct_value) - return cls(name, 'STRUCT', converted) + return cls(name, "STRUCT", converted) @classmethod def _from_api_repr_scalar(cls, resource): - name = resource.get('name') - array_type = resource['parameterType']['arrayType']['type'] - values = [ - value['value'] - for value - in resource['parameterValue']['arrayValues']] + name = resource.get("name") + array_type = resource["parameterType"]["arrayType"]["type"] + values = [value["value"] for value in resource["parameterValue"]["arrayValues"]] converted = [ - _QUERY_PARAMS_FROM_JSON[array_type](value, None) - for value in values + _QUERY_PARAMS_FROM_JSON[array_type](value, None) for value in values ] return cls(name, array_type, converted) @@ -256,8 +246,8 @@ def from_api_repr(cls, resource): :rtype: :class:`~google.cloud.bigquery.query.ArrayQueryParameter` :returns: instance """ - array_type = resource['parameterType']['arrayType']['type'] - if array_type == 'STRUCT': + array_type = resource["parameterType"]["arrayType"]["type"] + if array_type == "STRUCT": return cls._from_api_repr_struct(resource) return cls._from_api_repr_scalar(resource) @@ -268,27 +258,22 @@ def to_api_repr(self): :returns: JSON mapping """ values = self.values - if self.array_type == 'RECORD' or self.array_type == 'STRUCT': + if self.array_type == "RECORD" or self.array_type == "STRUCT": reprs = [value.to_api_repr() for value in values] - a_type = reprs[0]['parameterType'] - a_values = [repr_['parameterValue'] for repr_ in reprs] + a_type = reprs[0]["parameterType"] + a_values = [repr_["parameterValue"] for repr_ in reprs] else: - a_type = {'type': self.array_type} + a_type = {"type": self.array_type} converter = _SCALAR_VALUE_TO_JSON_PARAM.get(self.array_type) if converter is not None: values = [converter(value) for value in values] - a_values = [{'value': value} for value in values] + a_values = [{"value": value} for value in values] resource = { - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': a_type, - }, - 'parameterValue': { - 'arrayValues': a_values, - }, + "parameterType": {"type": "ARRAY", "arrayType": a_type}, + "parameterValue": {"arrayValues": a_values}, } if self.name is not None: - resource['name'] = self.name + resource["name"] = self.name return resource def _key(self): @@ -300,11 +285,7 @@ def _key(self): tuple: The contents of this :class:`~google.cloud.bigquery.query.ArrayQueryParameter`. 
""" - return ( - self.name, - self.array_type.upper(), - self.values, - ) + return (self.name, self.array_type.upper(), self.values) def __eq__(self, other): if not isinstance(other, ArrayQueryParameter): @@ -315,7 +296,7 @@ def __ne__(self, other): return not self == other def __repr__(self): - return 'ArrayQueryParameter{}'.format(self._key()) + return "ArrayQueryParameter{}".format(self._key()) class StructQueryParameter(_AbstractQueryParameter): @@ -331,16 +312,17 @@ class StructQueryParameter(_AbstractQueryParameter): :class:`~google.cloud.bigquery.query.StructQueryParameter` :param sub_params: the sub-parameters for the struct """ + def __init__(self, name, *sub_params): self.name = name types = self.struct_types = OrderedDict() values = self.struct_values = {} for sub in sub_params: if isinstance(sub, self.__class__): - types[sub.name] = 'STRUCT' + types[sub.name] = "STRUCT" values[sub.name] = sub elif isinstance(sub, ArrayQueryParameter): - types[sub.name] = 'ARRAY' + types[sub.name] = "ARRAY" values[sub.name] = sub else: types[sub.name] = sub.type_ @@ -372,33 +354,33 @@ def from_api_repr(cls, resource): :rtype: :class:`~google.cloud.bigquery.query.StructQueryParameter` :returns: instance """ - name = resource.get('name') + name = resource.get("name") instance = cls(name) type_resources = {} types = instance.struct_types - for item in resource['parameterType']['structTypes']: - types[item['name']] = item['type']['type'] - type_resources[item['name']] = item['type'] - struct_values = resource['parameterValue']['structValues'] + for item in resource["parameterType"]["structTypes"]: + types[item["name"]] = item["type"]["type"] + type_resources[item["name"]] = item["type"] + struct_values = resource["parameterValue"]["structValues"] for key, value in struct_values.items(): type_ = types[key] converted = None - if type_ == 'STRUCT': + if type_ == "STRUCT": struct_resource = { - 'name': key, - 'parameterType': type_resources[key], - 'parameterValue': value, + "name": key, + "parameterType": type_resources[key], + "parameterValue": value, } converted = StructQueryParameter.from_api_repr(struct_resource) - elif type_ == 'ARRAY': + elif type_ == "ARRAY": struct_resource = { - 'name': key, - 'parameterType': type_resources[key], - 'parameterValue': value, + "name": key, + "parameterType": type_resources[key], + "parameterValue": value, } converted = ArrayQueryParameter.from_api_repr(struct_resource) else: - value = value['value'] + value = value["value"] converted = _QUERY_PARAMS_FROM_JSON[type_](value, None) instance.struct_values[key] = converted return instance @@ -413,28 +395,26 @@ def to_api_repr(self): values = {} for name, value in self.struct_values.items(): type_ = self.struct_types[name] - if type_ in ('STRUCT', 'ARRAY'): + if type_ in ("STRUCT", "ARRAY"): repr_ = value.to_api_repr() - s_types[name] = {'name': name, 'type': repr_['parameterType']} - values[name] = repr_['parameterValue'] + s_types[name] = {"name": name, "type": repr_["parameterType"]} + values[name] = repr_["parameterValue"] else: - s_types[name] = {'name': name, 'type': {'type': type_}} + s_types[name] = {"name": name, "type": {"type": type_}} converter = _SCALAR_VALUE_TO_JSON_PARAM.get(type_) if converter is not None: value = converter(value) - values[name] = {'value': value} + values[name] = {"value": value} resource = { - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [s_types[key] for key in self.struct_types], - }, - 'parameterValue': { - 'structValues': values, + "parameterType": { + "type": 
"STRUCT", + "structTypes": [s_types[key] for key in self.struct_types], }, + "parameterValue": {"structValues": values}, } if self.name is not None: - resource['name'] = self.name + resource["name"] = self.name return resource def _key(self): @@ -446,11 +426,7 @@ def _key(self): tuple: The contents of this :class:`~google.cloud.biquery.ArrayQueryParameter`. """ - return ( - self.name, - self.struct_types, - self.struct_values, - ) + return (self.name, self.struct_types, self.struct_values) def __eq__(self, other): if not isinstance(other, StructQueryParameter): @@ -461,7 +437,7 @@ def __ne__(self, other): return not self == other def __repr__(self): - return 'StructQueryParameter{}'.format(self._key()) + return "StructQueryParameter{}".format(self._key()) class _QueryResults(object): @@ -486,7 +462,7 @@ def project(self): :rtype: str :returns: the project that the query job is associated with. """ - return self._properties.get('jobReference', {}).get('projectId') + return self._properties.get("jobReference", {}).get("projectId") @property def cache_hit(self): @@ -499,7 +475,7 @@ def cache_hit(self): :returns: True if the query results were served from cache (None until set by the server). """ - return self._properties.get('cacheHit') + return self._properties.get("cacheHit") @property def complete(self): @@ -512,7 +488,7 @@ def complete(self): :returns: True if the query completed on the server (None until set by the server). """ - return self._properties.get('jobComplete') + return self._properties.get("jobComplete") @property def errors(self): @@ -525,7 +501,7 @@ def errors(self): :returns: Mappings describing errors generated on the server (None until set by the server). """ - return self._properties.get('errors') + return self._properties.get("errors") @property def job_id(self): @@ -537,7 +513,7 @@ def job_id(self): :rtype: string :returns: Job ID of the query job. """ - return self._properties.get('jobReference', {}).get('jobId') + return self._properties.get("jobReference", {}).get("jobId") @property def page_token(self): @@ -549,7 +525,7 @@ def page_token(self): :rtype: str, or ``NoneType`` :returns: Token generated on the server (None until set by the server). """ - return self._properties.get('pageToken') + return self._properties.get("pageToken") @property def total_rows(self): @@ -561,7 +537,7 @@ def total_rows(self): :rtype: int, or ``NoneType`` :returns: Count generated on the server (None until set by the server). """ - total_rows = self._properties.get('totalRows') + total_rows = self._properties.get("totalRows") if total_rows is not None: return int(total_rows) @@ -575,7 +551,7 @@ def total_bytes_processed(self): :rtype: int, or ``NoneType`` :returns: Count generated on the server (None until set by the server). """ - total_bytes_processed = self._properties.get('totalBytesProcessed') + total_bytes_processed = self._properties.get("totalBytesProcessed") if total_bytes_processed is not None: return int(total_bytes_processed) @@ -589,7 +565,7 @@ def num_dml_affected_rows(self): :rtype: int, or ``NoneType`` :returns: Count generated on the server (None until set by the server). """ - num_dml_affected_rows = self._properties.get('numDmlAffectedRows') + num_dml_affected_rows = self._properties.get("numDmlAffectedRows") if num_dml_affected_rows is not None: return int(num_dml_affected_rows) @@ -603,7 +579,7 @@ def rows(self): :rtype: list of :class:`~google.cloud.bigquery.table.Row` :returns: fields describing the schema (None until set by the server). 
""" - return _rows_from_json(self._properties.get('rows', ()), self.schema) + return _rows_from_json(self._properties.get("rows", ()), self.schema) @property def schema(self): @@ -615,7 +591,7 @@ def schema(self): :rtype: list of :class:`SchemaField`, or ``NoneType`` :returns: fields describing the schema (None until set by the server). """ - return _parse_schema_resource(self._properties.get('schema', {})) + return _parse_schema_resource(self._properties.get("schema", {})) def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` @@ -624,11 +600,12 @@ def _set_properties(self, api_response): :param api_response: response returned from an API call """ job_id_present = ( - 'jobReference' in api_response - and 'jobId' in api_response['jobReference'] - and 'projectId' in api_response['jobReference']) + "jobReference" in api_response + and "jobId" in api_response["jobReference"] + and "projectId" in api_response["jobReference"] + ) if not job_id_present: - raise ValueError('QueryResult requires a job reference') + raise ValueError("QueryResult requires a job reference") self._properties.clear() self._properties.update(copy.deepcopy(api_response)) @@ -636,10 +613,10 @@ def _set_properties(self, api_response): def _query_param_from_api_repr(resource): """Helper: construct concrete query parameter from JSON resource.""" - qp_type = resource['parameterType'] - if 'arrayType' in qp_type: + qp_type = resource["parameterType"] + if "arrayType" in qp_type: klass = ArrayQueryParameter - elif 'structTypes' in qp_type: + elif "structTypes" in qp_type: klass = StructQueryParameter else: klass = ScalarQueryParameter diff --git a/bigquery/google/cloud/bigquery/retry.py b/bigquery/google/cloud/bigquery/retry.py index 40e54113d309..4bc4b757f45d 100644 --- a/bigquery/google/cloud/bigquery/retry.py +++ b/bigquery/google/cloud/bigquery/retry.py @@ -1,4 +1,3 @@ - # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,12 +16,9 @@ from google.api_core import retry -_RETRYABLE_REASONS = frozenset([ - 'rateLimitExceeded', - 'backendError', - 'internalError', - 'badGateway', -]) +_RETRYABLE_REASONS = frozenset( + ["rateLimitExceeded", "backendError", "internalError", "badGateway"] +) _UNSTRUCTURED_RETRYABLE_TYPES = ( exceptions.TooManyRequests, @@ -37,14 +33,14 @@ def _should_retry(exc): We retry if and only if the 'reason' is 'backendError' or 'rateLimitExceeded'. """ - if not hasattr(exc, 'errors'): + if not hasattr(exc, "errors"): return False if len(exc.errors) == 0: # Check for unstructured error returns, e.g. from GFE return isinstance(exc, _UNSTRUCTURED_RETRYABLE_TYPES) - reason = exc.errors[0]['reason'] + reason = exc.errors[0]["reason"] return reason in _RETRYABLE_REASONS diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py index 759d7c3cbe65..99fc65fc0c46 100644 --- a/bigquery/google/cloud/bigquery/schema.py +++ b/bigquery/google/cloud/bigquery/schema.py @@ -32,8 +32,8 @@ class SchemaField(object): fields (Tuple[:class:`~google.cloud.bigquery.schema.SchemaField`]): subfields (requires ``field_type`` of 'RECORD'). """ - def __init__(self, name, field_type, mode='NULLABLE', - description=None, fields=()): + + def __init__(self, name, field_type, mode="NULLABLE", description=None, fields=()): self._name = name self._field_type = field_type self._mode = mode @@ -54,15 +54,15 @@ def from_api_repr(cls, api_repr): The ``SchemaField`` object. 
""" # Handle optional properties with default values - mode = api_repr.get('mode', 'NULLABLE') - description = api_repr.get('description') - fields = api_repr.get('fields', ()) + mode = api_repr.get("mode", "NULLABLE") + description = api_repr.get("description") + fields = api_repr.get("fields", ()) return cls( - field_type=api_repr['type'].upper(), + field_type=api_repr["type"].upper(), fields=[cls.from_api_repr(f) for f in fields], mode=mode.upper(), description=description, - name=api_repr['name'], + name=api_repr["name"], ) @property @@ -91,7 +91,7 @@ def mode(self): @property def is_nullable(self): """bool: whether 'mode' is 'nullable'.""" - return self._mode == 'NULLABLE' + return self._mode == "NULLABLE" @property def description(self): @@ -115,16 +115,16 @@ def to_api_repr(self): """ # Put together the basic representation. See http://bit.ly/2hOAT5u. answer = { - 'mode': self.mode.upper(), - 'name': self.name, - 'type': self.field_type.upper(), - 'description': self.description, + "mode": self.mode.upper(), + "name": self.name, + "type": self.field_type.upper(), + "description": self.description, } # If this is a RECORD type, then sub-fields are also included, # add this to the serialized representation. - if self.field_type.upper() == 'RECORD': - answer['fields'] = [f.to_api_repr() for f in self.fields] + if self.field_type.upper() == "RECORD": + answer["fields"] = [f.to_api_repr() for f in self.fields] # Done; return the serialized dictionary. return answer @@ -158,7 +158,7 @@ def __hash__(self): return hash(self._key()) def __repr__(self): - return 'SchemaField{}'.format(self._key()) + return "SchemaField{}".format(self._key()) def _parse_schema_resource(info): @@ -171,18 +171,17 @@ def _parse_schema_resource(info): (Union[Sequence[:class:`google.cloud.bigquery.schema.SchemaField`],None]) a list of parsed fields, or ``None`` if no "fields" key found. """ - if 'fields' not in info: + if "fields" not in info: return () schema = [] - for r_field in info['fields']: - name = r_field['name'] - field_type = r_field['type'] - mode = r_field.get('mode', 'NULLABLE') - description = r_field.get('description') + for r_field in info["fields"]: + name = r_field["name"] + field_type = r_field["type"] + mode = r_field.get("mode", "NULLABLE") + description = r_field.get("description") sub_fields = _parse_schema_resource(r_field) - schema.append( - SchemaField(name, field_type, mode, description, sub_fields)) + schema.append(SchemaField(name, field_type, mode, description, sub_fields)) return schema diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 28036fba74ac..c145d6a9dc1c 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -22,6 +22,7 @@ import warnings import six + try: import pandas except ImportError: # pragma: NO COVER @@ -38,8 +39,8 @@ _NO_PANDAS_ERROR = ( - 'The pandas library is not installed, please install ' - 'pandas to use the to_dataframe() function.' + "The pandas library is not installed, please install " + "pandas to use the to_dataframe() function." ) _TABLE_HAS_NO_SCHEMA = 'Table has no schema: call "client.get_table()"' _MARKER = object() @@ -69,13 +70,13 @@ def _view_use_legacy_sql_getter(table): Raises: ValueError: For invalid value types. """ - view = table._properties.get('view') + view = table._properties.get("view") if view is not None: # The server-side default for useLegacySql is True. 
- return view.get('useLegacySql', True) + return view.get("useLegacySql", True) # In some cases, such as in a table list no view object is present, but the # resource still represents a view. Use the type as a fallback. - if table.table_type == 'VIEW': + if table.table_type == "VIEW": # The server-side default for useLegacySql is True. return True @@ -90,7 +91,7 @@ class EncryptionConfiguration(object): def __init__(self, kms_key_name=None): self._properties = {} if kms_key_name is not None: - self._properties['kmsKeyName'] = kms_key_name + self._properties["kmsKeyName"] = kms_key_name @property def kms_key_name(self): @@ -99,11 +100,11 @@ def kms_key_name(self): Resource ID of Cloud KMS key or :data:`None` if using default encryption. """ - return self._properties.get('kmsKeyName') + return self._properties.get("kmsKeyName") @kms_key_name.setter def kms_key_name(self, value): - self._properties['kmsKeyName'] = value + self._properties["kmsKeyName"] = value @classmethod def from_api_repr(cls, resource): @@ -144,7 +145,7 @@ def __hash__(self): return hash(self.kms_key_name) def __repr__(self): - return 'EncryptionConfiguration({})'.format(self.kms_key_name) + return "EncryptionConfiguration({})".format(self.kms_key_name) class TableReference(object): @@ -182,8 +183,11 @@ def table_id(self): @property def path(self): """str: URL path for the table's APIs.""" - return '/projects/%s/datasets/%s/tables/%s' % ( - self._project, self._dataset_id, self._table_id) + return "/projects/%s/datasets/%s/tables/%s" % ( + self._project, + self._dataset_id, + self._table_id, + ) @classmethod def from_string(cls, table_id, default_project=None): @@ -215,32 +219,33 @@ def from_string(cls, table_id, default_project=None): output_project_id = default_project output_dataset_id = None output_table_id = None - parts = table_id.split('.') + parts = table_id.split(".") if len(parts) < 2: raise ValueError( - 'table_id must be a fully-qualified table ID in ' + "table_id must be a fully-qualified table ID in " 'standard SQL format. e.g. "project.dataset.table", got ' - '{}'.format(table_id)) + "{}".format(table_id) + ) elif len(parts) == 2: if not default_project: raise ValueError( - 'When default_project is not set, table_id must be a ' - 'fully-qualified table ID in standard SQL format. ' - 'e.g. "project.dataset_id.table_id", got {}'.format( - table_id)) + "When default_project is not set, table_id must be a " + "fully-qualified table ID in standard SQL format. " + 'e.g. "project.dataset_id.table_id", got {}'.format(table_id) + ) output_dataset_id, output_table_id = parts elif len(parts) == 3: output_project_id, output_dataset_id, output_table_id = parts if len(parts) > 3: raise ValueError( - 'Too many parts in table_id. Must be a fully-qualified table ' + "Too many parts in table_id. Must be a fully-qualified table " 'ID in standard SQL format. e.g. 
"project.dataset.table", ' - 'got {}'.format(table_id)) + "got {}".format(table_id) + ) return cls( - DatasetReference(output_project_id, output_dataset_id), - output_table_id, + DatasetReference(output_project_id, output_dataset_id), output_table_id ) @classmethod @@ -257,9 +262,9 @@ def from_api_repr(cls, resource): """ from google.cloud.bigquery.dataset import DatasetReference - project = resource['projectId'] - dataset_id = resource['datasetId'] - table_id = resource['tableId'] + project = resource["projectId"] + dataset_id = resource["datasetId"] + table_id = resource["tableId"] return cls(DatasetReference(project, dataset_id), table_id) def to_api_repr(self): @@ -269,9 +274,9 @@ def to_api_repr(self): Dict[str, object]: Table reference represented as an API resource """ return { - 'projectId': self._project, - 'datasetId': self._dataset_id, - 'tableId': self._table_id, + "projectId": self._project, + "datasetId": self._dataset_id, + "tableId": self._table_id, } def _key(self): @@ -282,11 +287,7 @@ def _key(self): Returns: Tuple[str]: The contents of this :class:`DatasetReference`. """ - return ( - self._project, - self._dataset_id, - self._table_id, - ) + return (self._project, self._dataset_id, self._table_id) def __eq__(self, other): if not isinstance(other, TableReference): @@ -301,9 +302,9 @@ def __hash__(self): def __repr__(self): from google.cloud.bigquery.dataset import DatasetReference + dataset_ref = DatasetReference(self._project, self._dataset_id) - return "TableReference({}, '{}')".format( - repr(dataset_ref), self._table_id) + return "TableReference({}, '{}')".format(repr(dataset_ref), self._table_id) class Table(object): @@ -320,22 +321,19 @@ class Table(object): """ _PROPERTY_TO_API_FIELD = { - 'friendly_name': 'friendlyName', - 'expires': 'expirationTime', - 'time_partitioning': 'timePartitioning', - 'partitioning_type': 'timePartitioning', - 'partition_expiration': 'timePartitioning', - 'view_use_legacy_sql': 'view', - 'view_query': 'view', - 'external_data_configuration': 'externalDataConfiguration', - 'encryption_configuration': 'encryptionConfiguration', + "friendly_name": "friendlyName", + "expires": "expirationTime", + "time_partitioning": "timePartitioning", + "partitioning_type": "timePartitioning", + "partition_expiration": "timePartitioning", + "view_use_legacy_sql": "view", + "view_query": "view", + "external_data_configuration": "externalDataConfiguration", + "encryption_configuration": "encryptionConfiguration", } def __init__(self, table_ref, schema=None): - self._properties = { - 'tableReference': table_ref.to_api_repr(), - 'labels': {}, - } + self._properties = {"tableReference": table_ref.to_api_repr(), "labels": {}} # Let the @property do validation. 
if schema is not None: self.schema = schema @@ -343,25 +341,28 @@ def __init__(self, table_ref, schema=None): @property def project(self): """str: Project bound to the table.""" - return self._properties['tableReference']['projectId'] + return self._properties["tableReference"]["projectId"] @property def dataset_id(self): """str: ID of dataset containing the table.""" - return self._properties['tableReference']['datasetId'] + return self._properties["tableReference"]["datasetId"] @property def table_id(self): """str: ID of the table.""" - return self._properties['tableReference']['tableId'] + return self._properties["tableReference"]["tableId"] reference = property(_reference_getter) @property def path(self): """str: URL path for the table's APIs.""" - return '/projects/%s/datasets/%s/tables/%s' % ( - self.project, self.dataset_id, self.table_id) + return "/projects/%s/datasets/%s/tables/%s" % ( + self.project, + self.dataset_id, + self.table_id, + ) @property def schema(self): @@ -373,7 +374,7 @@ def schema(self): If any item in the sequence is not a :class:`~google.cloud.bigquery.schema.SchemaField` """ - prop = self._properties.get('schema') + prop = self._properties.get("schema") if not prop: return [] else: @@ -382,13 +383,11 @@ def schema(self): @schema.setter def schema(self, value): if value is None: - self._properties['schema'] = None + self._properties["schema"] = None elif not all(isinstance(field, SchemaField) for field in value): - raise ValueError('Schema items must be fields') + raise ValueError("Schema items must be fields") else: - self._properties['schema'] = { - 'fields': _build_schema_resource(value) - } + self._properties["schema"] = {"fields": _build_schema_resource(value)} @property def labels(self): @@ -401,13 +400,13 @@ def labels(self): Raises: ValueError: If ``value`` type is invalid. """ - return self._properties.setdefault('labels', {}) + return self._properties.setdefault("labels", {}) @labels.setter def labels(self, value): if not isinstance(value, dict): raise ValueError("Pass a dict") - self._properties['labels'] = value + self._properties["labels"] = value @property def encryption_configuration(self): @@ -421,7 +420,7 @@ def encryption_configuration(self): `_ in the BigQuery documentation. """ - prop = self._properties.get('encryptionConfiguration') + prop = self._properties.get("encryptionConfiguration") if prop is not None: prop = EncryptionConfiguration.from_api_repr(prop) return prop @@ -431,57 +430,59 @@ def encryption_configuration(self, value): api_repr = value if value is not None: api_repr = value.to_api_repr() - self._properties['encryptionConfiguration'] = api_repr + self._properties["encryptionConfiguration"] = api_repr @property def created(self): """Union[datetime.datetime, None]: Datetime at which the table was created (:data:`None` until set from the server). """ - creation_time = self._properties.get('creationTime') + creation_time = self._properties.get("creationTime") if creation_time is not None: # creation_time will be in milliseconds. return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(creation_time)) + 1000.0 * float(creation_time) + ) @property def etag(self): """Union[str, None]: ETag for the table resource (:data:`None` until set from the server). """ - return self._properties.get('etag') + return self._properties.get("etag") @property def modified(self): """Union[datetime.datetime, None]: Datetime at which the table was last modified (:data:`None` until set from the server). 
""" - modified_time = self._properties.get('lastModifiedTime') + modified_time = self._properties.get("lastModifiedTime") if modified_time is not None: # modified_time will be in milliseconds. return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(modified_time)) + 1000.0 * float(modified_time) + ) @property def num_bytes(self): """Union[int, None]: The size of the table in bytes (:data:`None` until set from the server). """ - return _helpers._int_or_none(self._properties.get('numBytes')) + return _helpers._int_or_none(self._properties.get("numBytes")) @property def num_rows(self): """Union[int, None]: The number of rows in the table (:data:`None` until set from the server). """ - return _helpers._int_or_none(self._properties.get('numRows')) + return _helpers._int_or_none(self._properties.get("numRows")) @property def self_link(self): """Union[str, None]: URL for the table resource (:data:`None` until set from the server). """ - return self._properties.get('selfLink') + return self._properties.get("selfLink") @property def full_table_id(self): @@ -490,7 +491,7 @@ def full_table_id(self): In the format ``project_id:dataset_id.table_id``. """ - return self._properties.get('id') + return self._properties.get("id") @property def table_type(self): @@ -499,7 +500,7 @@ def table_type(self): Possible values are ``'TABLE'``, ``'VIEW'``, or ``'EXTERNAL'``. """ - return self._properties.get('type') + return self._properties.get("type") @property def time_partitioning(self): @@ -510,7 +511,7 @@ def time_partitioning(self): ValueError: If the value is not :class:`TimePartitioning` or :data:`None`. """ - prop = self._properties.get('timePartitioning') + prop = self._properties.get("timePartitioning") if prop is not None: return TimePartitioning.from_api_repr(prop) @@ -521,9 +522,9 @@ def time_partitioning(self, value): api_repr = value.to_api_repr() elif value is not None: raise ValueError( - "value must be google.cloud.bigquery.table.TimePartitioning " - "or None") - self._properties['timePartitioning'] = api_repr + "value must be google.cloud.bigquery.table.TimePartitioning " "or None" + ) + self._properties["timePartitioning"] = api_repr @property def partitioning_type(self): @@ -536,7 +537,9 @@ def partitioning_type(self): warnings.warn( "This method will be deprecated in future versions. Please use " "Table.time_partitioning.type_ instead.", - PendingDeprecationWarning, stacklevel=2) + PendingDeprecationWarning, + stacklevel=2, + ) if self.time_partitioning is not None: return self.time_partitioning.type_ @@ -545,10 +548,12 @@ def partitioning_type(self, value): warnings.warn( "This method will be deprecated in future versions. Please use " "Table.time_partitioning.type_ instead.", - PendingDeprecationWarning, stacklevel=2) + PendingDeprecationWarning, + stacklevel=2, + ) if self.time_partitioning is None: - self._properties['timePartitioning'] = {} - self._properties['timePartitioning']['type'] = value + self._properties["timePartitioning"] = {} + self._properties["timePartitioning"]["type"] = value @property def partition_expiration(self): @@ -561,7 +566,9 @@ def partition_expiration(self): warnings.warn( "This method will be deprecated in future versions. 
Please use " "Table.time_partitioning.expiration_ms instead.", - PendingDeprecationWarning, stacklevel=2) + PendingDeprecationWarning, + stacklevel=2, + ) if self.time_partitioning is not None: return self.time_partitioning.expiration_ms @@ -570,11 +577,12 @@ def partition_expiration(self, value): warnings.warn( "This method will be deprecated in future versions. Please use " "Table.time_partitioning.expiration_ms instead.", - PendingDeprecationWarning, stacklevel=2) + PendingDeprecationWarning, + stacklevel=2, + ) if self.time_partitioning is None: - self._properties['timePartitioning'] = { - 'type': TimePartitioningType.DAY} - self._properties['timePartitioning']['expirationMs'] = str(value) + self._properties["timePartitioning"] = {"type": TimePartitioningType.DAY} + self._properties["timePartitioning"]["expirationMs"] = str(value) @property def clustering_fields(self): @@ -589,9 +597,9 @@ def clustering_fields(self): As of 2018-06-29, clustering fields cannot be set on a table which does not also have time partioning defined. """ - prop = self._properties.get('clustering') + prop = self._properties.get("clustering") if prop is not None: - return list(prop.get('fields', ())) + return list(prop.get("fields", ())) @clustering_fields.setter def clustering_fields(self, value): @@ -600,11 +608,11 @@ def clustering_fields(self, value): (Defaults to :data:`None`). """ if value is not None: - prop = self._properties.setdefault('clustering', {}) - prop['fields'] = value + prop = self._properties.setdefault("clustering", {}) + prop["fields"] = value else: - if 'clustering' in self._properties: - del self._properties['clustering'] + if "clustering" in self._properties: + del self._properties["clustering"] @property def description(self): @@ -614,13 +622,13 @@ def description(self): Raises: ValueError: For invalid value types. """ - return self._properties.get('description') + return self._properties.get("description") @description.setter def description(self, value): if not isinstance(value, six.string_types) and value is not None: raise ValueError("Pass a string, or None") - self._properties['description'] = value + self._properties["description"] = value @property def expires(self): @@ -630,18 +638,19 @@ def expires(self): Raises: ValueError: For invalid value types. """ - expiration_time = self._properties.get('expirationTime') + expiration_time = self._properties.get("expirationTime") if expiration_time is not None: # expiration_time will be in milliseconds. return google.cloud._helpers._datetime_from_microseconds( - 1000.0 * float(expiration_time)) + 1000.0 * float(expiration_time) + ) @expires.setter def expires(self, value): if not isinstance(value, datetime.datetime) and value is not None: raise ValueError("Pass a datetime, or None") value_ms = google.cloud._helpers._millis_from_datetime(value) - self._properties['expirationTime'] = _helpers._str_or_none(value_ms) + self._properties["expirationTime"] = _helpers._str_or_none(value_ms) @property def friendly_name(self): @@ -650,13 +659,13 @@ def friendly_name(self): Raises: ValueError: For invalid value types. 
""" - return self._properties.get('friendlyName') + return self._properties.get("friendlyName") @friendly_name.setter def friendly_name(self, value): if not isinstance(value, six.string_types) and value is not None: raise ValueError("Pass a string, or None") - self._properties['friendlyName'] = value + self._properties["friendlyName"] = value @property def location(self): @@ -664,7 +673,7 @@ def location(self): Defaults to :data:`None`. """ - return self._properties.get('location') + return self._properties.get("location") @property def view_query(self): @@ -677,27 +686,27 @@ def view_query(self): Raises: ValueError: For invalid value types. """ - view = self._properties.get('view') + view = self._properties.get("view") if view is not None: - return view.get('query') + return view.get("query") @view_query.setter def view_query(self, value): if not isinstance(value, six.string_types): raise ValueError("Pass a string") - view = self._properties.get('view') + view = self._properties.get("view") if view is None: - view = self._properties['view'] = {} - view['query'] = value + view = self._properties["view"] = {} + view["query"] = value # The service defaults useLegacySql to True, but this # client uses Standard SQL by default. - if view.get('useLegacySql') is None: - view['useLegacySql'] = False + if view.get("useLegacySql") is None: + view["useLegacySql"] = False @view_query.deleter def view_query(self): """Delete SQL query defining the table as a view.""" - self._properties.pop('view', None) + self._properties.pop("view", None) view_use_legacy_sql = property(_view_use_legacy_sql_getter) @@ -705,16 +714,16 @@ def view_query(self): def view_use_legacy_sql(self, value): if not isinstance(value, bool): raise ValueError("Pass a boolean") - if self._properties.get('view') is None: - self._properties['view'] = {} - self._properties['view']['useLegacySql'] = value + if self._properties.get("view") is None: + self._properties["view"] = {} + self._properties["view"]["useLegacySql"] = value @property def streaming_buffer(self): """google.cloud.bigquery.StreamingBuffer: Information about a table's streaming buffer. """ - sb = self._properties.get('streamingBuffer') + sb = self._properties.get("streamingBuffer") if sb is not None: return StreamingBuffer(sb) @@ -726,7 +735,7 @@ def external_data_configuration(self): Raises: ValueError: For invalid value types. 
""" - prop = self._properties.get('externalDataConfiguration') + prop = self._properties.get("externalDataConfiguration") if prop is not None: prop = ExternalConfig.from_api_repr(prop) return prop @@ -738,7 +747,7 @@ def external_data_configuration(self, value): api_repr = value if value is not None: api_repr = value.to_api_repr() - self._properties['externalDataConfiguration'] = api_repr + self._properties["externalDataConfiguration"] = api_repr @classmethod def from_string(cls, full_table_id): @@ -785,12 +794,17 @@ def from_api_repr(cls, resource): """ from google.cloud.bigquery import dataset - if ('tableReference' not in resource or 'tableId' not in resource['tableReference']): - raise KeyError('Resource lacks required identity information:' - '["tableReference"]["tableId"]') - project_id = resource['tableReference']['projectId'] - table_id = resource['tableReference']['tableId'] - dataset_id = resource['tableReference']['datasetId'] + if ( + "tableReference" not in resource + or "tableId" not in resource["tableReference"] + ): + raise KeyError( + "Resource lacks required identity information:" + '["tableReference"]["tableId"]' + ) + project_id = resource["tableReference"]["projectId"] + table_id = resource["tableReference"]["tableId"] + dataset_id = resource["tableReference"]["datasetId"] dataset_ref = dataset.DatasetReference(project_id, dataset_id) table = cls(dataset_ref.table(table_id)) @@ -812,7 +826,7 @@ def _build_resource(self, filter_fields): for filter_field in filter_fields: api_field = self._PROPERTY_TO_API_FIELD.get(filter_field) if api_field is None and filter_field not in self._properties: - raise ValueError('No Table property %s' % filter_field) + raise ValueError("No Table property %s" % filter_field) elif api_field is not None: partial[api_field] = self._properties.get(api_field) else: @@ -823,7 +837,7 @@ def _build_resource(self, filter_fields): return partial def __repr__(self): - return 'Table({})'.format(repr(self.reference)) + return "Table({})".format(repr(self.reference)) class TableListItem(object): @@ -851,34 +865,35 @@ class TableListItem(object): """ def __init__(self, resource): - if 'tableReference' not in resource: - raise ValueError('resource must contain a tableReference value') - if 'projectId' not in resource['tableReference']: + if "tableReference" not in resource: + raise ValueError("resource must contain a tableReference value") + if "projectId" not in resource["tableReference"]: raise ValueError( - "resource['tableReference'] must contain a projectId value") - if 'datasetId' not in resource['tableReference']: + "resource['tableReference'] must contain a projectId value" + ) + if "datasetId" not in resource["tableReference"]: raise ValueError( - "resource['tableReference'] must contain a datasetId value") - if 'tableId' not in resource['tableReference']: - raise ValueError( - "resource['tableReference'] must contain a tableId value") + "resource['tableReference'] must contain a datasetId value" + ) + if "tableId" not in resource["tableReference"]: + raise ValueError("resource['tableReference'] must contain a tableId value") self._properties = resource @property def project(self): """str: Project bound to the table.""" - return self._properties['tableReference']['projectId'] + return self._properties["tableReference"]["projectId"] @property def dataset_id(self): """str: ID of dataset containing the table.""" - return self._properties['tableReference']['datasetId'] + return self._properties["tableReference"]["datasetId"] @property def 
table_id(self): """str: ID of the table.""" - return self._properties['tableReference']['tableId'] + return self._properties["tableReference"]["tableId"] reference = property(_reference_getter) @@ -890,7 +905,7 @@ def labels(self): modify the dict, then call ``Client.update_table``. To delete a label, set its value to :data:`None` before updating. """ - return self._properties.setdefault('labels', {}) + return self._properties.setdefault("labels", {}) @property def full_table_id(self): @@ -899,7 +914,7 @@ def full_table_id(self): In the format ``project_id:dataset_id.table_id``. """ - return self._properties.get('id') + return self._properties.get("id") @property def table_type(self): @@ -908,14 +923,14 @@ def table_type(self): Possible values are ``'TABLE'``, ``'VIEW'``, or ``'EXTERNAL'``. """ - return self._properties.get('type') + return self._properties.get("type") @property def time_partitioning(self): """google.cloud.bigquery.table.TimePartitioning: Configures time-based partitioning for a table. """ - prop = self._properties.get('timePartitioning') + prop = self._properties.get("timePartitioning") if prop is not None: return TimePartitioning.from_api_repr(prop) @@ -927,7 +942,9 @@ def partitioning_type(self): warnings.warn( "This method will be deprecated in future versions. Please use " "TableListItem.time_partitioning.type_ instead.", - PendingDeprecationWarning, stacklevel=2) + PendingDeprecationWarning, + stacklevel=2, + ) if self.time_partitioning is not None: return self.time_partitioning.type_ @@ -941,14 +958,16 @@ def partition_expiration(self): warnings.warn( "This method will be deprecated in future versions. Please use " "TableListItem.time_partitioning.expiration_ms instead.", - PendingDeprecationWarning, stacklevel=2) + PendingDeprecationWarning, + stacklevel=2, + ) if self.time_partitioning is not None: return self.time_partitioning.expiration_ms @property def friendly_name(self): """Union[str, None]: Title of the table (defaults to :data:`None`).""" - return self._properties.get('friendlyName') + return self._properties.get("friendlyName") view_use_legacy_sql = property(_view_use_legacy_sql_getter) @@ -976,15 +995,14 @@ def _row_from_mapping(mapping, schema): row = [] for field in schema: - if field.mode == 'REQUIRED': + if field.mode == "REQUIRED": row.append(mapping[field.name]) - elif field.mode == 'REPEATED': + elif field.mode == "REPEATED": row.append(mapping.get(field.name, ())) - elif field.mode == 'NULLABLE': + elif field.mode == "NULLABLE": row.append(mapping.get(field.name)) else: - raise ValueError( - "Unknown field mode: {}".format(field.mode)) + raise ValueError("Unknown field mode: {}".format(field.mode)) return tuple(row) @@ -999,12 +1017,12 @@ class StreamingBuffer(object): """ def __init__(self, resource): - self.estimated_bytes = int(resource['estimatedBytes']) - self.estimated_rows = int(resource['estimatedRows']) + self.estimated_bytes = int(resource["estimatedBytes"]) + self.estimated_rows = int(resource["estimatedRows"]) # time is in milliseconds since the epoch. - self.oldest_entry_time = ( - google.cloud._helpers._datetime_from_microseconds( - 1000.0 * int(resource['oldestEntryTime']))) + self.oldest_entry_time = google.cloud._helpers._datetime_from_microseconds( + 1000.0 * int(resource["oldestEntryTime"]) + ) class Row(object): @@ -1020,7 +1038,7 @@ class Row(object): """ # Choose unusual field names to try to avoid conflict with schema fields. 
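    # For example (hypothetical values): Row(("Phred", 32), {"name": 0, "age": 1})
    # supports row.name, row["age"], row[1], and row.get("salary", default=None).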
- __slots__ = ('_xxx_values', '_xxx_field_to_index') + __slots__ = ("_xxx_values", "_xxx_field_to_index") def __init__(self, values, field_to_index): self._xxx_values = values @@ -1102,7 +1120,7 @@ def get(self, key, default=None): def __getattr__(self, name): value = self._xxx_field_to_index.get(name) if value is None: - raise AttributeError('no row field {!r}'.format(name)) + raise AttributeError("no row field {!r}".format(name)) return self._xxx_values[value] def __len__(self): @@ -1112,26 +1130,26 @@ def __getitem__(self, key): if isinstance(key, six.string_types): value = self._xxx_field_to_index.get(key) if value is None: - raise KeyError('no row field {!r}'.format(key)) + raise KeyError("no row field {!r}".format(key)) key = value return self._xxx_values[key] def __eq__(self, other): if not isinstance(other, Row): return NotImplemented - return( + return ( self._xxx_values == other._xxx_values - and self._xxx_field_to_index == other._xxx_field_to_index) + and self._xxx_field_to_index == other._xxx_field_to_index + ) def __ne__(self, other): return not self == other def __repr__(self): # sort field dict by value, for determinism - items = sorted(self._xxx_field_to_index.items(), - key=operator.itemgetter(1)) - f2i = '{' + ', '.join('%r: %d' % item for item in items) + '}' - return 'Row({}, {})'.format(self._xxx_values, f2i) + items = sorted(self._xxx_field_to_index.items(), key=operator.itemgetter(1)) + f2i = "{" + ", ".join("%r: %d" % item for item in items) + "}" + return "Row({}, {})".format(self._xxx_values, f2i) class RowIterator(HTTPIterator): @@ -1150,13 +1168,29 @@ class RowIterator(HTTPIterator): Extra query string parameters for the API call. """ - def __init__(self, client, api_request, path, schema, page_token=None, - max_results=None, page_size=None, extra_params=None): + def __init__( + self, + client, + api_request, + path, + schema, + page_token=None, + max_results=None, + page_size=None, + extra_params=None, + ): super(RowIterator, self).__init__( - client, api_request, path, item_to_value=_item_to_row, - items_key='rows', page_token=page_token, max_results=max_results, - extra_params=extra_params, page_start=_rows_page_start, - next_token='pageToken') + client, + api_request, + path, + item_to_value=_item_to_row, + items_key="rows", + page_token=page_token, + max_results=max_results, + extra_params=extra_params, + page_start=_rows_page_start, + next_token="pageToken", + ) self._schema = schema self._field_to_index = _helpers._field_to_index_mapping(schema) self._total_rows = None @@ -1171,11 +1205,10 @@ def _get_next_page_response(self): """ params = self._get_query_params() if self._page_size is not None: - params['maxResults'] = self._page_size + params["maxResults"] = self._page_size return self.api_request( - method=self._HTTP_METHOD, - path=self.path, - query_params=params) + method=self._HTTP_METHOD, path=self.path, query_params=params + ) @property def schema(self): @@ -1217,6 +1250,7 @@ class _EmptyRowIterator(object): are impossible to fetch, such as with query results for DDL CREATE VIEW statements. """ + schema = () pages = () total_rows = 0 @@ -1233,7 +1267,7 @@ def __iter__(self): class TimePartitioningType(object): """Specifies the type of time partitioning to perform.""" - DAY = 'DAY' + DAY = "DAY" """str: Generates one partition per day.""" @@ -1258,8 +1292,10 @@ class TimePartitioning(object): partition filter that can be used for partition elimination to be specified. 
""" - def __init__(self, type_=None, field=None, expiration_ms=None, - require_partition_filter=None): + + def __init__( + self, type_=None, field=None, expiration_ms=None, require_partition_filter=None + ): self._properties = {} if type_ is None: self.type_ = TimePartitioningType.DAY @@ -1277,39 +1313,39 @@ def type_(self): """google.cloud.bigquery.table.TimePartitioningType: The type of time partitioning to use. """ - return self._properties['type'] + return self._properties["type"] @type_.setter def type_(self, value): - self._properties['type'] = value + self._properties["type"] = value @property def field(self): """str: Field in the table to use for partitioning""" - return self._properties.get('field') + return self._properties.get("field") @field.setter def field(self, value): - self._properties['field'] = value + self._properties["field"] = value @property def expiration_ms(self): """int: Number of milliseconds to keep the storage for a partition.""" - return _helpers._int_or_none(self._properties.get('expirationMs')) + return _helpers._int_or_none(self._properties.get("expirationMs")) @expiration_ms.setter def expiration_ms(self, value): - self._properties['expirationMs'] = str(value) + self._properties["expirationMs"] = str(value) @property def require_partition_filter(self): """bool: Specifies whether partition filters are required for queries """ - return self._properties.get('requirePartitionFilter') + return self._properties.get("requirePartitionFilter") @require_partition_filter.setter def require_partition_filter(self, value): - self._properties['requirePartitionFilter'] = value + self._properties["requirePartitionFilter"] = value @classmethod def from_api_repr(cls, api_repr): @@ -1336,7 +1372,7 @@ def from_api_repr(cls, api_repr): google.cloud.bigquery.table.TimePartitioning: The ``TimePartitioning`` object. """ - instance = cls(api_repr['type']) + instance = cls(api_repr["type"]) instance._properties = api_repr return instance @@ -1370,8 +1406,8 @@ def __hash__(self): return hash(self._key()) def __repr__(self): - key_vals = ['{}={}'.format(key, val) for key, val in self._key()] - return 'TimePartitioning({})'.format(','.join(key_vals)) + key_vals = ["{}={}".format(key, val) for key, val in self._key()] + return "TimePartitioning({})".format(",".join(key_vals)) def _item_to_row(iterator, resource): @@ -1392,8 +1428,10 @@ def _item_to_row(iterator, resource): :rtype: :class:`~google.cloud.bigquery.table.Row` :returns: The next row in the page. """ - return Row(_helpers._row_tuple_from_json(resource, iterator.schema), - iterator._field_to_index) + return Row( + _helpers._row_tuple_from_json(resource, iterator.schema), + iterator._field_to_index, + ) # pylint: disable=unused-argument @@ -1409,8 +1447,10 @@ def _rows_page_start(iterator, page, response): :type response: dict :param response: The JSON API response for a page of rows in a table. 
""" - total_rows = response.get('totalRows') + total_rows = response.get("totalRows") if total_rows is not None: total_rows = int(total_rows) iterator._total_rows = total_rows + + # pylint: enable=unused-argument diff --git a/bigquery/tests/scrub_datasets.py b/bigquery/tests/scrub_datasets.py index 2e8981aa62e3..9a8ab3e7b7c5 100644 --- a/bigquery/tests/scrub_datasets.py +++ b/bigquery/tests/scrub_datasets.py @@ -8,8 +8,7 @@ def main(prefixes): client = Client() - pattern = re.compile( - '|'.join('^{}.*$'.format(prefix) for prefix in prefixes)) + pattern = re.compile("|".join("^{}.*$".format(prefix) for prefix in prefixes)) ds_items = list(client.list_datasets()) for dataset in ds_items: @@ -22,5 +21,5 @@ def main(prefixes): print(" NOT FOUND") -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 29d5fb908dea..fe5e3ce3dea0 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -27,6 +27,7 @@ import six import pytest + try: import pandas except ImportError: # pragma: NO COVER @@ -64,28 +65,32 @@ # Common table data used for many tests. ROWS = [ - ('Phred Phlyntstone', 32), - ('Bharney Rhubble', 33), - ('Wylma Phlyntstone', 29), - ('Bhettye Rhubble', 27), + ("Phred Phlyntstone", 32), + ("Bharney Rhubble", 33), + ("Wylma Phlyntstone", 29), + ("Bhettye Rhubble", 27), ] -HEADER_ROW = ('Full Name', 'Age') +HEADER_ROW = ("Full Name", "Age") SCHEMA = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), ] TIME_PARTITIONING_CLUSTERING_FIELDS_SCHEMA = [ - bigquery.SchemaField('transaction_time', 'TIMESTAMP', mode='REQUIRED'), - bigquery.SchemaField('transaction_id', 'INTEGER', mode='REQUIRED'), - bigquery.SchemaField('user_email', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('store_code', 'STRING', mode='REQUIRED'), + bigquery.SchemaField("transaction_time", "TIMESTAMP", mode="REQUIRED"), + bigquery.SchemaField("transaction_id", "INTEGER", mode="REQUIRED"), + bigquery.SchemaField("user_email", "STRING", mode="REQUIRED"), + bigquery.SchemaField("store_code", "STRING", mode="REQUIRED"), bigquery.SchemaField( - 'items', 'RECORD', mode='REPEATED', fields=[ - bigquery.SchemaField('item_code', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('quantity', 'INTEGER', mode='REQUIRED'), - bigquery.SchemaField('comments', 'STRING', mode='NULLABLE'), - bigquery.SchemaField('expiration_date', 'DATE', mode='REQUIRED'), - ]), + "items", + "RECORD", + mode="REPEATED", + fields=[ + bigquery.SchemaField("item_code", "STRING", mode="REQUIRED"), + bigquery.SchemaField("quantity", "INTEGER", mode="REQUIRED"), + bigquery.SchemaField("comments", "STRING", mode="NULLABLE"), + bigquery.SchemaField("expiration_date", "DATE", mode="REQUIRED"), + ], + ), ] @@ -94,22 +99,21 @@ def _has_rows(result): def _make_dataset_id(prefix): - return '%s%s' % (prefix, unique_resource_id()) + return "%s%s" % (prefix, unique_resource_id()) -def _load_json_schema(filename='data/schema.json'): +def _load_json_schema(filename="data/schema.json"): from google.cloud.bigquery.table import _parse_schema_resource json_filename = os.path.join(WHERE, filename) - with open(json_filename, 'r') as schema_file: + with open(json_filename, "r") as schema_file: return _parse_schema_resource(json.load(schema_file)) def _rate_limit_exceeded(forbidden): 
"""Predicate: pass only exceptions with 'rateLimitExceeded' as reason.""" - return any(error['reason'] == 'rateLimitExceeded' - for error in forbidden._errors) + return any(error["reason"] == "rateLimitExceeded" for error in forbidden._errors) # We need to wait to stay within the rate limits. @@ -125,6 +129,7 @@ class Config(object): This is a mutable stand-in to allow test set-up to modify global state. """ + CLIENT = None CURSOR = None @@ -135,15 +140,14 @@ def setUpModule(): class TestBigQuery(unittest.TestCase): - def setUp(self): self.to_delete = [] def tearDown(self): - def _still_in_use(bad_request): - return any(error['reason'] == 'resourceInUse' - for error in bad_request._errors) + return any( + error["reason"] == "resourceInUse" for error in bad_request._errors + ) retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use) retry_409_429 = RetryErrors((Conflict, TooManyRequests)) @@ -151,8 +155,7 @@ def _still_in_use(bad_request): if isinstance(doomed, storage.Bucket): retry_409_429(doomed.delete)(force=True) elif isinstance(doomed, (Dataset, bigquery.DatasetReference)): - retry_in_use(Config.CLIENT.delete_dataset)( - doomed, delete_contents=True) + retry_in_use(Config.CLIENT.delete_dataset)(doomed, delete_contents=True) elif isinstance(doomed, (Table, bigquery.TableReference)): retry_in_use(Config.CLIENT.delete_table)(doomed) else: @@ -164,10 +167,10 @@ def test_get_service_account_email(self): got = client.get_service_account_email() self.assertIsInstance(got, six.text_type) - self.assertIn('@', got) + self.assertIn("@", got) def test_create_dataset(self): - DATASET_ID = _make_dataset_id('create_dataset') + DATASET_ID = _make_dataset_id("create_dataset") dataset = self.temp_dataset(DATASET_ID) self.assertTrue(_dataset_exists(dataset)) @@ -175,66 +178,66 @@ def test_create_dataset(self): self.assertEqual(dataset.project, Config.CLIENT.project) def test_get_dataset(self): - dataset_id = _make_dataset_id('get_dataset') + dataset_id = _make_dataset_id("get_dataset") client = Config.CLIENT dataset_arg = Dataset(client.dataset(dataset_id)) - dataset_arg.friendly_name = 'Friendly' - dataset_arg.description = 'Description' + dataset_arg.friendly_name = "Friendly" + dataset_arg.description = "Description" dataset = retry_403(client.create_dataset)(dataset_arg) self.to_delete.append(dataset) dataset_ref = client.dataset(dataset_id) # Get with a reference. got = client.get_dataset(dataset_ref) - self.assertEqual(got.friendly_name, 'Friendly') - self.assertEqual(got.description, 'Description') + self.assertEqual(got.friendly_name, "Friendly") + self.assertEqual(got.description, "Description") # Get with a string. got = client.get_dataset(dataset_id) - self.assertEqual(got.friendly_name, 'Friendly') - self.assertEqual(got.description, 'Description') + self.assertEqual(got.friendly_name, "Friendly") + self.assertEqual(got.description, "Description") # Get with a fully-qualified string. 
- got = client.get_dataset('{}.{}'.format(client.project, dataset_id)) - self.assertEqual(got.friendly_name, 'Friendly') - self.assertEqual(got.description, 'Description') + got = client.get_dataset("{}.{}".format(client.project, dataset_id)) + self.assertEqual(got.friendly_name, "Friendly") + self.assertEqual(got.description, "Description") def test_update_dataset(self): - dataset = self.temp_dataset(_make_dataset_id('update_dataset')) + dataset = self.temp_dataset(_make_dataset_id("update_dataset")) self.assertTrue(_dataset_exists(dataset)) self.assertIsNone(dataset.friendly_name) self.assertIsNone(dataset.description) self.assertEqual(dataset.labels, {}) - dataset.friendly_name = 'Friendly' - dataset.description = 'Description' - dataset.labels = {'priority': 'high', 'color': 'blue'} + dataset.friendly_name = "Friendly" + dataset.description = "Description" + dataset.labels = {"priority": "high", "color": "blue"} ds2 = Config.CLIENT.update_dataset( - dataset, - ('friendly_name', 'description', 'labels')) - self.assertEqual(ds2.friendly_name, 'Friendly') - self.assertEqual(ds2.description, 'Description') - self.assertEqual(ds2.labels, {'priority': 'high', 'color': 'blue'}) + dataset, ("friendly_name", "description", "labels") + ) + self.assertEqual(ds2.friendly_name, "Friendly") + self.assertEqual(ds2.description, "Description") + self.assertEqual(ds2.labels, {"priority": "high", "color": "blue"}) ds2.labels = { - 'color': 'green', # change - 'shape': 'circle', # add - 'priority': None, # delete + "color": "green", # change + "shape": "circle", # add + "priority": None, # delete } - ds3 = Config.CLIENT.update_dataset(ds2, ['labels']) - self.assertEqual(ds3.labels, {'color': 'green', 'shape': 'circle'}) + ds3 = Config.CLIENT.update_dataset(ds2, ["labels"]) + self.assertEqual(ds3.labels, {"color": "green", "shape": "circle"}) # If we try to update using d2 again, it will fail because the # previous update changed the ETag. - ds2.description = 'no good' + ds2.description = "no good" with self.assertRaises(PreconditionFailed): - Config.CLIENT.update_dataset(ds2, ['description']) + Config.CLIENT.update_dataset(ds2, ["description"]) def test_list_datasets(self): datasets_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), + "new" + unique_resource_id(), + "newer" + unique_resource_id(), + "newest" + unique_resource_id(), ] for dataset_id in datasets_to_create: self.temp_dataset(dataset_id) @@ -243,20 +246,23 @@ def test_list_datasets(self): iterator = Config.CLIENT.list_datasets() all_datasets = list(iterator) self.assertIsNone(iterator.next_page_token) - created = [dataset for dataset in all_datasets - if dataset.dataset_id in datasets_to_create - and dataset.project == Config.CLIENT.project] + created = [ + dataset + for dataset in all_datasets + if dataset.dataset_id in datasets_to_create + and dataset.project == Config.CLIENT.project + ] self.assertEqual(len(created), len(datasets_to_create)) def test_list_datasets_w_project(self): # Retrieve datasets from a different project. 
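        # 'bigquery-public-data' is the Google-maintained public-dataset project,
        # so this listing does not depend on datasets created by the test run.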
- iterator = Config.CLIENT.list_datasets(project='bigquery-public-data') + iterator = Config.CLIENT.list_datasets(project="bigquery-public-data") all_datasets = frozenset([dataset.dataset_id for dataset in iterator]) - self.assertIn('usa_names', all_datasets) + self.assertIn("usa_names", all_datasets) def test_create_table(self): - dataset = self.temp_dataset(_make_dataset_id('create_table')) - table_id = 'test_table' + dataset = self.temp_dataset(_make_dataset_id("create_table")) + table_id = "test_table" table_arg = Table(dataset.table(table_id), schema=SCHEMA) self.assertFalse(_table_exists(table_arg)) @@ -270,17 +276,16 @@ def test_create_table_w_time_partitioning_w_clustering_fields(self): from google.cloud.bigquery.table import TimePartitioning from google.cloud.bigquery.table import TimePartitioningType - dataset = self.temp_dataset(_make_dataset_id('create_table_tp_cf')) - table_id = 'test_table' + dataset = self.temp_dataset(_make_dataset_id("create_table_tp_cf")) + table_id = "test_table" table_arg = Table( - dataset.table(table_id), - schema=TIME_PARTITIONING_CLUSTERING_FIELDS_SCHEMA) + dataset.table(table_id), schema=TIME_PARTITIONING_CLUSTERING_FIELDS_SCHEMA + ) self.assertFalse(_table_exists(table_arg)) - table_arg.time_partitioning = TimePartitioning( - field='transaction_time') + table_arg.time_partitioning = TimePartitioning(field="transaction_time") - table_arg.clustering_fields = ['user_email', 'store_code'] + table_arg.clustering_fields = ["user_email", "store_code"] table = retry_403(Config.CLIENT.create_table)(table_arg) self.to_delete.insert(0, table) @@ -288,11 +293,11 @@ def test_create_table_w_time_partitioning_w_clustering_fields(self): self.assertEqual(table.table_id, table_id) time_partitioning = table.time_partitioning self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) - self.assertEqual(time_partitioning.field, 'transaction_time') - self.assertEqual(table.clustering_fields, ['user_email', 'store_code']) + self.assertEqual(time_partitioning.field, "transaction_time") + self.assertEqual(table.clustering_fields, ["user_email", "store_code"]) def test_delete_dataset_with_string(self): - dataset_id = _make_dataset_id('delete_table_true') + dataset_id = _make_dataset_id("delete_table_true") dataset_ref = Config.CLIENT.dataset(dataset_id) retry_403(Config.CLIENT.create_dataset)(Dataset(dataset_ref)) self.assertTrue(_dataset_exists(dataset_ref)) @@ -300,11 +305,12 @@ def test_delete_dataset_with_string(self): self.assertFalse(_dataset_exists(dataset_ref)) def test_delete_dataset_delete_contents_true(self): - dataset_id = _make_dataset_id('delete_table_true') + dataset_id = _make_dataset_id("delete_table_true") dataset = retry_403(Config.CLIENT.create_dataset)( - Dataset(Config.CLIENT.dataset(dataset_id))) + Dataset(Config.CLIENT.dataset(dataset_id)) + ) - table_id = 'test_table' + table_id = "test_table" table_arg = Table(dataset.table(table_id), schema=SCHEMA) table = retry_403(Config.CLIENT.create_table)(table_arg) Config.CLIENT.delete_dataset(dataset, delete_contents=True) @@ -313,8 +319,9 @@ def test_delete_dataset_delete_contents_true(self): def test_delete_dataset_delete_contents_false(self): from google.api_core import exceptions - dataset = self.temp_dataset(_make_dataset_id('delete_table_false')) - table_id = 'test_table' + + dataset = self.temp_dataset(_make_dataset_id("delete_table_false")) + table_id = "test_table" table_arg = Table(dataset.table(table_id), schema=SCHEMA) retry_403(Config.CLIENT.create_table)(table_arg) @@ -322,9 +329,9 
@@ def test_delete_dataset_delete_contents_false(self): Config.CLIENT.delete_dataset(dataset) def test_get_table_w_public_dataset(self): - public = 'bigquery-public-data' - dataset_id = 'samples' - table_id = 'shakespeare' + public = "bigquery-public-data" + dataset_id = "samples" + table_id = "shakespeare" table_ref = DatasetReference(public, dataset_id).table(table_id) # Get table with reference. @@ -333,26 +340,24 @@ def test_get_table_w_public_dataset(self): self.assertEqual(table.dataset_id, dataset_id) self.assertEqual(table.project, public) schema_names = [field.name for field in table.schema] - self.assertEqual( - schema_names, ['word', 'word_count', 'corpus', 'corpus_date']) + self.assertEqual(schema_names, ["word", "word_count", "corpus", "corpus_date"]) # Get table with string. - table = Config.CLIENT.get_table( - '{}.{}.{}'.format(public, dataset_id, table_id)) + table = Config.CLIENT.get_table("{}.{}.{}".format(public, dataset_id, table_id)) self.assertEqual(table.table_id, table_id) self.assertEqual(table.dataset_id, dataset_id) self.assertEqual(table.project, public) def test_list_partitions(self): table_ref = DatasetReference( - 'bigquery-public-data', - 'ethereum_blockchain').table('blocks') + "bigquery-public-data", "ethereum_blockchain" + ).table("blocks") all_rows = Config.CLIENT.list_partitions(table_ref) - self.assertIn('20180801', all_rows) + self.assertIn("20180801", all_rows) self.assertGreater(len(all_rows), 1000) def test_list_tables(self): - dataset_id = _make_dataset_id('list_tables') + dataset_id = _make_dataset_id("list_tables") dataset = self.temp_dataset(dataset_id) # Retrieve tables before any are created for the dataset. iterator = Config.CLIENT.list_tables(dataset) @@ -362,9 +367,9 @@ def test_list_tables(self): # Insert some tables to be listed. tables_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), + "new" + unique_resource_id(), + "newer" + unique_resource_id(), + "newest" + unique_resource_id(), ] for table_name in tables_to_create: table = Table(dataset.table(table_name), schema=SCHEMA) @@ -375,9 +380,11 @@ def test_list_tables(self): iterator = Config.CLIENT.list_tables(dataset) all_tables = list(iterator) self.assertIsNone(iterator.next_page_token) - created = [table for table in all_tables - if (table.table_id in tables_to_create - and table.dataset_id == dataset_id)] + created = [ + table + for table in all_tables + if (table.table_id in tables_to_create and table.dataset_id == dataset_id) + ] self.assertEqual(len(created), len(tables_to_create)) # List tables with a string ID. @@ -386,13 +393,14 @@ def test_list_tables(self): # List tables with a fully-qualified string ID. 
iterator = Config.CLIENT.list_tables( - '{}.{}'.format(Config.CLIENT.project, dataset_id)) + "{}.{}".format(Config.CLIENT.project, dataset_id) + ) self.assertGreater(len(list(iterator)), 0) def test_update_table(self): - dataset = self.temp_dataset(_make_dataset_id('update_table')) + dataset = self.temp_dataset(_make_dataset_id("update_table")) - TABLE_NAME = 'test_table' + TABLE_NAME = "test_table" table_arg = Table(dataset.table(TABLE_NAME), schema=SCHEMA) self.assertFalse(_table_exists(table_arg)) table = retry_403(Config.CLIENT.create_table)(table_arg) @@ -401,48 +409,49 @@ def test_update_table(self): self.assertIsNone(table.friendly_name) self.assertIsNone(table.description) self.assertEqual(table.labels, {}) - table.friendly_name = 'Friendly' - table.description = 'Description' - table.labels = {'priority': 'high', 'color': 'blue'} + table.friendly_name = "Friendly" + table.description = "Description" + table.labels = {"priority": "high", "color": "blue"} table2 = Config.CLIENT.update_table( - table, ['friendly_name', 'description', 'labels']) + table, ["friendly_name", "description", "labels"] + ) - self.assertEqual(table2.friendly_name, 'Friendly') - self.assertEqual(table2.description, 'Description') - self.assertEqual(table2.labels, {'priority': 'high', 'color': 'blue'}) + self.assertEqual(table2.friendly_name, "Friendly") + self.assertEqual(table2.description, "Description") + self.assertEqual(table2.labels, {"priority": "high", "color": "blue"}) table2.description = None table2.labels = { - 'color': 'green', # change - 'shape': 'circle', # add - 'priority': None, # delete + "color": "green", # change + "shape": "circle", # add + "priority": None, # delete } - table3 = Config.CLIENT.update_table(table2, ['description', 'labels']) + table3 = Config.CLIENT.update_table(table2, ["description", "labels"]) self.assertIsNone(table3.description) - self.assertEqual(table3.labels, {'color': 'green', 'shape': 'circle'}) + self.assertEqual(table3.labels, {"color": "green", "shape": "circle"}) # If we try to update using table2 again, it will fail because the # previous update changed the ETag. 
- table2.description = 'no good' + table2.description = "no good" with self.assertRaises(PreconditionFailed): - Config.CLIENT.update_table(table2, ['description']) + Config.CLIENT.update_table(table2, ["description"]) def test_update_table_schema(self): - dataset = self.temp_dataset(_make_dataset_id('update_table')) + dataset = self.temp_dataset(_make_dataset_id("update_table")) - TABLE_NAME = 'test_table' + TABLE_NAME = "test_table" table_arg = Table(dataset.table(TABLE_NAME), schema=SCHEMA) self.assertFalse(_table_exists(table_arg)) table = retry_403(Config.CLIENT.create_table)(table_arg) self.to_delete.insert(0, table) self.assertTrue(_table_exists(table)) - voter = bigquery.SchemaField('voter', 'BOOLEAN', mode='NULLABLE') + voter = bigquery.SchemaField("voter", "BOOLEAN", mode="NULLABLE") schema = table.schema schema.append(voter) table.schema = schema - updated_table = Config.CLIENT.update_table(table, ['schema']) + updated_table = Config.CLIENT.update_table(table, ["schema"]) self.assertEqual(len(updated_table.schema), len(schema)) for found, expected in zip(updated_table.schema, schema): @@ -452,8 +461,7 @@ def test_update_table_schema(self): @staticmethod def _fetch_single_page(table, selected_fields=None): - iterator = Config.CLIENT.list_rows( - table, selected_fields=selected_fields) + iterator = Config.CLIENT.list_rows(table, selected_fields=selected_fields) page = six.next(iterator.pages) return list(page) @@ -462,13 +470,14 @@ def _create_table_many_columns(self, rowcount): # first column is named 'rowval', and has a value from 1..rowcount # Subsequent column is named col_ and contains the value N*rowval, # where N is between 1 and 9999 inclusive. - dsname = _make_dataset_id('wide_schema') + dsname = _make_dataset_id("wide_schema") dataset = self.temp_dataset(dsname) - table_id = 'many_columns' + table_id = "many_columns" table_ref = dataset.table(table_id) self.to_delete.insert(0, table_ref) - colprojections = ','.join( - ['r * {} as col_{}'.format(n, n) for n in range(1, 10000)]) + colprojections = ",".join( + ["r * {} as col_{}".format(n, n) for n in range(1, 10000)] + ) sql = """ CREATE TABLE {}.{} AS @@ -477,11 +486,13 @@ def _create_table_many_columns(self, rowcount): {} FROM UNNEST(GENERATE_ARRAY(1,{},1)) as r - """.format(dsname, table_id, colprojections, rowcount) + """.format( + dsname, table_id, colprojections, rowcount + ) query_job = Config.CLIENT.query(sql) query_job.result() - self.assertEqual(query_job.statement_type, 'CREATE_TABLE_AS_SELECT') - self.assertEqual(query_job.ddl_operation_performed, 'CREATE') + self.assertEqual(query_job.statement_type, "CREATE_TABLE_AS_SELECT") + self.assertEqual(query_job.ddl_operation_performed, "CREATE") self.assertEqual(query_job.ddl_target_table, table_ref) return table_ref @@ -490,9 +501,11 @@ def test_query_many_columns(self): # Test working with the widest schema BigQuery supports, 10k columns. 
row_count = 2 table_ref = self._create_table_many_columns(row_count) - rows = list(Config.CLIENT.query( - 'SELECT * FROM `{}.{}`'.format( - table_ref.dataset_id, table_ref.table_id))) + rows = list( + Config.CLIENT.query( + "SELECT * FROM `{}.{}`".format(table_ref.dataset_id, table_ref.table_id) + ) + ) self.assertEqual(len(rows), row_count) @@ -512,22 +525,21 @@ def test_query_many_columns(self): def test_insert_rows_then_dump_table(self): NOW_SECONDS = 1448911495.484366 - NOW = datetime.datetime.utcfromtimestamp( - NOW_SECONDS).replace(tzinfo=UTC) + NOW = datetime.datetime.utcfromtimestamp(NOW_SECONDS).replace(tzinfo=UTC) ROWS = [ - ('Phred Phlyntstone', 32, NOW), - ('Bharney Rhubble', 33, NOW + datetime.timedelta(seconds=10)), - ('Wylma Phlyntstone', 29, NOW + datetime.timedelta(seconds=20)), - ('Bhettye Rhubble', 27, None), + ("Phred Phlyntstone", 32, NOW), + ("Bharney Rhubble", 33, NOW + datetime.timedelta(seconds=10)), + ("Wylma Phlyntstone", 29, NOW + datetime.timedelta(seconds=20)), + ("Bhettye Rhubble", 27, None), ] ROW_IDS = range(len(ROWS)) - dataset = self.temp_dataset(_make_dataset_id('insert_rows_then_dump')) - TABLE_ID = 'test_table' + dataset = self.temp_dataset(_make_dataset_id("insert_rows_then_dump")) + TABLE_ID = "test_table" schema = [ - bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), - bigquery.SchemaField('now', 'TIMESTAMP'), + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + bigquery.SchemaField("now", "TIMESTAMP"), ] table_arg = Table(dataset.table(TABLE_ID), schema=schema) self.assertFalse(_table_exists(table_arg)) @@ -547,13 +559,13 @@ def test_insert_rows_then_dump_table(self): rows = retry(self._fetch_single_page)(table) row_tuples = [r.values() for r in rows] by_age = operator.itemgetter(1) - self.assertEqual(sorted(row_tuples, key=by_age), - sorted(ROWS, key=by_age)) + self.assertEqual(sorted(row_tuples, key=by_age), sorted(ROWS, key=by_age)) def test_load_table_from_local_avro_file_then_dump_table(self): from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import WriteDisposition - TABLE_NAME = 'test_table_avro' + + TABLE_NAME = "test_table_avro" ROWS = [ ("violet", 400), ("indigo", 445), @@ -561,19 +573,21 @@ def test_load_table_from_local_avro_file_then_dump_table(self): ("green", 510), ("yellow", 570), ("orange", 590), - ("red", 650)] + ("red", 650), + ] - dataset = self.temp_dataset(_make_dataset_id('load_local_then_dump')) + dataset = self.temp_dataset(_make_dataset_id("load_local_then_dump")) table_ref = dataset.table(TABLE_NAME) table = Table(table_ref) self.to_delete.insert(0, table) - with open(os.path.join(WHERE, 'data', 'colors.avro'), 'rb') as avrof: + with open(os.path.join(WHERE, "data", "colors.avro"), "rb") as avrof: config = bigquery.LoadJobConfig() config.source_format = SourceFormat.AVRO config.write_disposition = WriteDisposition.WRITE_TRUNCATE job = Config.CLIENT.load_table_from_file( - avrof, table_ref, job_config=config) + avrof, table_ref, job_config=config + ) # Retry until done. 
job.result(timeout=JOB_TIMEOUT) @@ -583,14 +597,16 @@ def test_load_table_from_local_avro_file_then_dump_table(self): rows = self._fetch_single_page(table) row_tuples = [r.values() for r in rows] by_wavelength = operator.itemgetter(1) - self.assertEqual(sorted(row_tuples, key=by_wavelength), - sorted(ROWS, key=by_wavelength)) + self.assertEqual( + sorted(row_tuples, key=by_wavelength), sorted(ROWS, key=by_wavelength) + ) def test_load_avro_from_uri_then_dump_table(self): from google.cloud.bigquery.job import CreateDisposition from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import WriteDisposition - table_name = 'test_table' + + table_name = "test_table" rows = [ ("violet", 400), ("indigo", 445), @@ -598,13 +614,14 @@ def test_load_avro_from_uri_then_dump_table(self): ("green", 510), ("yellow", 570), ("orange", 590), - ("red", 650) + ("red", 650), ] - with open(os.path.join(WHERE, 'data', 'colors.avro'), 'rb') as f: + with open(os.path.join(WHERE, "data", "colors.avro"), "rb") as f: GS_URL = self._write_avro_to_storage( - 'bq_load_test' + unique_resource_id(), 'colors.avro', f) + "bq_load_test" + unique_resource_id(), "colors.avro", f + ) - dataset = self.temp_dataset(_make_dataset_id('bq_load_test')) + dataset = self.temp_dataset(_make_dataset_id("bq_load_test")) table_arg = dataset.table(table_name) table = retry_403(Config.CLIENT.create_table)(Table(table_arg)) self.to_delete.insert(0, table) @@ -613,28 +630,28 @@ def test_load_avro_from_uri_then_dump_table(self): config.create_disposition = CreateDisposition.CREATE_NEVER config.source_format = SourceFormat.AVRO config.write_disposition = WriteDisposition.WRITE_EMPTY - job = Config.CLIENT.load_table_from_uri( - GS_URL, table_arg, job_config=config) + job = Config.CLIENT.load_table_from_uri(GS_URL, table_arg, job_config=config) job.result(timeout=JOB_TIMEOUT) self.assertEqual(job.output_rows, len(rows)) table = Config.CLIENT.get_table(table) fetched = self._fetch_single_page(table) row_tuples = [r.values() for r in fetched] - self.assertEqual(sorted(row_tuples, key=lambda x: x[1]), - sorted(rows, key=lambda x: x[1])) + self.assertEqual( + sorted(row_tuples, key=lambda x: x[1]), sorted(rows, key=lambda x: x[1]) + ) def test_load_table_from_uri_then_dump_table(self): from google.cloud.bigquery.job import CreateDisposition from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import WriteDisposition - TABLE_ID = 'test_table' + TABLE_ID = "test_table" GS_URL = self._write_csv_to_storage( - 'bq_load_test' + unique_resource_id(), 'person_ages.csv', - HEADER_ROW, ROWS) + "bq_load_test" + unique_resource_id(), "person_ages.csv", HEADER_ROW, ROWS + ) - dataset = self.temp_dataset(_make_dataset_id('load_gcs_then_dump')) + dataset = self.temp_dataset(_make_dataset_id("load_gcs_then_dump")) table_arg = Table(dataset.table(TABLE_ID), schema=SCHEMA) table = retry_403(Config.CLIENT.create_table)(table_arg) @@ -646,7 +663,8 @@ def test_load_table_from_uri_then_dump_table(self): config.source_format = SourceFormat.CSV config.write_disposition = WriteDisposition.WRITE_EMPTY job = Config.CLIENT.load_table_from_uri( - GS_URL, dataset.table(TABLE_ID), job_config=config) + GS_URL, dataset.table(TABLE_ID), job_config=config + ) # Allow for 90 seconds of "warm up" before rows visible. 
See # https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability @@ -657,62 +675,61 @@ def test_load_table_from_uri_then_dump_table(self): rows = self._fetch_single_page(table) row_tuples = [r.values() for r in rows] by_age = operator.itemgetter(1) - self.assertEqual(sorted(row_tuples, key=by_age), - sorted(ROWS, key=by_age)) + self.assertEqual(sorted(row_tuples, key=by_age), sorted(ROWS, key=by_age)) def test_load_table_from_file_w_explicit_location(self): # Create a temporary bucket for extract files. storage_client = storage.Client() - bucket_name = 'bq_load_table_eu_extract_test' + unique_resource_id() + bucket_name = "bq_load_table_eu_extract_test" + unique_resource_id() bucket = storage_client.bucket(bucket_name) - bucket.location = 'eu' + bucket.location = "eu" self.to_delete.append(bucket) bucket.create() # Create a temporary dataset & table in the EU. - table_bytes = six.BytesIO(b'a,3\nb,2\nc,1\n') + table_bytes = six.BytesIO(b"a,3\nb,2\nc,1\n") client = Config.CLIENT - dataset = self.temp_dataset( - _make_dataset_id('eu_load_file'), location='EU') - table_ref = dataset.table('letters') + dataset = self.temp_dataset(_make_dataset_id("eu_load_file"), location="EU") + table_ref = dataset.table("letters") job_config = bigquery.LoadJobConfig() job_config.skip_leading_rows = 0 job_config.schema = [ - bigquery.SchemaField('letter', 'STRING'), - bigquery.SchemaField('value', 'INTEGER'), + bigquery.SchemaField("letter", "STRING"), + bigquery.SchemaField("value", "INTEGER"), ] # Load the file to an EU dataset with an EU load job. load_job = client.load_table_from_file( - table_bytes, table_ref, location='EU', job_config=job_config) + table_bytes, table_ref, location="EU", job_config=job_config + ) load_job.result() job_id = load_job.job_id # Can get the job from the EU. - load_job = client.get_job(job_id, location='EU') + load_job = client.get_job(job_id, location="EU") self.assertEqual(job_id, load_job.job_id) - self.assertEqual('EU', load_job.location) + self.assertEqual("EU", load_job.location) self.assertTrue(load_job.exists()) # Cannot get the job from the US. with self.assertRaises(NotFound): - client.get_job(job_id, location='US') + client.get_job(job_id, location="US") load_job_us = client.get_job(job_id) - load_job_us._properties['jobReference']['location'] = 'US' + load_job_us._properties["jobReference"]["location"] = "US" self.assertFalse(load_job_us.exists()) with self.assertRaises(NotFound): load_job_us.reload() # Can cancel the job from the EU. self.assertTrue(load_job.cancel()) - load_job = client.cancel_job(job_id, location='EU') + load_job = client.cancel_job(job_id, location="EU") self.assertEqual(job_id, load_job.job_id) - self.assertEqual('EU', load_job.location) + self.assertEqual("EU", load_job.location) # Cannot cancel the job from the US. 
with self.assertRaises(NotFound): - client.cancel_job(job_id, location='US') + client.cancel_job(job_id, location="US") with self.assertRaises(NotFound): load_job_us.cancel() @@ -720,41 +737,32 @@ def test_load_table_from_file_w_explicit_location(self): table = client.get_table(table_ref) self.assertEqual(table.num_rows, 3) rows = [(row.letter, row.value) for row in client.list_rows(table)] - self.assertEqual( - list(sorted(rows)), [('a', 3), ('b', 2), ('c', 1)]) + self.assertEqual(list(sorted(rows)), [("a", 3), ("b", 2), ("c", 1)]) # Verify location behavior with queries query_config = bigquery.QueryJobConfig() query_config.dry_run = True - query_string = 'SELECT * FROM `{}.letters` LIMIT 1'.format( - dataset.dataset_id) + query_string = "SELECT * FROM `{}.letters` LIMIT 1".format(dataset.dataset_id) - eu_query = client.query( - query_string, - location='EU', - job_config=query_config) + eu_query = client.query(query_string, location="EU", job_config=query_config) self.assertTrue(eu_query.done) # Cannot query from US. with self.assertRaises(BadRequest): - list(client.query( - query_string, - location='US', - job_config=query_config)) + list(client.query(query_string, location="US", job_config=query_config)) # Cannot copy from US. with self.assertRaises(BadRequest): client.copy_table( - table_ref, dataset.table('letters2_us'), - location='US').result() + table_ref, dataset.table("letters2_us"), location="US" + ).result() # Cannot extract from US. with self.assertRaises(BadRequest): client.extract_table( - table_ref, - 'gs://{}/letters-us.csv'.format(bucket_name), - location='US').result() + table_ref, "gs://{}/letters-us.csv".format(bucket_name), location="US" + ).result() def _create_storage(self, bucket_name, blob_name): storage_client = storage.Client() @@ -766,35 +774,34 @@ def _create_storage(self, bucket_name, blob_name): return bucket.blob(blob_name) - def _write_csv_to_storage(self, bucket_name, blob_name, header_row, - data_rows): + def _write_csv_to_storage(self, bucket_name, blob_name, header_row, data_rows): from google.cloud._testing import _NamedTemporaryFile blob = self._create_storage(bucket_name, blob_name) with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as csv_write: + with open(temp.name, "w") as csv_write: writer = csv.writer(csv_write) writer.writerow(header_row) writer.writerows(data_rows) - with open(temp.name, 'rb') as csv_read: - blob.upload_from_file(csv_read, content_type='text/csv') + with open(temp.name, "rb") as csv_read: + blob.upload_from_file(csv_read, content_type="text/csv") self.to_delete.insert(0, blob) - return 'gs://{}/{}'.format(bucket_name, blob_name) + return "gs://{}/{}".format(bucket_name, blob_name) def _write_avro_to_storage(self, bucket_name, blob_name, avro_file): blob = self._create_storage(bucket_name, blob_name) - blob.upload_from_file(avro_file, - content_type='application/x-avro-binary') + blob.upload_from_file(avro_file, content_type="application/x-avro-binary") self.to_delete.insert(0, blob) - return 'gs://{}/{}'.format(bucket_name, blob_name) + return "gs://{}/{}".format(bucket_name, blob_name) def _load_table_for_extract_table( - self, storage_client, rows, bucket_name, blob_name, table): + self, storage_client, rows, bucket_name, blob_name, table + ): from google.cloud._testing import _NamedTemporaryFile - gs_url = 'gs://{}/{}'.format(bucket_name, blob_name) + gs_url = "gs://{}/{}".format(bucket_name, blob_name) # In the **very** rare case the bucket name is reserved, this # fails with a ConnectionError. 
@@ -803,21 +810,20 @@ def _load_table_for_extract_table( blob = bucket.blob(blob_name) with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as csv_write: + with open(temp.name, "w") as csv_write: writer = csv.writer(csv_write) writer.writerow(HEADER_ROW) writer.writerows(rows) - with open(temp.name, 'rb') as csv_read: - blob.upload_from_file(csv_read, content_type='text/csv') + with open(temp.name, "rb") as csv_read: + blob.upload_from_file(csv_read, content_type="text/csv") self.to_delete.insert(0, blob) dataset = self.temp_dataset(table.dataset_id) table_ref = dataset.table(table.table_id) config = bigquery.LoadJobConfig() config.autodetect = True - job = Config.CLIENT.load_table_from_uri(gs_url, table_ref, - job_config=config) + job = Config.CLIENT.load_table_from_uri(gs_url, table_ref, job_config=config) # TODO(jba): do we need this retry now that we have job.result()? # Allow for 90 seconds of "warm up" before rows visible. See # https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability @@ -830,26 +836,27 @@ def test_extract_table(self): storage_client = StorageClient() local_id = unique_resource_id() - bucket_name = 'bq_extract_test' + local_id - blob_name = 'person_ages.csv' - dataset_id = _make_dataset_id('load_gcs_then_extract') - table_id = 'test_table' + bucket_name = "bq_extract_test" + local_id + blob_name = "person_ages.csv" + dataset_id = _make_dataset_id("load_gcs_then_extract") + table_id = "test_table" table_ref = Config.CLIENT.dataset(dataset_id).table(table_id) table = Table(table_ref) self.to_delete.insert(0, table) self._load_table_for_extract_table( - storage_client, ROWS, bucket_name, blob_name, table_ref) + storage_client, ROWS, bucket_name, blob_name, table_ref + ) bucket = storage_client.bucket(bucket_name) - destination_blob_name = 'person_ages_out.csv' + destination_blob_name = "person_ages_out.csv" destination = bucket.blob(destination_blob_name) - destination_uri = 'gs://{}/person_ages_out.csv'.format(bucket_name) + destination_uri = "gs://{}/person_ages_out.csv".format(bucket_name) job = Config.CLIENT.extract_table(table_ref, destination_uri) job.result(timeout=100) self.to_delete.insert(0, destination) - got = destination.download_as_string().decode('utf-8') - self.assertIn('Bharney Rhubble', got) + got = destination.download_as_string().decode("utf-8") + self.assertIn("Bharney Rhubble", got) def test_copy_table(self): # If we create a new table to copy from, the test won't work @@ -857,13 +864,12 @@ def test_copy_table(self): # and copy jobs don't read the streaming buffer. # We could wait for the streaming buffer to empty, but that could # take minutes. Instead we copy a small public table. 
- source_dataset = DatasetReference('bigquery-public-data', 'samples') - source_ref = source_dataset.table('shakespeare') - dest_dataset = self.temp_dataset(_make_dataset_id('copy_table')) - dest_ref = dest_dataset.table('destination_table') + source_dataset = DatasetReference("bigquery-public-data", "samples") + source_ref = source_dataset.table("shakespeare") + dest_dataset = self.temp_dataset(_make_dataset_id("copy_table")) + dest_ref = dest_dataset.table("destination_table") job_config = bigquery.CopyJobConfig() - job = Config.CLIENT.copy_table( - source_ref, dest_ref, job_config=job_config) + job = Config.CLIENT.copy_table(source_ref, dest_ref, job_config=job_config) job.result() dest_table = Config.CLIENT.get_table(dest_ref) @@ -873,10 +879,10 @@ def test_copy_table(self): self.assertTrue(len(got_rows) > 0) def test_job_cancel(self): - DATASET_ID = _make_dataset_id('job_cancel') - JOB_ID_PREFIX = 'fetch_' + DATASET_ID - TABLE_NAME = 'test_table' - QUERY = 'SELECT * FROM %s.%s' % (DATASET_ID, TABLE_NAME) + DATASET_ID = _make_dataset_id("job_cancel") + JOB_ID_PREFIX = "fetch_" + DATASET_ID + TABLE_NAME = "test_table" + QUERY = "SELECT * FROM %s.%s" % (DATASET_ID, TABLE_NAME) dataset = self.temp_dataset(DATASET_ID) @@ -900,17 +906,15 @@ def test_get_failed_job(self): # issue 4246 from google.api_core.exceptions import BadRequest - JOB_ID = 'invalid_{}'.format(str(uuid.uuid4())) - QUERY = 'SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);' - PARAM = bigquery.ScalarQueryParameter( - 'ts_value', 'TIMESTAMP', 1.4810976E9) + JOB_ID = "invalid_{}".format(str(uuid.uuid4())) + QUERY = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);" + PARAM = bigquery.ScalarQueryParameter("ts_value", "TIMESTAMP", 1.4810976e9) job_config = bigquery.QueryJobConfig() job_config.query_parameters = [PARAM] with self.assertRaises(BadRequest): - Config.CLIENT.query( - QUERY, job_id=JOB_ID, job_config=job_config).result() + Config.CLIENT.query(QUERY, job_id=JOB_ID, job_config=job_config).result() job = Config.CLIENT.get_job(JOB_ID) @@ -919,176 +923,117 @@ def test_get_failed_job(self): def test_query_w_legacy_sql_types(self): naive = datetime.datetime(2016, 12, 5, 12, 41, 9) - stamp = '%s %s' % (naive.date().isoformat(), naive.time().isoformat()) + stamp = "%s %s" % (naive.date().isoformat(), naive.time().isoformat()) zoned = naive.replace(tzinfo=UTC) examples = [ - { - 'sql': 'SELECT 1', - 'expected': 1, - }, - { - 'sql': 'SELECT 1.3', - 'expected': 1.3, - }, - { - 'sql': 'SELECT TRUE', - 'expected': True, - }, - { - 'sql': 'SELECT "ABC"', - 'expected': 'ABC', - }, - { - 'sql': 'SELECT CAST("foo" AS BYTES)', - 'expected': b'foo', - }, - { - 'sql': 'SELECT CAST("%s" AS TIMESTAMP)' % (stamp,), - 'expected': zoned, - }, + {"sql": "SELECT 1", "expected": 1}, + {"sql": "SELECT 1.3", "expected": 1.3}, + {"sql": "SELECT TRUE", "expected": True}, + {"sql": 'SELECT "ABC"', "expected": "ABC"}, + {"sql": 'SELECT CAST("foo" AS BYTES)', "expected": b"foo"}, + {"sql": 'SELECT CAST("%s" AS TIMESTAMP)' % (stamp,), "expected": zoned}, ] for example in examples: job_config = bigquery.QueryJobConfig() job_config.use_legacy_sql = True - rows = list(Config.CLIENT.query( - example['sql'], job_config=job_config)) + rows = list(Config.CLIENT.query(example["sql"], job_config=job_config)) self.assertEqual(len(rows), 1) self.assertEqual(len(rows[0]), 1) - self.assertEqual(rows[0][0], example['expected']) + self.assertEqual(rows[0][0], example["expected"]) def _generate_standard_sql_types_examples(self): naive = datetime.datetime(2016, 12, 5, 
12, 41, 9) naive_microseconds = datetime.datetime(2016, 12, 5, 12, 41, 9, 250000) - stamp = '%s %s' % (naive.date().isoformat(), naive.time().isoformat()) - stamp_microseconds = stamp + '.250000' + stamp = "%s %s" % (naive.date().isoformat(), naive.time().isoformat()) + stamp_microseconds = stamp + ".250000" zoned = naive.replace(tzinfo=UTC) zoned_microseconds = naive_microseconds.replace(tzinfo=UTC) - numeric = decimal.Decimal('123456789.123456789') + numeric = decimal.Decimal("123456789.123456789") return [ + {"sql": "SELECT 1", "expected": 1}, + {"sql": "SELECT 1.3", "expected": 1.3}, + {"sql": "SELECT TRUE", "expected": True}, + {"sql": 'SELECT "ABC"', "expected": "ABC"}, + {"sql": 'SELECT CAST("foo" AS BYTES)', "expected": b"foo"}, + {"sql": 'SELECT TIMESTAMP "%s"' % (stamp,), "expected": zoned}, { - 'sql': 'SELECT 1', - 'expected': 1, - }, - { - 'sql': 'SELECT 1.3', - 'expected': 1.3, - }, - { - 'sql': 'SELECT TRUE', - 'expected': True, - }, - { - 'sql': 'SELECT "ABC"', - 'expected': 'ABC', + "sql": 'SELECT TIMESTAMP "%s"' % (stamp_microseconds,), + "expected": zoned_microseconds, }, + {"sql": 'SELECT DATETIME(TIMESTAMP "%s")' % (stamp,), "expected": naive}, { - 'sql': 'SELECT CAST("foo" AS BYTES)', - 'expected': b'foo', + "sql": 'SELECT DATETIME(TIMESTAMP "%s")' % (stamp_microseconds,), + "expected": naive_microseconds, }, + {"sql": 'SELECT DATE(TIMESTAMP "%s")' % (stamp,), "expected": naive.date()}, + {"sql": 'SELECT TIME(TIMESTAMP "%s")' % (stamp,), "expected": naive.time()}, + {"sql": 'SELECT NUMERIC "%s"' % (numeric,), "expected": numeric}, + {"sql": "SELECT (1, 2)", "expected": {"_field_1": 1, "_field_2": 2}}, { - 'sql': 'SELECT TIMESTAMP "%s"' % (stamp,), - 'expected': zoned, - }, - { - 'sql': 'SELECT TIMESTAMP "%s"' % (stamp_microseconds,), - 'expected': zoned_microseconds, - }, - { - 'sql': 'SELECT DATETIME(TIMESTAMP "%s")' % (stamp,), - 'expected': naive, - }, - { - 'sql': 'SELECT DATETIME(TIMESTAMP "%s")' % ( - stamp_microseconds,), - 'expected': naive_microseconds, - }, - { - 'sql': 'SELECT DATE(TIMESTAMP "%s")' % (stamp,), - 'expected': naive.date(), - }, - { - 'sql': 'SELECT TIME(TIMESTAMP "%s")' % (stamp,), - 'expected': naive.time(), - }, - { - 'sql': 'SELECT NUMERIC "%s"' % (numeric,), - 'expected': numeric, - }, - { - 'sql': 'SELECT (1, 2)', - 'expected': {'_field_1': 1, '_field_2': 2}, - }, - { - 'sql': 'SELECT ((1, 2), (3, 4), 5)', - 'expected': { - '_field_1': {'_field_1': 1, '_field_2': 2}, - '_field_2': {'_field_1': 3, '_field_2': 4}, - '_field_3': 5, + "sql": "SELECT ((1, 2), (3, 4), 5)", + "expected": { + "_field_1": {"_field_1": 1, "_field_2": 2}, + "_field_2": {"_field_1": 3, "_field_2": 4}, + "_field_3": 5, }, }, + {"sql": "SELECT [1, 2, 3]", "expected": [1, 2, 3]}, { - 'sql': 'SELECT [1, 2, 3]', - 'expected': [1, 2, 3], - }, - { - 'sql': 'SELECT ([1, 2], 3, [4, 5])', - 'expected': - {'_field_1': [1, 2], '_field_2': 3, '_field_3': [4, 5]}, + "sql": "SELECT ([1, 2], 3, [4, 5])", + "expected": {"_field_1": [1, 2], "_field_2": 3, "_field_3": [4, 5]}, }, { - 'sql': 'SELECT [(1, 2, 3), (4, 5, 6)]', - 'expected': [ - {'_field_1': 1, '_field_2': 2, '_field_3': 3}, - {'_field_1': 4, '_field_2': 5, '_field_3': 6}, + "sql": "SELECT [(1, 2, 3), (4, 5, 6)]", + "expected": [ + {"_field_1": 1, "_field_2": 2, "_field_3": 3}, + {"_field_1": 4, "_field_2": 5, "_field_3": 6}, ], }, { - 'sql': 'SELECT [([1, 2, 3], 4), ([5, 6], 7)]', - 'expected': [ - {u'_field_1': [1, 2, 3], u'_field_2': 4}, - {u'_field_1': [5, 6], u'_field_2': 7}, + "sql": "SELECT [([1, 2, 3], 4), ([5, 6], 
7)]", + "expected": [ + {u"_field_1": [1, 2, 3], u"_field_2": 4}, + {u"_field_1": [5, 6], u"_field_2": 7}, ], }, { - 'sql': 'SELECT ARRAY(SELECT STRUCT([1, 2]))', - 'expected': [{u'_field_1': [1, 2]}], - }, - { - 'sql': 'SELECT ST_GeogPoint(1, 2)', - 'expected': 'POINT(1 2)', + "sql": "SELECT ARRAY(SELECT STRUCT([1, 2]))", + "expected": [{u"_field_1": [1, 2]}], }, + {"sql": "SELECT ST_GeogPoint(1, 2)", "expected": "POINT(1 2)"}, ] def test_query_w_standard_sql_types(self): examples = self._generate_standard_sql_types_examples() for example in examples: - rows = list(Config.CLIENT.query(example['sql'])) + rows = list(Config.CLIENT.query(example["sql"])) self.assertEqual(len(rows), 1) self.assertEqual(len(rows[0]), 1) - self.assertEqual(rows[0][0], example['expected']) + self.assertEqual(rows[0][0], example["expected"]) def test_query_w_failed_query(self): from google.api_core.exceptions import BadRequest with self.assertRaises(BadRequest): - Config.CLIENT.query('invalid syntax;').result() + Config.CLIENT.query("invalid syntax;").result() def test_query_w_wrong_config(self): from google.cloud.bigquery.job import LoadJobConfig - good_query = 'SELECT 1;' - rows = list(Config.CLIENT.query('SELECT 1;').result()) + good_query = "SELECT 1;" + rows = list(Config.CLIENT.query("SELECT 1;").result()) assert rows[0][0] == 1 bad_config = LoadJobConfig() - bad_config.destination = Config.CLIENT.dataset('dset').table('tbl') + bad_config.destination = Config.CLIENT.dataset("dset").table("tbl") with self.assertRaises(Exception): Config.CLIENT.query(good_query, job_config=bad_config).result() def test_query_w_timeout(self): query_job = Config.CLIENT.query( - 'SELECT * FROM `bigquery-public-data.github_repos.commits`;', - job_id_prefix='test_query_w_timeout_') + "SELECT * FROM `bigquery-public-data.github_repos.commits`;", + job_id_prefix="test_query_w_timeout_", + ) with self.assertRaises(concurrent.futures.TimeoutError): # 1 second is much too short for this query. @@ -1130,8 +1075,9 @@ def test_query_statistics(self): ON lside.year = rside.year """, - location='US', - job_config=job_config) + location="US", + job_config=job_config, + ) # run the job to completion query_job.result() @@ -1142,10 +1088,10 @@ def test_query_statistics(self): self.assertTrue(query_job.done) self.assertFalse(query_job.dry_run) self.assertIsNone(query_job.num_dml_affected_rows) - self.assertEqual(query_job.priority, 'INTERACTIVE') + self.assertEqual(query_job.priority, "INTERACTIVE") self.assertGreater(query_job.total_bytes_billed, 1) self.assertGreater(query_job.total_bytes_processed, 1) - self.assertEqual(query_job.statement_type, 'SELECT') + self.assertEqual(query_job.statement_type, "SELECT") self.assertGreater(query_job.slot_millis, 1) # Make assertions on the shape of the query plan. @@ -1159,7 +1105,7 @@ def test_query_statistics(self): self.assertGreater(first_stage.parallel_inputs, 0) self.assertGreater(first_stage.completed_parallel_inputs, 0) self.assertGreater(first_stage.shuffle_output_bytes, 0) - self.assertEqual(first_stage.status, 'COMPLETE') + self.assertEqual(first_stage.status, "COMPLETE") # Query plan is a digraph. Ensure it has inter-stage links, # but not every stage has inputs. 
@@ -1173,16 +1119,16 @@ def test_query_statistics(self):
     def test_dbapi_w_standard_sql_types(self):
         examples = self._generate_standard_sql_types_examples()
         for example in examples:
-            Config.CURSOR.execute(example['sql'])
+            Config.CURSOR.execute(example["sql"])
             self.assertEqual(Config.CURSOR.rowcount, 1)
             row = Config.CURSOR.fetchone()
             self.assertEqual(len(row), 1)
-            self.assertEqual(row[0], example['expected'])
+            self.assertEqual(row[0], example["expected"])
             row = Config.CURSOR.fetchone()
             self.assertIsNone(row)

     def test_dbapi_fetchall(self):
-        query = 'SELECT * FROM UNNEST([(1, 2), (3, 4), (5, 6)])'
+        query = "SELECT * FROM UNNEST([(1, 2), (3, 4), (5, 6)])"

         for arraysize in range(1, 5):
             Config.CURSOR.execute(query)
@@ -1199,36 +1145,36 @@ def _load_table_for_dml(self, rows, dataset_id, table_id):
         from google.cloud.bigquery.job import WriteDisposition

         dataset = self.temp_dataset(dataset_id)
-        greeting = bigquery.SchemaField(
-            'greeting', 'STRING', mode='NULLABLE')
+        greeting = bigquery.SchemaField("greeting", "STRING", mode="NULLABLE")
         table_ref = dataset.table(table_id)
         table_arg = Table(table_ref, schema=[greeting])
         table = retry_403(Config.CLIENT.create_table)(table_arg)
         self.to_delete.insert(0, table)

         with _NamedTemporaryFile() as temp:
-            with open(temp.name, 'w') as csv_write:
+            with open(temp.name, "w") as csv_write:
                 writer = csv.writer(csv_write)
-                writer.writerow(('Greeting',))
+                writer.writerow(("Greeting",))
                 writer.writerows(rows)

-            with open(temp.name, 'rb') as csv_read:
+            with open(temp.name, "rb") as csv_read:
                 config = bigquery.LoadJobConfig()
                 config.source_format = SourceFormat.CSV
                 config.skip_leading_rows = 1
                 config.create_disposition = CreateDisposition.CREATE_NEVER
                 config.write_disposition = WriteDisposition.WRITE_EMPTY
                 job = Config.CLIENT.load_table_from_file(
-                    csv_read, table_ref, job_config=config)
+                    csv_read, table_ref, job_config=config
+                )

         # Retry until done.
job.result(timeout=JOB_TIMEOUT) self._fetch_single_page(table) def test_query_w_dml(self): - dataset_name = _make_dataset_id('dml_query') - table_name = 'test_table' - self._load_table_for_dml([('Hello World',)], dataset_name, table_name) + dataset_name = _make_dataset_id("dml_query") + table_name = "test_table" + self._load_table_for_dml([("Hello World",)], dataset_name, table_name) query_template = """UPDATE {}.{} SET greeting = 'Guten Tag' WHERE greeting = 'Hello World' @@ -1236,15 +1182,16 @@ def test_query_w_dml(self): query_job = Config.CLIENT.query( query_template.format(dataset_name, table_name), - job_id_prefix='test_query_w_dml_') + job_id_prefix="test_query_w_dml_", + ) query_job.result() self.assertEqual(query_job.num_dml_affected_rows, 1) def test_dbapi_w_dml(self): - dataset_name = _make_dataset_id('dml_dbapi') - table_name = 'test_table' - self._load_table_for_dml([('Hello World',)], dataset_name, table_name) + dataset_name = _make_dataset_id("dml_dbapi") + table_name = "test_table" + self._load_table_for_dml([("Hello World",)], dataset_name, table_name) query_template = """UPDATE {}.{} SET greeting = 'Guten Tag' WHERE greeting = 'Hello World' @@ -1252,7 +1199,8 @@ def test_dbapi_w_dml(self): Config.CURSOR.execute( query_template.format(dataset_name, table_name), - job_id='test_dbapi_w_dml_{}'.format(str(uuid.uuid4()))) + job_id="test_dbapi_w_dml_{}".format(str(uuid.uuid4())), + ) self.assertEqual(Config.CURSOR.rowcount, 1) self.assertIsNone(Config.CURSOR.fetchone()) @@ -1261,297 +1209,269 @@ def test_query_w_query_params(self): from google.cloud.bigquery.query import ArrayQueryParameter from google.cloud.bigquery.query import ScalarQueryParameter from google.cloud.bigquery.query import StructQueryParameter - question = 'What is the answer to life, the universe, and everything?' + + question = "What is the answer to life, the universe, and everything?" 
question_param = ScalarQueryParameter( - name='question', type_='STRING', value=question) + name="question", type_="STRING", value=question + ) answer = 42 - answer_param = ScalarQueryParameter( - name='answer', type_='INT64', value=answer) + answer_param = ScalarQueryParameter(name="answer", type_="INT64", value=answer) pi = 3.1415926 - pi_param = ScalarQueryParameter( - name='pi', type_='FLOAT64', value=pi) - pi_numeric = decimal.Decimal('3.141592654') + pi_param = ScalarQueryParameter(name="pi", type_="FLOAT64", value=pi) + pi_numeric = decimal.Decimal("3.141592654") pi_numeric_param = ScalarQueryParameter( - name='pi_numeric_param', type_='NUMERIC', - value=pi_numeric) + name="pi_numeric_param", type_="NUMERIC", value=pi_numeric + ) truthy = True - truthy_param = ScalarQueryParameter( - name='truthy', type_='BOOL', value=truthy) - beef = b'DEADBEEF' - beef_param = ScalarQueryParameter( - name='beef', type_='BYTES', value=beef) + truthy_param = ScalarQueryParameter(name="truthy", type_="BOOL", value=truthy) + beef = b"DEADBEEF" + beef_param = ScalarQueryParameter(name="beef", type_="BYTES", value=beef) naive = datetime.datetime(2016, 12, 5, 12, 41, 9) - naive_param = ScalarQueryParameter( - name='naive', type_='DATETIME', value=naive) + naive_param = ScalarQueryParameter(name="naive", type_="DATETIME", value=naive) naive_date_param = ScalarQueryParameter( - name='naive_date', type_='DATE', value=naive.date()) + name="naive_date", type_="DATE", value=naive.date() + ) naive_time_param = ScalarQueryParameter( - name='naive_time', type_='TIME', value=naive.time()) + name="naive_time", type_="TIME", value=naive.time() + ) zoned = naive.replace(tzinfo=UTC) - zoned_param = ScalarQueryParameter( - name='zoned', type_='TIMESTAMP', value=zoned) + zoned_param = ScalarQueryParameter(name="zoned", type_="TIMESTAMP", value=zoned) array_param = ArrayQueryParameter( - name='array_param', array_type='INT64', values=[1, 2]) - struct_param = StructQueryParameter( - 'hitchhiker', question_param, answer_param) - phred_name = 'Phred Phlyntstone' + name="array_param", array_type="INT64", values=[1, 2] + ) + struct_param = StructQueryParameter("hitchhiker", question_param, answer_param) + phred_name = "Phred Phlyntstone" phred_name_param = ScalarQueryParameter( - name='name', type_='STRING', value=phred_name) + name="name", type_="STRING", value=phred_name + ) phred_age = 32 phred_age_param = ScalarQueryParameter( - name='age', type_='INT64', value=phred_age) - phred_param = StructQueryParameter( - None, phred_name_param, phred_age_param) - bharney_name = 'Bharney Rhubbyl' + name="age", type_="INT64", value=phred_age + ) + phred_param = StructQueryParameter(None, phred_name_param, phred_age_param) + bharney_name = "Bharney Rhubbyl" bharney_name_param = ScalarQueryParameter( - name='name', type_='STRING', value=bharney_name) + name="name", type_="STRING", value=bharney_name + ) bharney_age = 31 bharney_age_param = ScalarQueryParameter( - name='age', type_='INT64', value=bharney_age) + name="age", type_="INT64", value=bharney_age + ) bharney_param = StructQueryParameter( - None, bharney_name_param, bharney_age_param) + None, bharney_name_param, bharney_age_param + ) characters_param = ArrayQueryParameter( - name=None, array_type='RECORD', - values=[phred_param, bharney_param]) - hero_param = StructQueryParameter( - 'hero', phred_name_param, phred_age_param) + name=None, array_type="RECORD", values=[phred_param, bharney_param] + ) + hero_param = StructQueryParameter("hero", phred_name_param, phred_age_param) 
sidekick_param = StructQueryParameter( - 'sidekick', bharney_name_param, bharney_age_param) - roles_param = StructQueryParameter( - 'roles', hero_param, sidekick_param) + "sidekick", bharney_name_param, bharney_age_param + ) + roles_param = StructQueryParameter("roles", hero_param, sidekick_param) friends_param = ArrayQueryParameter( - name='friends', array_type='STRING', - values=[phred_name, bharney_name]) + name="friends", array_type="STRING", values=[phred_name, bharney_name] + ) with_friends_param = StructQueryParameter(None, friends_param) top_left_param = StructQueryParameter( - 'top_left', - ScalarQueryParameter('x', 'INT64', 12), - ScalarQueryParameter('y', 'INT64', 102)) + "top_left", + ScalarQueryParameter("x", "INT64", 12), + ScalarQueryParameter("y", "INT64", 102), + ) bottom_right_param = StructQueryParameter( - 'bottom_right', - ScalarQueryParameter('x', 'INT64', 22), - ScalarQueryParameter('y', 'INT64', 92)) + "bottom_right", + ScalarQueryParameter("x", "INT64", 22), + ScalarQueryParameter("y", "INT64", 92), + ) rectangle_param = StructQueryParameter( - 'rectangle', top_left_param, bottom_right_param) + "rectangle", top_left_param, bottom_right_param + ) examples = [ { - 'sql': 'SELECT @question', - 'expected': question, - 'query_parameters': [question_param], - }, - { - 'sql': 'SELECT @answer', - 'expected': answer, - 'query_parameters': [answer_param], - }, - { - 'sql': 'SELECT @pi', - 'expected': pi, - 'query_parameters': [pi_param], + "sql": "SELECT @question", + "expected": question, + "query_parameters": [question_param], }, { - 'sql': 'SELECT @pi_numeric_param', - 'expected': pi_numeric, - 'query_parameters': [pi_numeric_param], + "sql": "SELECT @answer", + "expected": answer, + "query_parameters": [answer_param], }, + {"sql": "SELECT @pi", "expected": pi, "query_parameters": [pi_param]}, { - 'sql': 'SELECT @truthy', - 'expected': truthy, - 'query_parameters': [truthy_param], + "sql": "SELECT @pi_numeric_param", + "expected": pi_numeric, + "query_parameters": [pi_numeric_param], }, { - 'sql': 'SELECT @beef', - 'expected': beef, - 'query_parameters': [beef_param], + "sql": "SELECT @truthy", + "expected": truthy, + "query_parameters": [truthy_param], }, + {"sql": "SELECT @beef", "expected": beef, "query_parameters": [beef_param]}, { - 'sql': 'SELECT @naive', - 'expected': naive, - 'query_parameters': [naive_param], + "sql": "SELECT @naive", + "expected": naive, + "query_parameters": [naive_param], }, { - 'sql': 'SELECT @naive_date', - 'expected': naive.date(), - 'query_parameters': [naive_date_param], + "sql": "SELECT @naive_date", + "expected": naive.date(), + "query_parameters": [naive_date_param], }, { - 'sql': 'SELECT @naive_time', - 'expected': naive.time(), - 'query_parameters': [naive_time_param], + "sql": "SELECT @naive_time", + "expected": naive.time(), + "query_parameters": [naive_time_param], }, { - 'sql': 'SELECT @zoned', - 'expected': zoned, - 'query_parameters': [zoned_param], + "sql": "SELECT @zoned", + "expected": zoned, + "query_parameters": [zoned_param], }, { - 'sql': 'SELECT @array_param', - 'expected': [1, 2], - 'query_parameters': [array_param], + "sql": "SELECT @array_param", + "expected": [1, 2], + "query_parameters": [array_param], }, { - 'sql': 'SELECT (@hitchhiker.question, @hitchhiker.answer)', - 'expected': ({'_field_1': question, '_field_2': answer}), - 'query_parameters': [struct_param], + "sql": "SELECT (@hitchhiker.question, @hitchhiker.answer)", + "expected": ({"_field_1": question, "_field_2": answer}), + "query_parameters": 
[struct_param], }, { - 'sql': - 'SELECT ' - '((@rectangle.bottom_right.x - @rectangle.top_left.x) ' - '* (@rectangle.top_left.y - @rectangle.bottom_right.y))', - 'expected': 100, - 'query_parameters': [rectangle_param], + "sql": "SELECT " + "((@rectangle.bottom_right.x - @rectangle.top_left.x) " + "* (@rectangle.top_left.y - @rectangle.bottom_right.y))", + "expected": 100, + "query_parameters": [rectangle_param], }, { - 'sql': 'SELECT ?', - 'expected': [ - {'name': phred_name, 'age': phred_age}, - {'name': bharney_name, 'age': bharney_age}, + "sql": "SELECT ?", + "expected": [ + {"name": phred_name, "age": phred_age}, + {"name": bharney_name, "age": bharney_age}, ], - 'query_parameters': [characters_param], + "query_parameters": [characters_param], }, { - 'sql': 'SELECT @roles', - 'expected': { - 'hero': {'name': phred_name, 'age': phred_age}, - 'sidekick': {'name': bharney_name, 'age': bharney_age}, + "sql": "SELECT @roles", + "expected": { + "hero": {"name": phred_name, "age": phred_age}, + "sidekick": {"name": bharney_name, "age": bharney_age}, }, - 'query_parameters': [roles_param], + "query_parameters": [roles_param], }, { - 'sql': 'SELECT ?', - 'expected': { - 'friends': [phred_name, bharney_name], - }, - 'query_parameters': [with_friends_param], + "sql": "SELECT ?", + "expected": {"friends": [phred_name, bharney_name]}, + "query_parameters": [with_friends_param], }, ] for example in examples: jconfig = QueryJobConfig() - jconfig.query_parameters = example['query_parameters'] + jconfig.query_parameters = example["query_parameters"] query_job = Config.CLIENT.query( - example['sql'], + example["sql"], job_config=jconfig, - job_id_prefix='test_query_w_query_params') + job_id_prefix="test_query_w_query_params", + ) rows = list(query_job.result()) self.assertEqual(len(rows), 1) self.assertEqual(len(rows[0]), 1) - self.assertEqual(rows[0][0], example['expected']) + self.assertEqual(rows[0][0], example["expected"]) def test_dbapi_w_query_parameters(self): examples = [ { - 'sql': 'SELECT %(boolval)s', - 'expected': True, - 'query_parameters': { - 'boolval': True, - }, + "sql": "SELECT %(boolval)s", + "expected": True, + "query_parameters": {"boolval": True}, }, { - 'sql': 'SELECT %(a "very" weird `name`)s', - 'expected': True, - 'query_parameters': { - 'a "very" weird `name`': True, - }, + "sql": 'SELECT %(a "very" weird `name`)s', + "expected": True, + "query_parameters": {'a "very" weird `name`': True}, }, { - 'sql': 'SELECT %(select)s', - 'expected': True, - 'query_parameters': { - 'select': True, # this name is a keyword - }, + "sql": "SELECT %(select)s", + "expected": True, + "query_parameters": {"select": True}, # this name is a keyword }, + {"sql": "SELECT %s", "expected": False, "query_parameters": [False]}, { - 'sql': 'SELECT %s', - 'expected': False, - 'query_parameters': [False], + "sql": "SELECT %(intval)s", + "expected": 123, + "query_parameters": {"intval": 123}, }, { - 'sql': 'SELECT %(intval)s', - 'expected': 123, - 'query_parameters': { - 'intval': 123, - }, + "sql": "SELECT %s", + "expected": -123456789, + "query_parameters": [-123456789], }, { - 'sql': 'SELECT %s', - 'expected': -123456789, - 'query_parameters': [-123456789], + "sql": "SELECT %(floatval)s", + "expected": 1.25, + "query_parameters": {"floatval": 1.25}, }, { - 'sql': 'SELECT %(floatval)s', - 'expected': 1.25, - 'query_parameters': { - 'floatval': 1.25, - }, + "sql": "SELECT LOWER(%(strval)s)", + "query_parameters": {"strval": "I Am A String"}, + "expected": "i am a string", }, { - 'sql': 'SELECT 
LOWER(%(strval)s)', - 'query_parameters': { - 'strval': 'I Am A String', - }, - 'expected': 'i am a string', + "sql": "SELECT DATE_SUB(%(dateval)s, INTERVAL 1 DAY)", + "query_parameters": {"dateval": datetime.date(2017, 4, 2)}, + "expected": datetime.date(2017, 4, 1), }, { - 'sql': 'SELECT DATE_SUB(%(dateval)s, INTERVAL 1 DAY)', - 'query_parameters': { - 'dateval': datetime.date(2017, 4, 2), - }, - 'expected': datetime.date(2017, 4, 1), + "sql": "SELECT TIME_ADD(%(timeval)s, INTERVAL 4 SECOND)", + "query_parameters": {"timeval": datetime.time(12, 34, 56)}, + "expected": datetime.time(12, 35, 0), }, { - 'sql': 'SELECT TIME_ADD(%(timeval)s, INTERVAL 4 SECOND)', - 'query_parameters': { - 'timeval': datetime.time(12, 34, 56), + "sql": ("SELECT DATETIME_ADD(%(datetimeval)s, INTERVAL 53 SECOND)"), + "query_parameters": { + "datetimeval": datetime.datetime(2012, 3, 4, 5, 6, 7) }, - 'expected': datetime.time(12, 35, 0), + "expected": datetime.datetime(2012, 3, 4, 5, 7, 0), }, { - 'sql': ( - 'SELECT DATETIME_ADD(%(datetimeval)s, INTERVAL 53 SECOND)' - ), - 'query_parameters': { - 'datetimeval': datetime.datetime(2012, 3, 4, 5, 6, 7), + "sql": "SELECT TIMESTAMP_TRUNC(%(zoned)s, MINUTE)", + "query_parameters": { + "zoned": datetime.datetime(2012, 3, 4, 5, 6, 7, tzinfo=UTC) }, - 'expected': datetime.datetime(2012, 3, 4, 5, 7, 0), + "expected": datetime.datetime(2012, 3, 4, 5, 6, 0, tzinfo=UTC), }, { - 'sql': 'SELECT TIMESTAMP_TRUNC(%(zoned)s, MINUTE)', - 'query_parameters': { - 'zoned': datetime.datetime( - 2012, 3, 4, 5, 6, 7, tzinfo=UTC), + "sql": "SELECT TIMESTAMP_TRUNC(%(zoned)s, MINUTE)", + "query_parameters": { + "zoned": datetime.datetime(2012, 3, 4, 5, 6, 7, 250000, tzinfo=UTC) }, - 'expected': datetime.datetime(2012, 3, 4, 5, 6, 0, tzinfo=UTC), - }, - { - 'sql': 'SELECT TIMESTAMP_TRUNC(%(zoned)s, MINUTE)', - 'query_parameters': { - 'zoned': datetime.datetime( - 2012, 3, 4, 5, 6, 7, 250000, tzinfo=UTC), - }, - 'expected': datetime.datetime(2012, 3, 4, 5, 6, 0, tzinfo=UTC), + "expected": datetime.datetime(2012, 3, 4, 5, 6, 0, tzinfo=UTC), }, ] for example in examples: - msg = 'sql: {} query_parameters: {}'.format( - example['sql'], example['query_parameters']) + msg = "sql: {} query_parameters: {}".format( + example["sql"], example["query_parameters"] + ) - Config.CURSOR.execute(example['sql'], example['query_parameters']) + Config.CURSOR.execute(example["sql"], example["query_parameters"]) self.assertEqual(Config.CURSOR.rowcount, 1, msg=msg) row = Config.CURSOR.fetchone() self.assertEqual(len(row), 1, msg=msg) - self.assertEqual(row[0], example['expected'], msg=msg) + self.assertEqual(row[0], example["expected"], msg=msg) row = Config.CURSOR.fetchone() self.assertIsNone(row, msg=msg) def test_large_query_w_public_data(self): - PUBLIC = 'bigquery-public-data' - DATASET_ID = 'samples' - TABLE_NAME = 'natality' + PUBLIC = "bigquery-public-data" + DATASET_ID = "samples" + TABLE_NAME = "natality" LIMIT = 1000 - SQL = 'SELECT * from `{}.{}.{}` LIMIT {}'.format( - PUBLIC, DATASET_ID, TABLE_NAME, LIMIT) + SQL = "SELECT * from `{}.{}.{}` LIMIT {}".format( + PUBLIC, DATASET_ID, TABLE_NAME, LIMIT + ) query_job = Config.CLIENT.query(SQL) @@ -1559,19 +1479,20 @@ def test_large_query_w_public_data(self): self.assertEqual(len(rows), LIMIT) def test_query_future(self): - query_job = Config.CLIENT.query('SELECT 1') + query_job = Config.CLIENT.query("SELECT 1") iterator = query_job.result(timeout=JOB_TIMEOUT) row_tuples = [r.values() for r in iterator] self.assertEqual(row_tuples, [(1,)]) def 
test_query_iter(self): import types - query_job = Config.CLIENT.query('SELECT 1') + + query_job = Config.CLIENT.query("SELECT 1") self.assertIsInstance(iter(query_job), types.GeneratorType) row_tuples = [r.values() for r in query_job] self.assertEqual(row_tuples, [(1,)]) - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_query_results_to_dataframe(self): QUERY = """ SELECT id, author, time_ts, dead @@ -1583,10 +1504,14 @@ def test_query_results_to_dataframe(self): self.assertIsInstance(df, pandas.DataFrame) self.assertEqual(len(df), 10) # verify the number of rows - column_names = ['id', 'author', 'time_ts', 'dead'] + column_names = ["id", "author", "time_ts", "dead"] self.assertEqual(list(df), column_names) # verify the column names - exp_datatypes = {'id': int, 'author': six.text_type, - 'time_ts': pandas.Timestamp, 'dead': bool} + exp_datatypes = { + "id": int, + "author": six.text_type, + "time_ts": pandas.Timestamp, + "dead": bool, + } for index, row in df.iterrows(): for col in column_names: # all the schema fields are nullable, so None is acceptable @@ -1597,25 +1522,31 @@ def test_insert_rows_nested_nested(self): # See #2951 SF = bigquery.SchemaField schema = [ - SF('string_col', 'STRING', mode='NULLABLE'), - SF('record_col', 'RECORD', mode='NULLABLE', fields=[ - SF('nested_string', 'STRING', mode='NULLABLE'), - SF('nested_repeated', 'INTEGER', mode='REPEATED'), - SF('nested_record', 'RECORD', mode='NULLABLE', fields=[ - SF('nested_nested_string', 'STRING', mode='NULLABLE'), - ]), - ]), + SF("string_col", "STRING", mode="NULLABLE"), + SF( + "record_col", + "RECORD", + mode="NULLABLE", + fields=[ + SF("nested_string", "STRING", mode="NULLABLE"), + SF("nested_repeated", "INTEGER", mode="REPEATED"), + SF( + "nested_record", + "RECORD", + mode="NULLABLE", + fields=[SF("nested_nested_string", "STRING", mode="NULLABLE")], + ), + ], + ), ] record = { - 'nested_string': 'another string value', - 'nested_repeated': [0, 1, 2], - 'nested_record': {'nested_nested_string': 'some deep insight'}, + "nested_string": "another string value", + "nested_repeated": [0, 1, 2], + "nested_record": {"nested_nested_string": "some deep insight"}, } - to_insert = [ - ('Some value', record) - ] - table_id = 'test_table' - dataset = self.temp_dataset(_make_dataset_id('issue_2951')) + to_insert = [("Some value", record)] + table_id = "test_table" + dataset = self.temp_dataset(_make_dataset_id("issue_2951")) table_arg = Table(dataset.table(table_id), schema=schema) table = retry_403(Config.CLIENT.create_table)(table_arg) self.to_delete.insert(0, table) @@ -1631,25 +1562,31 @@ def test_insert_rows_nested_nested_dictionary(self): # See #2951 SF = bigquery.SchemaField schema = [ - SF('string_col', 'STRING', mode='NULLABLE'), - SF('record_col', 'RECORD', mode='NULLABLE', fields=[ - SF('nested_string', 'STRING', mode='NULLABLE'), - SF('nested_repeated', 'INTEGER', mode='REPEATED'), - SF('nested_record', 'RECORD', mode='NULLABLE', fields=[ - SF('nested_nested_string', 'STRING', mode='NULLABLE'), - ]), - ]), + SF("string_col", "STRING", mode="NULLABLE"), + SF( + "record_col", + "RECORD", + mode="NULLABLE", + fields=[ + SF("nested_string", "STRING", mode="NULLABLE"), + SF("nested_repeated", "INTEGER", mode="REPEATED"), + SF( + "nested_record", + "RECORD", + mode="NULLABLE", + fields=[SF("nested_nested_string", "STRING", mode="NULLABLE")], + ), + ], + ), ] record = { - 'nested_string': 'another string value', - 'nested_repeated': [0, 1, 2], - 
'nested_record': {'nested_nested_string': 'some deep insight'}, + "nested_string": "another string value", + "nested_repeated": [0, 1, 2], + "nested_record": {"nested_nested_string": "some deep insight"}, } - to_insert = [ - {'string_col': 'Some value', 'record_col': record} - ] - table_id = 'test_table' - dataset = self.temp_dataset(_make_dataset_id('issue_2951')) + to_insert = [{"string_col": "Some value", "record_col": record}] + table_id = "test_table" + dataset = self.temp_dataset(_make_dataset_id("issue_2951")) table_arg = Table(dataset.table(table_id), schema=schema) table = retry_403(Config.CLIENT.create_table)(table_arg) self.to_delete.insert(0, table) @@ -1659,13 +1596,12 @@ def test_insert_rows_nested_nested_dictionary(self): retry = RetryResult(_has_rows, max_tries=8) rows = retry(self._fetch_single_page)(table) row_tuples = [r.values() for r in rows] - expected_rows = [('Some value', record)] + expected_rows = [("Some value", record)] self.assertEqual(row_tuples, expected_rows) def test_create_table_rows_fetch_nested_schema(self): - table_name = 'test_table' - dataset = self.temp_dataset( - _make_dataset_id('create_table_nested_schema')) + table_name = "test_table" + dataset = self.temp_dataset(_make_dataset_id("create_table_nested_schema")) schema = _load_json_schema() table_arg = Table(dataset.table(table_name), schema=schema) table = retry_403(Config.CLIENT.create_table)(table_arg) @@ -1675,7 +1611,7 @@ def test_create_table_rows_fetch_nested_schema(self): to_insert = [] # Data is in "JSON Lines" format, see http://jsonlines.org/ - json_filename = os.path.join(WHERE, 'data', 'characters.jsonl') + json_filename = os.path.join(WHERE, "data", "characters.jsonl") with open(json_filename) as rows_file: for line in rows_file: to_insert.append(json.loads(line)) @@ -1690,72 +1626,73 @@ def test_create_table_rows_fetch_nested_schema(self): self.assertEqual(len(fetched), len(to_insert)) for found, expected in zip(sorted(fetched_tuples), to_insert): - self.assertEqual(found[0], expected['Name']) - self.assertEqual(found[1], int(expected['Age'])) - self.assertEqual(found[2], expected['Weight']) - self.assertEqual(found[3], expected['IsMagic']) - - self.assertEqual(len(found[4]), len(expected['Spells'])) - for f_spell, e_spell in zip(found[4], expected['Spells']): - self.assertEqual(f_spell['Name'], e_spell['Name']) - parts = time.strptime( - e_spell['LastUsed'], '%Y-%m-%d %H:%M:%S UTC') + self.assertEqual(found[0], expected["Name"]) + self.assertEqual(found[1], int(expected["Age"])) + self.assertEqual(found[2], expected["Weight"]) + self.assertEqual(found[3], expected["IsMagic"]) + + self.assertEqual(len(found[4]), len(expected["Spells"])) + for f_spell, e_spell in zip(found[4], expected["Spells"]): + self.assertEqual(f_spell["Name"], e_spell["Name"]) + parts = time.strptime(e_spell["LastUsed"], "%Y-%m-%d %H:%M:%S UTC") e_used = datetime.datetime(*parts[0:6], tzinfo=UTC) - self.assertEqual(f_spell['LastUsed'], e_used) - self.assertEqual(f_spell['DiscoveredBy'], - e_spell['DiscoveredBy']) - self.assertEqual(f_spell['Properties'], e_spell['Properties']) + self.assertEqual(f_spell["LastUsed"], e_used) + self.assertEqual(f_spell["DiscoveredBy"], e_spell["DiscoveredBy"]) + self.assertEqual(f_spell["Properties"], e_spell["Properties"]) - e_icon = base64.standard_b64decode( - e_spell['Icon'].encode('ascii')) - self.assertEqual(f_spell['Icon'], e_icon) + e_icon = base64.standard_b64decode(e_spell["Icon"].encode("ascii")) + self.assertEqual(f_spell["Icon"], e_icon) - parts = 
time.strptime(expected['TeaTime'], '%H:%M:%S') + parts = time.strptime(expected["TeaTime"], "%H:%M:%S") e_teatime = datetime.time(*parts[3:6]) self.assertEqual(found[5], e_teatime) - parts = time.strptime(expected['NextVacation'], '%Y-%m-%d') + parts = time.strptime(expected["NextVacation"], "%Y-%m-%d") e_nextvac = datetime.date(*parts[0:3]) self.assertEqual(found[6], e_nextvac) - parts = time.strptime(expected['FavoriteTime'], - '%Y-%m-%dT%H:%M:%S') + parts = time.strptime(expected["FavoriteTime"], "%Y-%m-%dT%H:%M:%S") e_favtime = datetime.datetime(*parts[0:6]) self.assertEqual(found[7], e_favtime) - self.assertEqual(found[8], - decimal.Decimal(expected['FavoriteNumber'])) + self.assertEqual(found[8], decimal.Decimal(expected["FavoriteNumber"])) def _fetch_dataframe(self, query): return Config.CLIENT.query(query).result().to_dataframe() - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_nested_table_to_dataframe(self): from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import WriteDisposition SF = bigquery.SchemaField schema = [ - SF('string_col', 'STRING', mode='NULLABLE'), - SF('record_col', 'RECORD', mode='NULLABLE', fields=[ - SF('nested_string', 'STRING', mode='NULLABLE'), - SF('nested_repeated', 'INTEGER', mode='REPEATED'), - SF('nested_record', 'RECORD', mode='NULLABLE', fields=[ - SF('nested_nested_string', 'STRING', mode='NULLABLE'), - ]), - ]), + SF("string_col", "STRING", mode="NULLABLE"), + SF( + "record_col", + "RECORD", + mode="NULLABLE", + fields=[ + SF("nested_string", "STRING", mode="NULLABLE"), + SF("nested_repeated", "INTEGER", mode="REPEATED"), + SF( + "nested_record", + "RECORD", + mode="NULLABLE", + fields=[SF("nested_nested_string", "STRING", mode="NULLABLE")], + ), + ], + ), ] record = { - 'nested_string': 'another string value', - 'nested_repeated': [0, 1, 2], - 'nested_record': {'nested_nested_string': 'some deep insight'}, + "nested_string": "another string value", + "nested_repeated": [0, 1, 2], + "nested_record": {"nested_nested_string": "some deep insight"}, } - to_insert = [ - {'string_col': 'Some value', 'record_col': record}, - ] + to_insert = [{"string_col": "Some value", "record_col": record}] rows = [json.dumps(row) for row in to_insert] - body = six.BytesIO('{}\n'.format('\n'.join(rows)).encode('ascii')) - table_id = 'test_table' - dataset = self.temp_dataset(_make_dataset_id('nested_df')) + body = six.BytesIO("{}\n".format("\n".join(rows)).encode("ascii")) + table_id = "test_table" + dataset = self.temp_dataset(_make_dataset_id("nested_df")) table = dataset.table(table_id) self.to_delete.insert(0, table) job_config = bigquery.LoadJobConfig() @@ -1763,32 +1700,31 @@ def test_nested_table_to_dataframe(self): job_config.source_format = SourceFormat.NEWLINE_DELIMITED_JSON job_config.schema = schema # Load a table using a local JSON file from memory. 
- Config.CLIENT.load_table_from_file( - body, table, job_config=job_config).result() + Config.CLIENT.load_table_from_file(body, table, job_config=job_config).result() - df = Config.CLIENT.list_rows( - table, selected_fields=schema).to_dataframe() + df = Config.CLIENT.list_rows(table, selected_fields=schema).to_dataframe() self.assertIsInstance(df, pandas.DataFrame) self.assertEqual(len(df), 1) # verify the number of rows - exp_columns = ['string_col', 'record_col'] + exp_columns = ["string_col", "record_col"] self.assertEqual(list(df), exp_columns) # verify the column names row = df.iloc[0] # verify the row content - self.assertEqual(row['string_col'], 'Some value') - self.assertEqual(row['record_col'], record) + self.assertEqual(row["string_col"], "Some value") + self.assertEqual(row["record_col"], record) # verify that nested data can be accessed with indices/keys - self.assertEqual(row['record_col']['nested_repeated'][0], 0) + self.assertEqual(row["record_col"]["nested_repeated"][0], 0) self.assertEqual( - row['record_col']['nested_record']['nested_nested_string'], - 'some deep insight') + row["record_col"]["nested_record"]["nested_nested_string"], + "some deep insight", + ) def test_list_rows_empty_table(self): from google.cloud.bigquery.table import RowIterator - dataset_id = _make_dataset_id('empty_table') + dataset_id = _make_dataset_id("empty_table") dataset = self.temp_dataset(dataset_id) - table_ref = dataset.table('empty_table') + table_ref = dataset.table("empty_table") table = Config.CLIENT.create_table(bigquery.Table(table_ref)) # It's a bit silly to list rows for an empty table, but this does @@ -1806,13 +1742,13 @@ def test_list_rows_page_size(self): num_pages, num_last_page = divmod(num_items, page_size) SF = bigquery.SchemaField - schema = [SF('string_col', 'STRING', mode='NULLABLE')] - to_insert = [{'string_col': 'item%d' % i} for i in range(num_items)] + schema = [SF("string_col", "STRING", mode="NULLABLE")] + to_insert = [{"string_col": "item%d" % i} for i in range(num_items)] rows = [json.dumps(row) for row in to_insert] - body = six.BytesIO('{}\n'.format('\n'.join(rows)).encode('ascii')) + body = six.BytesIO("{}\n".format("\n".join(rows)).encode("ascii")) - table_id = 'test_table' - dataset = self.temp_dataset(_make_dataset_id('nested_df')) + table_id = "test_table" + dataset = self.temp_dataset(_make_dataset_id("nested_df")) table = dataset.table(table_id) self.to_delete.insert(0, table) job_config = bigquery.LoadJobConfig() @@ -1820,11 +1756,9 @@ def test_list_rows_page_size(self): job_config.source_format = SourceFormat.NEWLINE_DELIMITED_JSON job_config.schema = schema # Load a table using a local JSON file from memory. 
- Config.CLIENT.load_table_from_file( - body, table, job_config=job_config).result() + Config.CLIENT.load_table_from_file(body, table, job_config=job_config).result() - df = Config.CLIENT.list_rows( - table, selected_fields=schema, page_size=page_size) + df = Config.CLIENT.list_rows(table, selected_fields=schema, page_size=page_size) pages = df.pages for i in range(num_pages): @@ -1842,12 +1776,12 @@ def temp_dataset(self, dataset_id, location=None): return dataset -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') -@pytest.mark.skipif(IPython is None, reason='Requires `ipython`') -@pytest.mark.usefixtures('ipython_interactive') +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(IPython is None, reason="Requires `ipython`") +@pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") sql = """ SELECT CONCAT( @@ -1860,22 +1794,21 @@ def test_bigquery_magic(): LIMIT 10 """ with io.capture_output() as captured: - result = ip.run_cell_magic('bigquery', '', sql) + result = ip.run_cell_magic("bigquery", "", sql) - lines = re.split('\n|\r', captured.stdout) + lines = re.split("\n|\r", captured.stdout) # Removes blanks & terminal code (result of display clearing) - updates = list(filter(lambda x: bool(x) and x != '\x1b[2K', lines)) + updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines)) assert re.match("Executing query with job ID: .*", updates[0]) - assert all(re.match("Query executing: .*s", line) - for line in updates[1:-1]) + assert all(re.match("Query executing: .*s", line) for line in updates[1:-1]) assert re.match("Query complete after .*s", updates[-1]) assert isinstance(result, pandas.DataFrame) - assert len(result) == 10 # verify row count - assert list(result) == ['url', 'view_count'] # verify column names + assert len(result) == 10 # verify row count + assert list(result) == ["url", "view_count"] # verify column names def _job_done(instance): - return instance.state.lower() == 'done' + return instance.state.lower() == "done" def _dataset_exists(ds): @@ -1895,7 +1828,7 @@ def _table_exists(t): return False -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def ipython(): config = tools.default_config() config.TerminalInteractiveShell.simple_prompt = True diff --git a/bigquery/tests/unit/test__helpers.py b/bigquery/tests/unit/test__helpers.py index d0a93ebd1340..c2c4f9f7f787 100644 --- a/bigquery/tests/unit/test__helpers.py +++ b/bigquery/tests/unit/test__helpers.py @@ -19,38 +19,36 @@ class Test_not_null(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _not_null return _not_null(value, field) def test_w_none_nullable(self): - self.assertFalse(self._call_fut(None, _Field('NULLABLE'))) + self.assertFalse(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): - self.assertTrue(self._call_fut(None, _Field('REQUIRED'))) + self.assertTrue(self._call_fut(None, _Field("REQUIRED"))) def test_w_value(self): self.assertTrue(self._call_fut(object(), object())) class Test_int_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _int_from_json return _int_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) 
def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): - coerced = self._call_fut('42', object()) + coerced = self._call_fut("42", object()) self.assertEqual(coerced, 42) def test_w_float_value(self): @@ -59,21 +57,20 @@ def test_w_float_value(self): class Test_float_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _float_from_json return _float_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): - coerced = self._call_fut('3.1415', object()) + coerced = self._call_fut("3.1415", object()) self.assertEqual(coerced, 3.1415) def test_w_float_value(self): @@ -82,22 +79,21 @@ def test_w_float_value(self): class Test_decimal_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _decimal_from_json return _decimal_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): - coerced = self._call_fut('3.1415', object()) - self.assertEqual(coerced, decimal.Decimal('3.1415')) + coerced = self._call_fut("3.1415", object()) + self.assertEqual(coerced, decimal.Decimal("3.1415")) def test_w_float_value(self): coerced = self._call_fut(3.1415, object()) @@ -106,319 +102,295 @@ def test_w_float_value(self): class Test_bool_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _bool_from_json return _bool_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(AttributeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_value_t(self): - coerced = self._call_fut('T', object()) + coerced = self._call_fut("T", object()) self.assertTrue(coerced) def test_w_value_true(self): - coerced = self._call_fut('True', object()) + coerced = self._call_fut("True", object()) self.assertTrue(coerced) def test_w_value_1(self): - coerced = self._call_fut('1', object()) + coerced = self._call_fut("1", object()) self.assertTrue(coerced) def test_w_value_other(self): - coerced = self._call_fut('f', object()) + coerced = self._call_fut("f", object()) self.assertFalse(coerced) class Test_string_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _string_from_json return _string_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): - self.assertIsNone(self._call_fut(None, _Field('REQUIRED'))) + self.assertIsNone(self._call_fut(None, _Field("REQUIRED"))) def test_w_string_value(self): - coerced 
= self._call_fut('Wonderful!', object()) - self.assertEqual(coerced, 'Wonderful!') + coerced = self._call_fut("Wonderful!", object()) + self.assertEqual(coerced, "Wonderful!") class Test_bytes_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _bytes_from_json return _bytes_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_base64_encoded_bytes(self): - expected = b'Wonderful!' + expected = b"Wonderful!" encoded = base64.standard_b64encode(expected) coerced = self._call_fut(encoded, object()) self.assertEqual(coerced, expected) def test_w_base64_encoded_text(self): - expected = b'Wonderful!' - encoded = base64.standard_b64encode(expected).decode('ascii') + expected = b"Wonderful!" + encoded = base64.standard_b64encode(expected).decode("ascii") coerced = self._call_fut(encoded, object()) self.assertEqual(coerced, expected) class Test_timestamp_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _timestamp_from_json return _timestamp_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): from google.cloud._helpers import _EPOCH - coerced = self._call_fut('1.234567', object()) + coerced = self._call_fut("1.234567", object()) self.assertEqual( - coerced, - _EPOCH + datetime.timedelta(seconds=1, microseconds=234567)) + coerced, _EPOCH + datetime.timedelta(seconds=1, microseconds=234567) + ) def test_w_float_value(self): from google.cloud._helpers import _EPOCH coerced = self._call_fut(1.234567, object()) self.assertEqual( - coerced, - _EPOCH + datetime.timedelta(seconds=1, microseconds=234567)) + coerced, _EPOCH + datetime.timedelta(seconds=1, microseconds=234567) + ) class Test_timestamp_query_param_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery import _helpers return _helpers._timestamp_query_param_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_timestamp_valid(self): from google.cloud._helpers import UTC samples = [ ( - '2016-12-20 15:58:27.339328+00:00', - datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) + "2016-12-20 15:58:27.339328+00:00", + datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( - '2016-12-20 15:58:27+00:00', - datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC) + "2016-12-20 15:58:27+00:00", + datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ( - '2016-12-20T15:58:27.339328+00:00', - datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) + "2016-12-20T15:58:27.339328+00:00", + datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( - '2016-12-20T15:58:27+00:00', - datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC) + "2016-12-20T15:58:27+00:00", + datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ( - 
'2016-12-20 15:58:27.339328Z', - datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) + "2016-12-20 15:58:27.339328Z", + datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( - '2016-12-20 15:58:27Z', - datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC) + "2016-12-20 15:58:27Z", + datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ( - '2016-12-20T15:58:27.339328Z', - datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) + "2016-12-20T15:58:27.339328Z", + datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC), ), ( - '2016-12-20T15:58:27Z', - datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC) + "2016-12-20T15:58:27Z", + datetime.datetime(2016, 12, 20, 15, 58, 27, tzinfo=UTC), ), ] for timestamp_str, expected_result in samples: self.assertEqual( - self._call_fut(timestamp_str, _Field('NULLABLE')), - expected_result) + self._call_fut(timestamp_str, _Field("NULLABLE")), expected_result + ) def test_w_timestamp_invalid(self): with self.assertRaises(ValueError): - self._call_fut('definitely-not-a-timestamp', _Field('NULLABLE')) + self._call_fut("definitely-not-a-timestamp", _Field("NULLABLE")) class Test_datetime_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _datetime_from_json return _datetime_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): - coerced = self._call_fut('2016-12-02T18:51:33', object()) - self.assertEqual( - coerced, - datetime.datetime(2016, 12, 2, 18, 51, 33)) + coerced = self._call_fut("2016-12-02T18:51:33", object()) + self.assertEqual(coerced, datetime.datetime(2016, 12, 2, 18, 51, 33)) def test_w_microseconds(self): - coerced = self._call_fut('2015-05-22T10:11:12.987654', object()) - self.assertEqual( - coerced, - datetime.datetime(2015, 5, 22, 10, 11, 12, 987654)) + coerced = self._call_fut("2015-05-22T10:11:12.987654", object()) + self.assertEqual(coerced, datetime.datetime(2015, 5, 22, 10, 11, 12, 987654)) class Test_date_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _date_from_json return _date_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_string_value(self): - coerced = self._call_fut('1987-09-22', object()) - self.assertEqual( - coerced, - datetime.date(1987, 9, 22)) + coerced = self._call_fut("1987-09-22", object()) + self.assertEqual(coerced, datetime.date(1987, 9, 22)) class Test_time_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _time_from_json return _time_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def 
test_w_string_value(self): - coerced = self._call_fut('12:12:27', object()) - self.assertEqual( - coerced, - datetime.time(12, 12, 27)) + coerced = self._call_fut("12:12:27", object()) + self.assertEqual(coerced, datetime.time(12, 12, 27)) def test_w_subsecond_string_value(self): - coerced = self._call_fut('12:12:27.123456', object()) - self.assertEqual( - coerced, - datetime.time(12, 12, 27, 123456)) + coerced = self._call_fut("12:12:27.123456", object()) + self.assertEqual(coerced, datetime.time(12, 12, 27, 123456)) def test_w_bogus_string_value(self): with self.assertRaises(ValueError): - self._call_fut('12:12:27.123', object()) + self._call_fut("12:12:27.123", object()) class Test_record_from_json(unittest.TestCase): - def _call_fut(self, value, field): from google.cloud.bigquery._helpers import _record_from_json return _record_from_json(value, field) def test_w_none_nullable(self): - self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + self.assertIsNone(self._call_fut(None, _Field("NULLABLE"))) def test_w_none_required(self): with self.assertRaises(TypeError): - self._call_fut(None, _Field('REQUIRED')) + self._call_fut(None, _Field("REQUIRED")) def test_w_nullable_subfield_none(self): - subfield = _Field('NULLABLE', 'age', 'INTEGER') - field = _Field('REQUIRED', fields=[subfield]) - value = {'f': [{'v': None}]} + subfield = _Field("NULLABLE", "age", "INTEGER") + field = _Field("REQUIRED", fields=[subfield]) + value = {"f": [{"v": None}]} coerced = self._call_fut(value, field) - self.assertEqual(coerced, {'age': None}) + self.assertEqual(coerced, {"age": None}) def test_w_scalar_subfield(self): - subfield = _Field('REQUIRED', 'age', 'INTEGER') - field = _Field('REQUIRED', fields=[subfield]) - value = {'f': [{'v': 42}]} + subfield = _Field("REQUIRED", "age", "INTEGER") + field = _Field("REQUIRED", fields=[subfield]) + value = {"f": [{"v": 42}]} coerced = self._call_fut(value, field) - self.assertEqual(coerced, {'age': 42}) + self.assertEqual(coerced, {"age": 42}) def test_w_scalar_subfield_geography(self): - subfield = _Field('REQUIRED', 'geo', 'GEOGRAPHY') - field = _Field('REQUIRED', fields=[subfield]) - value = {'f': [{'v': 'POINT(1, 2)'}]} + subfield = _Field("REQUIRED", "geo", "GEOGRAPHY") + field = _Field("REQUIRED", fields=[subfield]) + value = {"f": [{"v": "POINT(1, 2)"}]} coerced = self._call_fut(value, field) - self.assertEqual(coerced, {'geo': 'POINT(1, 2)'}) + self.assertEqual(coerced, {"geo": "POINT(1, 2)"}) def test_w_repeated_subfield(self): - subfield = _Field('REPEATED', 'color', 'STRING') - field = _Field('REQUIRED', fields=[subfield]) - value = {'f': [{'v': [{'v': 'red'}, {'v': 'yellow'}, {'v': 'blue'}]}]} + subfield = _Field("REPEATED", "color", "STRING") + field = _Field("REQUIRED", fields=[subfield]) + value = {"f": [{"v": [{"v": "red"}, {"v": "yellow"}, {"v": "blue"}]}]} coerced = self._call_fut(value, field) - self.assertEqual(coerced, {'color': ['red', 'yellow', 'blue']}) + self.assertEqual(coerced, {"color": ["red", "yellow", "blue"]}) def test_w_record_subfield(self): - full_name = _Field('REQUIRED', 'full_name', 'STRING') - area_code = _Field('REQUIRED', 'area_code', 'STRING') - local_number = _Field('REQUIRED', 'local_number', 'STRING') - rank = _Field('REQUIRED', 'rank', 'INTEGER') - phone = _Field('NULLABLE', 'phone', 'RECORD', - fields=[area_code, local_number, rank]) - person = _Field('REQUIRED', 'person', 'RECORD', - fields=[full_name, phone]) + full_name = _Field("REQUIRED", "full_name", "STRING") + area_code = _Field("REQUIRED", "area_code", 
"STRING") + local_number = _Field("REQUIRED", "local_number", "STRING") + rank = _Field("REQUIRED", "rank", "INTEGER") + phone = _Field( + "NULLABLE", "phone", "RECORD", fields=[area_code, local_number, rank] + ) + person = _Field("REQUIRED", "person", "RECORD", fields=[full_name, phone]) value = { - 'f': [ - {'v': 'Phred Phlyntstone'}, - {'v': {'f': [{'v': '800'}, {'v': '555-1212'}, {'v': 1}]}}, - ], + "f": [ + {"v": "Phred Phlyntstone"}, + {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}}, + ] } expected = { - 'full_name': 'Phred Phlyntstone', - 'phone': { - 'area_code': '800', - 'local_number': '555-1212', - 'rank': 1, - } + "full_name": "Phred Phlyntstone", + "phone": {"area_code": "800", "local_number": "555-1212", "rank": 1}, } coerced = self._call_fut(value, person) self.assertEqual(coerced, expected) class Test_field_to_index_mapping(unittest.TestCase): - def _call_fut(self, schema): from google.cloud.bigquery._helpers import _field_to_index_mapping @@ -429,17 +401,14 @@ def test_w_empty_schema(self): def test_w_non_empty_schema(self): schema = [ - _Field('REPEATED', 'first', 'INTEGER'), - _Field('REQUIRED', 'second', 'INTEGER'), - _Field('REPEATED', 'third', 'INTEGER'), + _Field("REPEATED", "first", "INTEGER"), + _Field("REQUIRED", "second", "INTEGER"), + _Field("REPEATED", "third", "INTEGER"), ] - self.assertEqual( - self._call_fut(schema), - {'first': 0, 'second': 1, 'third': 2}) + self.assertEqual(self._call_fut(schema), {"first": 0, "second": 1, "third": 2}) class Test_row_tuple_from_json(unittest.TestCase): - def _call_fut(self, row, schema): from google.cloud.bigquery._helpers import _row_tuple_from_json @@ -447,97 +416,116 @@ def _call_fut(self, row, schema): def test_w_single_scalar_column(self): # SELECT 1 AS col - col = _Field('REQUIRED', 'col', 'INTEGER') - row = {u'f': [{u'v': u'1'}]} + col = _Field("REQUIRED", "col", "INTEGER") + row = {u"f": [{u"v": u"1"}]} self.assertEqual(self._call_fut(row, schema=[col]), (1,)) def test_w_single_scalar_geography_column(self): # SELECT 1 AS col - col = _Field('REQUIRED', 'geo', 'GEOGRAPHY') - row = {u'f': [{u'v': u'POINT(1, 2)'}]} - self.assertEqual(self._call_fut(row, schema=[col]), ('POINT(1, 2)',)) + col = _Field("REQUIRED", "geo", "GEOGRAPHY") + row = {u"f": [{u"v": u"POINT(1, 2)"}]} + self.assertEqual(self._call_fut(row, schema=[col]), ("POINT(1, 2)",)) def test_w_single_struct_column(self): # SELECT (1, 2) AS col - sub_1 = _Field('REQUIRED', 'sub_1', 'INTEGER') - sub_2 = _Field('REQUIRED', 'sub_2', 'INTEGER') - col = _Field('REQUIRED', 'col', 'RECORD', fields=[sub_1, sub_2]) - row = {u'f': [{u'v': {u'f': [{u'v': u'1'}, {u'v': u'2'}]}}]} - self.assertEqual(self._call_fut(row, schema=[col]), - ({'sub_1': 1, 'sub_2': 2},)) + sub_1 = _Field("REQUIRED", "sub_1", "INTEGER") + sub_2 = _Field("REQUIRED", "sub_2", "INTEGER") + col = _Field("REQUIRED", "col", "RECORD", fields=[sub_1, sub_2]) + row = {u"f": [{u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}]}}]} + self.assertEqual(self._call_fut(row, schema=[col]), ({"sub_1": 1, "sub_2": 2},)) def test_w_single_array_column(self): # SELECT [1, 2, 3] as col - col = _Field('REPEATED', 'col', 'INTEGER') - row = {u'f': [{u'v': [{u'v': u'1'}, {u'v': u'2'}, {u'v': u'3'}]}]} - self.assertEqual(self._call_fut(row, schema=[col]), - ([1, 2, 3],)) + col = _Field("REPEATED", "col", "INTEGER") + row = {u"f": [{u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}]} + self.assertEqual(self._call_fut(row, schema=[col]), ([1, 2, 3],)) def test_w_struct_w_nested_array_column(self): # SELECT ([1, 2], 3, [4, 
5]) as col - first = _Field('REPEATED', 'first', 'INTEGER') - second = _Field('REQUIRED', 'second', 'INTEGER') - third = _Field('REPEATED', 'third', 'INTEGER') - col = _Field('REQUIRED', 'col', 'RECORD', - fields=[first, second, third]) + first = _Field("REPEATED", "first", "INTEGER") + second = _Field("REQUIRED", "second", "INTEGER") + third = _Field("REPEATED", "third", "INTEGER") + col = _Field("REQUIRED", "col", "RECORD", fields=[first, second, third]) row = { - u'f': [ - {u'v': { - u'f': [ - {u'v': [{u'v': u'1'}, {u'v': u'2'}]}, - {u'v': u'3'}, - {u'v': [{u'v': u'4'}, {u'v': u'5'}]} - ] - }}, + u"f": [ + { + u"v": { + u"f": [ + {u"v": [{u"v": u"1"}, {u"v": u"2"}]}, + {u"v": u"3"}, + {u"v": [{u"v": u"4"}, {u"v": u"5"}]}, + ] + } + } ] } self.assertEqual( self._call_fut(row, schema=[col]), - ({u'first': [1, 2], u'second': 3, u'third': [4, 5]},)) + ({u"first": [1, 2], u"second": 3, u"third": [4, 5]},), + ) def test_w_array_of_struct(self): # SELECT [(1, 2, 3), (4, 5, 6)] as col - first = _Field('REQUIRED', 'first', 'INTEGER') - second = _Field('REQUIRED', 'second', 'INTEGER') - third = _Field('REQUIRED', 'third', 'INTEGER') - col = _Field('REPEATED', 'col', 'RECORD', - fields=[first, second, third]) - row = {u'f': [{u'v': [ - {u'v': {u'f': [{u'v': u'1'}, {u'v': u'2'}, {u'v': u'3'}]}}, - {u'v': {u'f': [{u'v': u'4'}, {u'v': u'5'}, {u'v': u'6'}]}}, - ]}]} + first = _Field("REQUIRED", "first", "INTEGER") + second = _Field("REQUIRED", "second", "INTEGER") + third = _Field("REQUIRED", "third", "INTEGER") + col = _Field("REPEATED", "col", "RECORD", fields=[first, second, third]) + row = { + u"f": [ + { + u"v": [ + {u"v": {u"f": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}}, + {u"v": {u"f": [{u"v": u"4"}, {u"v": u"5"}, {u"v": u"6"}]}}, + ] + } + ] + } self.assertEqual( self._call_fut(row, schema=[col]), - ([ - {u'first': 1, u'second': 2, u'third': 3}, - {u'first': 4, u'second': 5, u'third': 6}, - ],)) + ( + [ + {u"first": 1, u"second": 2, u"third": 3}, + {u"first": 4, u"second": 5, u"third": 6}, + ], + ), + ) def test_w_array_of_struct_w_array(self): # SELECT [([1, 2, 3], 4), ([5, 6], 7)] - first = _Field('REPEATED', 'first', 'INTEGER') - second = _Field('REQUIRED', 'second', 'INTEGER') - col = _Field('REPEATED', 'col', 'RECORD', fields=[first, second]) - row = {u'f': [{u'v': [ - {u'v': {u'f': [ - {u'v': [{u'v': u'1'}, {u'v': u'2'}, {u'v': u'3'}]}, - {u'v': u'4'} - ]}}, - {u'v': {u'f': [ - {u'v': [{u'v': u'5'}, {u'v': u'6'}]}, - {u'v': u'7'} - ]}} - ]}]} + first = _Field("REPEATED", "first", "INTEGER") + second = _Field("REQUIRED", "second", "INTEGER") + col = _Field("REPEATED", "col", "RECORD", fields=[first, second]) + row = { + u"f": [ + { + u"v": [ + { + u"v": { + u"f": [ + {u"v": [{u"v": u"1"}, {u"v": u"2"}, {u"v": u"3"}]}, + {u"v": u"4"}, + ] + } + }, + { + u"v": { + u"f": [ + {u"v": [{u"v": u"5"}, {u"v": u"6"}]}, + {u"v": u"7"}, + ] + } + }, + ] + } + ] + } self.assertEqual( self._call_fut(row, schema=[col]), - ([ - {u'first': [1, 2, 3], u'second': 4}, - {u'first': [5, 6], u'second': 7}, - ],)) + ([{u"first": [1, 2, 3], u"second": 4}, {u"first": [5, 6], u"second": 7}],), + ) class Test_rows_from_json(unittest.TestCase): - def _call_fut(self, rows, schema): from google.cloud.bigquery._helpers import _rows_from_json @@ -546,46 +534,39 @@ def _call_fut(self, rows, schema): def test_w_record_subfield(self): from google.cloud.bigquery.table import Row - full_name = _Field('REQUIRED', 'full_name', 'STRING') - area_code = _Field('REQUIRED', 'area_code', 'STRING') - local_number = 
_Field('REQUIRED', 'local_number', 'STRING') - rank = _Field('REQUIRED', 'rank', 'INTEGER') - phone = _Field('NULLABLE', 'phone', 'RECORD', - fields=[area_code, local_number, rank]) - color = _Field('REPEATED', 'color', 'STRING') + full_name = _Field("REQUIRED", "full_name", "STRING") + area_code = _Field("REQUIRED", "area_code", "STRING") + local_number = _Field("REQUIRED", "local_number", "STRING") + rank = _Field("REQUIRED", "rank", "INTEGER") + phone = _Field( + "NULLABLE", "phone", "RECORD", fields=[area_code, local_number, rank] + ) + color = _Field("REPEATED", "color", "STRING") schema = [full_name, phone, color] rows = [ - {'f': [ - {'v': 'Phred Phlyntstone'}, - {'v': {'f': [{'v': '800'}, {'v': '555-1212'}, {'v': 1}]}}, - {'v': [{'v': 'orange'}, {'v': 'black'}]}, - ]}, - {'f': [ - {'v': 'Bharney Rhubble'}, - {'v': {'f': [{'v': '877'}, {'v': '768-5309'}, {'v': 2}]}}, - {'v': [{'v': 'brown'}]}, - ]}, - {'f': [ - {'v': 'Wylma Phlyntstone'}, - {'v': None}, - {'v': []}, - ]}, + { + "f": [ + {"v": "Phred Phlyntstone"}, + {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}}, + {"v": [{"v": "orange"}, {"v": "black"}]}, + ] + }, + { + "f": [ + {"v": "Bharney Rhubble"}, + {"v": {"f": [{"v": "877"}, {"v": "768-5309"}, {"v": 2}]}}, + {"v": [{"v": "brown"}]}, + ] + }, + {"f": [{"v": "Wylma Phlyntstone"}, {"v": None}, {"v": []}]}, ] - phred_phone = { - 'area_code': '800', - 'local_number': '555-1212', - 'rank': 1, - } - bharney_phone = { - 'area_code': '877', - 'local_number': '768-5309', - 'rank': 2, - } - f2i = {'full_name': 0, 'phone': 1, 'color': 2} + phred_phone = {"area_code": "800", "local_number": "555-1212", "rank": 1} + bharney_phone = {"area_code": "877", "local_number": "768-5309", "rank": 2} + f2i = {"full_name": 0, "phone": 1, "color": 2} expected = [ - Row(('Phred Phlyntstone', phred_phone, ['orange', 'black']), f2i), - Row(('Bharney Rhubble', bharney_phone, ['brown']), f2i), - Row(('Wylma Phlyntstone', None, []), f2i), + Row(("Phred Phlyntstone", phred_phone, ["orange", "black"]), f2i), + Row(("Bharney Rhubble", bharney_phone, ["brown"]), f2i), + Row(("Wylma Phlyntstone", None, []), f2i), ] coerced = self._call_fut(rows, schema) self.assertEqual(coerced, expected) @@ -594,57 +575,47 @@ def test_w_int64_float64_bool(self): from google.cloud.bigquery.table import Row # "Standard" SQL dialect uses 'INT64', 'FLOAT64', 'BOOL'. 
- candidate = _Field('REQUIRED', 'candidate', 'STRING') - votes = _Field('REQUIRED', 'votes', 'INT64') - percentage = _Field('REQUIRED', 'percentage', 'FLOAT64') - incumbent = _Field('REQUIRED', 'incumbent', 'BOOL') + candidate = _Field("REQUIRED", "candidate", "STRING") + votes = _Field("REQUIRED", "votes", "INT64") + percentage = _Field("REQUIRED", "percentage", "FLOAT64") + incumbent = _Field("REQUIRED", "incumbent", "BOOL") schema = [candidate, votes, percentage, incumbent] rows = [ - {'f': [ - {'v': 'Phred Phlyntstone'}, - {'v': 8}, - {'v': 0.25}, - {'v': 'true'}, - ]}, - {'f': [ - {'v': 'Bharney Rhubble'}, - {'v': 4}, - {'v': 0.125}, - {'v': 'false'}, - ]}, - {'f': [ - {'v': 'Wylma Phlyntstone'}, - {'v': 20}, - {'v': 0.625}, - {'v': 'false'}, - ]}, + {"f": [{"v": "Phred Phlyntstone"}, {"v": 8}, {"v": 0.25}, {"v": "true"}]}, + {"f": [{"v": "Bharney Rhubble"}, {"v": 4}, {"v": 0.125}, {"v": "false"}]}, + { + "f": [ + {"v": "Wylma Phlyntstone"}, + {"v": 20}, + {"v": 0.625}, + {"v": "false"}, + ] + }, ] - f2i = {'candidate': 0, 'votes': 1, 'percentage': 2, 'incumbent': 3} + f2i = {"candidate": 0, "votes": 1, "percentage": 2, "incumbent": 3} expected = [ - Row(('Phred Phlyntstone', 8, 0.25, True), f2i), - Row(('Bharney Rhubble', 4, 0.125, False), f2i), - Row(('Wylma Phlyntstone', 20, 0.625, False), f2i), + Row(("Phred Phlyntstone", 8, 0.25, True), f2i), + Row(("Bharney Rhubble", 4, 0.125, False), f2i), + Row(("Wylma Phlyntstone", 20, 0.625, False), f2i), ] coerced = self._call_fut(rows, schema) self.assertEqual(coerced, expected) class Test_int_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _int_to_json return _int_to_json(value) def test_w_int(self): - self.assertEqual(self._call_fut(123), '123') + self.assertEqual(self._call_fut(123), "123") def test_w_string(self): - self.assertEqual(self._call_fut('123'), '123') + self.assertEqual(self._call_fut("123"), "123") class Test_float_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _float_to_json @@ -655,7 +626,6 @@ def test_w_float(self): class Test_decimal_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _decimal_to_json @@ -665,31 +635,29 @@ def test_w_float(self): self.assertEqual(self._call_fut(1.23), 1.23) def test_w_string(self): - self.assertEqual(self._call_fut('1.23'), '1.23') + self.assertEqual(self._call_fut("1.23"), "1.23") def test_w_decimal(self): - self.assertEqual(self._call_fut(decimal.Decimal('1.23')), '1.23') + self.assertEqual(self._call_fut(decimal.Decimal("1.23")), "1.23") class Test_bool_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _bool_to_json return _bool_to_json(value) def test_w_true(self): - self.assertEqual(self._call_fut(True), 'true') + self.assertEqual(self._call_fut(True), "true") def test_w_false(self): - self.assertEqual(self._call_fut(False), 'false') + self.assertEqual(self._call_fut(False), "false") def test_w_string(self): - self.assertEqual(self._call_fut('false'), 'false') + self.assertEqual(self._call_fut("false"), "false") class Test_bytes_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _bytes_to_json @@ -700,14 +668,13 @@ def test_w_non_bytes(self): self.assertIs(self._call_fut(non_bytes), non_bytes) def test_w_bytes(self): - source = b'source' - expected = u'c291cmNl' + source = b"source" + expected = u"c291cmNl" converted = 
self._call_fut(source) self.assertEqual(converted, expected) class Test_timestamp_to_json_parameter(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _timestamp_to_json_parameter @@ -717,35 +684,32 @@ def test_w_float(self): self.assertEqual(self._call_fut(1.234567), 1.234567) def test_w_string(self): - ZULU = '2016-12-20 15:58:27.339328+00:00' + ZULU = "2016-12-20 15:58:27.339328+00:00" self.assertEqual(self._call_fut(ZULU), ZULU) def test_w_datetime_wo_zone(self): - ZULU = '2016-12-20 15:58:27.339328+00:00' + ZULU = "2016-12-20 15:58:27.339328+00:00" when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) self.assertEqual(self._call_fut(when), ZULU) def test_w_datetime_w_non_utc_zone(self): class _Zone(datetime.tzinfo): - def utcoffset(self, _): return datetime.timedelta(minutes=-240) - ZULU = '2016-12-20 19:58:27.339328+00:00' - when = datetime.datetime( - 2016, 12, 20, 15, 58, 27, 339328, tzinfo=_Zone()) + ZULU = "2016-12-20 19:58:27.339328+00:00" + when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=_Zone()) self.assertEqual(self._call_fut(when), ZULU) def test_w_datetime_w_utc_zone(self): from google.cloud._helpers import UTC - ZULU = '2016-12-20 15:58:27.339328+00:00' + ZULU = "2016-12-20 15:58:27.339328+00:00" when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) self.assertEqual(self._call_fut(when), ZULU) class Test_timestamp_to_json_row(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _timestamp_to_json_row @@ -755,108 +719,99 @@ def test_w_float(self): self.assertEqual(self._call_fut(1.234567), 1.234567) def test_w_string(self): - ZULU = '2016-12-20 15:58:27.339328+00:00' + ZULU = "2016-12-20 15:58:27.339328+00:00" self.assertEqual(self._call_fut(ZULU), ZULU) def test_w_datetime(self): from google.cloud._helpers import _microseconds_from_datetime when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328) - self.assertEqual( - self._call_fut(when), _microseconds_from_datetime(when) / 1e6) + self.assertEqual(self._call_fut(when), _microseconds_from_datetime(when) / 1e6) class Test_datetime_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _datetime_to_json return _datetime_to_json(value) def test_w_string(self): - RFC3339 = '2016-12-03T14:14:51Z' + RFC3339 = "2016-12-03T14:14:51Z" self.assertEqual(self._call_fut(RFC3339), RFC3339) def test_w_datetime(self): from google.cloud._helpers import UTC when = datetime.datetime(2016, 12, 3, 14, 11, 27, 123456, tzinfo=UTC) - self.assertEqual(self._call_fut(when), '2016-12-03T14:11:27.123456') + self.assertEqual(self._call_fut(when), "2016-12-03T14:11:27.123456") class Test_date_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _date_to_json return _date_to_json(value) def test_w_string(self): - RFC3339 = '2016-12-03' + RFC3339 = "2016-12-03" self.assertEqual(self._call_fut(RFC3339), RFC3339) def test_w_datetime(self): when = datetime.date(2016, 12, 3) - self.assertEqual(self._call_fut(when), '2016-12-03') + self.assertEqual(self._call_fut(when), "2016-12-03") class Test_time_to_json(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _time_to_json return _time_to_json(value) def test_w_string(self): - RFC3339 = '12:13:41' + RFC3339 = "12:13:41" self.assertEqual(self._call_fut(RFC3339), RFC3339) def test_w_datetime(self): when = datetime.time(12, 13, 41) - 
self.assertEqual(self._call_fut(when), '12:13:41') + self.assertEqual(self._call_fut(when), "12:13:41") class Test_snake_to_camel_case(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _snake_to_camel_case return _snake_to_camel_case(value) def test_w_snake_case_string(self): - self.assertEqual(self._call_fut('friendly_name'), 'friendlyName') + self.assertEqual(self._call_fut("friendly_name"), "friendlyName") def test_w_camel_case_string(self): - self.assertEqual(self._call_fut('friendlyName'), 'friendlyName') + self.assertEqual(self._call_fut("friendlyName"), "friendlyName") class Test__get_sub_prop(unittest.TestCase): - def _call_fut(self, container, keys, **kw): from google.cloud.bigquery._helpers import _get_sub_prop return _get_sub_prop(container, keys, **kw) def test_w_empty_container_default_default(self): - self.assertIsNone(self._call_fut({}, ['key1'])) + self.assertIsNone(self._call_fut({}, ["key1"])) def test_w_missing_key_explicit_default(self): - self.assertEqual(self._call_fut({'key2': 2}, ['key1'], default=1), 1) + self.assertEqual(self._call_fut({"key2": 2}, ["key1"], default=1), 1) def test_w_matching_single_key(self): - self.assertEqual(self._call_fut({'key1': 1}, ['key1']), 1) + self.assertEqual(self._call_fut({"key1": 1}, ["key1"]), 1) def test_w_matching_first_key_missing_second_key(self): - self.assertIsNone( - self._call_fut({'key1': {'key3': 3}}, ['key1', 'key2'])) + self.assertIsNone(self._call_fut({"key1": {"key3": 3}}, ["key1", "key2"])) def test_w_matching_first_key_matching_second_key(self): - self.assertEqual( - self._call_fut({'key1': {'key2': 2}}, ['key1', 'key2']), 2) + self.assertEqual(self._call_fut({"key1": {"key2": 2}}, ["key1", "key2"]), 2) class Test__set_sub_prop(unittest.TestCase): - def _call_fut(self, container, keys, value): from google.cloud.bigquery._helpers import _set_sub_prop @@ -864,57 +819,55 @@ def _call_fut(self, container, keys, value): def test_w_empty_container_single_key(self): container = {} - self._call_fut(container, ['key1'], 'value') - self.assertEqual(container, {'key1': 'value'}) + self._call_fut(container, ["key1"], "value") + self.assertEqual(container, {"key1": "value"}) def test_w_empty_container_nested_keys(self): container = {} - self._call_fut(container, ['key1', 'key2', 'key3'], 'value') - self.assertEqual(container, {'key1': {'key2': {'key3': 'value'}}}) + self._call_fut(container, ["key1", "key2", "key3"], "value") + self.assertEqual(container, {"key1": {"key2": {"key3": "value"}}}) def test_w_existing_value(self): - container = {'key1': 'before'} - self._call_fut(container, ['key1'], 'after') - self.assertEqual(container, {'key1': 'after'}) + container = {"key1": "before"} + self._call_fut(container, ["key1"], "after") + self.assertEqual(container, {"key1": "after"}) def test_w_nested_keys_existing_value(self): - container = {'key1': {'key2': {'key3': 'before'}}} - self._call_fut(container, ['key1', 'key2', 'key3'], 'after') - self.assertEqual(container, {'key1': {'key2': {'key3': 'after'}}}) + container = {"key1": {"key2": {"key3": "before"}}} + self._call_fut(container, ["key1", "key2", "key3"], "after") + self.assertEqual(container, {"key1": {"key2": {"key3": "after"}}}) class Test__del_sub_prop(unittest.TestCase): - def _call_fut(self, container, keys): from google.cloud.bigquery._helpers import _del_sub_prop return _del_sub_prop(container, keys) def test_w_single_key(self): - container = {'key1': 'value'} - self._call_fut(container, ['key1']) + container = {"key1": 
"value"} + self._call_fut(container, ["key1"]) self.assertEqual(container, {}) def test_w_empty_container_nested_keys(self): container = {} - self._call_fut(container, ['key1', 'key2', 'key3']) - self.assertEqual(container, {'key1': {'key2': {}}}) + self._call_fut(container, ["key1", "key2", "key3"]) + self.assertEqual(container, {"key1": {"key2": {}}}) def test_w_existing_value_nested_keys(self): - container = {'key1': {'key2': {'key3': 'value'}}} - self._call_fut(container, ['key1', 'key2', 'key3']) - self.assertEqual(container, {'key1': {'key2': {}}}) + container = {"key1": {"key2": {"key3": "value"}}} + self._call_fut(container, ["key1", "key2", "key3"]) + self.assertEqual(container, {"key1": {"key2": {}}}) class Test__int_or_none(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _int_or_none return _int_or_none(value) def test_w_num_string(self): - self.assertEqual(self._call_fut('123'), 123) + self.assertEqual(self._call_fut("123"), 123) def test_w_none(self): self.assertIsNone(self._call_fut(None)) @@ -924,29 +877,27 @@ def test_w_int(self): def test_w_non_num_string(self): with self.assertRaises(ValueError): - self._call_fut('ham') + self._call_fut("ham") class Test__str_or_none(unittest.TestCase): - def _call_fut(self, value): from google.cloud.bigquery._helpers import _str_or_none return _str_or_none(value) def test_w_int(self): - self.assertEqual(self._call_fut(123), '123') + self.assertEqual(self._call_fut(123), "123") def test_w_none(self): self.assertIsNone(self._call_fut(None)) def test_w_str(self): - self.assertEqual(self._call_fut('ham'), 'ham') + self.assertEqual(self._call_fut("ham"), "ham") class _Field(object): - - def __init__(self, mode, name='unknown', field_type='UNKNOWN', fields=()): + def __init__(self, mode, name="unknown", field_type="UNKNOWN", fields=()): self.mode = mode self.name = name self.field_type = field_type diff --git a/bigquery/tests/unit/test__http.py b/bigquery/tests/unit/test__http.py index c1cd48ffdca8..890046ee05fa 100644 --- a/bigquery/tests/unit/test__http.py +++ b/bigquery/tests/unit/test__http.py @@ -19,7 +19,6 @@ class TestConnection(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery._http import Connection @@ -31,26 +30,20 @@ def _make_one(self, *args, **kw): def test_build_api_url_no_extra_query_params(self): conn = self._make_one(object()) - URI = '/'.join([ - conn.API_BASE_URL, - 'bigquery', - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo'), URI) + URI = "/".join([conn.API_BASE_URL, "bigquery", conn.API_VERSION, "foo"]) + self.assertEqual(conn.build_api_url("/foo"), URI) def test_build_api_url_w_extra_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit conn = self._make_one(object()) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) + uri = conn.build_api_url("/foo", {"bar": "baz"}) scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, - '/'.join(['', 'bigquery', conn.API_VERSION, 'foo'])) + self.assertEqual("%s://%s" % (scheme, netloc), conn.API_BASE_URL) + self.assertEqual(path, "/".join(["", "bigquery", conn.API_VERSION, "foo"])) parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') + self.assertEqual(parms["bar"], "baz") def test_extra_headers(self): from google.cloud import _http as base_http @@ -59,26 +52,22 @@ def test_extra_headers(self): http = 
mock.create_autospec(requests.Session, instance=True) response = requests.Response() response.status_code = 200 - data = b'brent-spiner' + data = b"brent-spiner" response._content = data http.request.return_value = response - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock(_http=http, spec=["_http"]) conn = self._make_one(client) - req_data = 'req-data-boring' - result = conn.api_request( - 'GET', '/rainbow', data=req_data, expect_json=False) + req_data = "req-data-boring" + result = conn.api_request("GET", "/rainbow", data=req_data, expect_json=False) self.assertEqual(result, data) expected_headers = { - 'Accept-Encoding': 'gzip', + "Accept-Encoding": "gzip", base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - 'User-Agent': conn.USER_AGENT, + "User-Agent": conn.USER_AGENT, } - expected_uri = conn.build_api_url('/rainbow') + expected_uri = conn.build_api_url("/rainbow") http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method='GET', - url=expected_uri, + data=req_data, headers=expected_headers, method="GET", url=expected_uri ) diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index 69535f62a00e..0fc14b160a9c 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -25,6 +25,7 @@ import six from six.moves import http_client import pytest + try: import pandas except (ImportError, AttributeError): # pragma: NO COVER @@ -48,38 +49,39 @@ def _make_connection(*responses): from google.cloud.exceptions import NotFound mock_conn = mock.create_autospec(google.cloud.bigquery._http.Connection) - mock_conn.USER_AGENT = 'testing 1.2.3' - mock_conn.api_request.side_effect = list(responses) + [NotFound('miss')] + mock_conn.USER_AGENT = "testing 1.2.3" + mock_conn.api_request.side_effect = list(responses) + [NotFound("miss")] return mock_conn def _make_list_partitons_meta_info(project, dataset_id, table_id, num_rows=0): return { - 'tableReference': - { - 'projectId': project, - 'datasetId': dataset_id, - 'tableId': '{}$__PARTITIONS_SUMMARY__'.format(table_id), - }, - 'schema': {'fields': [ - {'name': 'project_id', 'type': 'STRING', 'mode': 'NULLABLE'}, - {'name': 'dataset_id', 'type': 'STRING', 'mode': 'NULLABLE'}, - {'name': 'table_id', 'type': 'STRING', 'mode': 'NULLABLE'}, - {'name': 'partition_id', 'type': 'STRING', 'mode': 'NULLABLE'} - ]}, - 'etag': 'ETAG', - 'numRows': num_rows, + "tableReference": { + "projectId": project, + "datasetId": dataset_id, + "tableId": "{}$__PARTITIONS_SUMMARY__".format(table_id), + }, + "schema": { + "fields": [ + {"name": "project_id", "type": "STRING", "mode": "NULLABLE"}, + {"name": "dataset_id", "type": "STRING", "mode": "NULLABLE"}, + {"name": "table_id", "type": "STRING", "mode": "NULLABLE"}, + {"name": "partition_id", "type": "STRING", "mode": "NULLABLE"}, + ] + }, + "etag": "ETAG", + "numRows": num_rows, } class TestClient(unittest.TestCase): - PROJECT = 'PROJECT' - DS_ID = 'DATASET_ID' - TABLE_ID = 'TABLE_ID' + PROJECT = "PROJECT" + DS_ID = "DATASET_ID" + TABLE_ID = "TABLE_ID" TABLE_REF = DatasetReference(PROJECT, DS_ID).table(TABLE_ID) - KMS_KEY_NAME = 'projects/1/locations/global/keyRings/1/cryptoKeys/1' - LOCATION = 'us-central' + KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + LOCATION = "us-central" @staticmethod def _get_target_class(): @@ -95,8 +97,7 @@ def test_ctor_defaults(self): creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + 
client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) self.assertIsInstance(client._connection, Connection) self.assertIs(client._connection.credentials, creds) self.assertIs(client._connection.http, http) @@ -107,9 +108,10 @@ def test_ctor_w_location(self): creds = _make_credentials() http = object() - location = 'us-central' - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http, location=location) + location = "us-central" + client = self._make_one( + project=self.PROJECT, credentials=creds, _http=http, location=location + ) self.assertIsInstance(client._connection, Connection) self.assertIs(client._connection.credentials, creds) self.assertIs(client._connection.http, http) @@ -121,13 +123,17 @@ def test_ctor_w_query_job_config(self): creds = _make_credentials() http = object() - location = 'us-central' + location = "us-central" job_config = QueryJobConfig() job_config.dry_run = True - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http, location=location, - default_query_job_config=job_config) + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + location=location, + default_query_job_config=job_config, + ) self.assertIsInstance(client._connection, Connection) self.assertIs(client._connection.credentials, creds) self.assertIs(client._connection.http, http) @@ -145,16 +151,17 @@ def test__get_query_results_miss_w_explicit_project_and_timeout(self): with self.assertRaises(NotFound): client._get_query_results( - 'nothere', None, - project='other-project', + "nothere", + None, + project="other-project", location=self.LOCATION, - timeout_ms=500) + timeout_ms=500, + ) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/other-project/queries/nothere', - query_params={ - 'maxResults': 0, 'timeoutMs': 500, 'location': self.LOCATION}, + method="GET", + path="/projects/other-project/queries/nothere", + query_params={"maxResults": 0, "timeoutMs": 500, "location": self.LOCATION}, ) def test__get_query_results_miss_w_client_location(self): @@ -165,40 +172,30 @@ def test__get_query_results_miss_w_client_location(self): conn = client._connection = _make_connection() with self.assertRaises(NotFound): - client._get_query_results('nothere', None) + client._get_query_results("nothere", None) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/PROJECT/queries/nothere', - query_params={'maxResults': 0, 'location': self.LOCATION}) + method="GET", + path="/projects/PROJECT/queries/nothere", + query_params={"maxResults": 0, "location": self.LOCATION}, + ) def test__get_query_results_hit(self): - job_id = 'query_job' + job_id = "query_job" data = { - 'kind': 'bigquery#getQueryResultsResponse', - 'etag': 'some-tag', - 'schema': { - 'fields': [ - { - 'name': 'title', - 'type': 'STRING', - 'mode': 'NULLABLE' - }, - { - 'name': 'unique_words', - 'type': 'INTEGER', - 'mode': 'NULLABLE' - } + "kind": "bigquery#getQueryResultsResponse", + "etag": "some-tag", + "schema": { + "fields": [ + {"name": "title", "type": "STRING", "mode": "NULLABLE"}, + {"name": "unique_words", "type": "INTEGER", "mode": "NULLABLE"}, ] }, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': job_id, - }, - 'totalRows': '10', - 'totalBytesProcessed': '2464625', - 'jobComplete': True, - 'cacheHit': False, + "jobReference": {"projectId": self.PROJECT, "jobId": job_id}, + "totalRows": "10", + "totalBytesProcessed": "2464625", + "jobComplete": True, + "cacheHit": False, } creds = 
_make_credentials() @@ -210,63 +207,58 @@ def test__get_query_results_hit(self): self.assertTrue(query_results.complete) def test_get_service_account_email(self): - path = '/projects/%s/serviceAccount' % (self.PROJECT,) + path = "/projects/%s/serviceAccount" % (self.PROJECT,) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) - email = 'bq-123@bigquery-encryption.iam.gserviceaccount.com' - resource = { - 'kind': 'bigquery#getServiceAccountResponse', - 'email': email, - } + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + email = "bq-123@bigquery-encryption.iam.gserviceaccount.com" + resource = {"kind": "bigquery#getServiceAccountResponse", "email": email} conn = client._connection = _make_connection(resource) service_account_email = client.get_service_account_email() - conn.api_request.assert_called_once_with(method='GET', path=path) + conn.api_request.assert_called_once_with(method="GET", path=path) self.assertEqual(service_account_email, email) def test_get_service_account_email_w_alternate_project(self): - project = 'my-alternate-project' - path = '/projects/%s/serviceAccount' % (project,) + project = "my-alternate-project" + path = "/projects/%s/serviceAccount" % (project,) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) - email = 'bq-123@bigquery-encryption.iam.gserviceaccount.com' - resource = { - 'kind': 'bigquery#getServiceAccountResponse', - 'email': email, - } + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + email = "bq-123@bigquery-encryption.iam.gserviceaccount.com" + resource = {"kind": "bigquery#getServiceAccountResponse", "email": email} conn = client._connection = _make_connection(resource) - service_account_email = client.get_service_account_email( - project=project) + service_account_email = client.get_service_account_email(project=project) - conn.api_request.assert_called_once_with(method='GET', path=path) + conn.api_request.assert_called_once_with(method="GET", path=path) self.assertEqual(service_account_email, email) def test_list_projects_defaults(self): from google.cloud.bigquery.client import Project - PROJECT_1 = 'PROJECT_ONE' - PROJECT_2 = 'PROJECT_TWO' - TOKEN = 'TOKEN' + PROJECT_1 = "PROJECT_ONE" + PROJECT_2 = "PROJECT_TWO" + TOKEN = "TOKEN" DATA = { - 'nextPageToken': TOKEN, - 'projects': [ - {'kind': 'bigquery#project', - 'id': PROJECT_1, - 'numericId': 1, - 'projectReference': {'projectId': PROJECT_1}, - 'friendlyName': 'One'}, - {'kind': 'bigquery#project', - 'id': PROJECT_2, - 'numericId': 2, - 'projectReference': {'projectId': PROJECT_2}, - 'friendlyName': 'Two'}, - ] + "nextPageToken": TOKEN, + "projects": [ + { + "kind": "bigquery#project", + "id": PROJECT_1, + "numericId": 1, + "projectReference": {"projectId": PROJECT_1}, + "friendlyName": "One", + }, + { + "kind": "bigquery#project", + "id": PROJECT_2, + "numericId": 2, + "projectReference": {"projectId": PROJECT_2}, + "friendlyName": "Two", + }, + ], } creds = _make_credentials() client = self._make_one(PROJECT_1, creds) @@ -277,19 +269,20 @@ def test_list_projects_defaults(self): projects = list(page) token = iterator.next_page_token - self.assertEqual(len(projects), len(DATA['projects'])) - for found, expected in zip(projects, DATA['projects']): + self.assertEqual(len(projects), len(DATA["projects"])) + for found, expected in zip(projects, DATA["projects"]): self.assertIsInstance(found, 
Project) - self.assertEqual(found.project_id, expected['id']) - self.assertEqual(found.numeric_id, expected['numericId']) - self.assertEqual(found.friendly_name, expected['friendlyName']) + self.assertEqual(found.project_id, expected["id"]) + self.assertEqual(found.numeric_id, expected["numericId"]) + self.assertEqual(found.friendly_name, expected["friendlyName"]) self.assertEqual(token, TOKEN) conn.api_request.assert_called_once_with( - method='GET', path='/projects', query_params={}) + method="GET", path="/projects", query_params={} + ) def test_list_projects_explicit_response_missing_projects_key(self): - TOKEN = 'TOKEN' + TOKEN = "TOKEN" DATA = {} creds = _make_credentials() client = self._make_one(self.PROJECT, creds) @@ -304,31 +297,40 @@ def test_list_projects_explicit_response_missing_projects_key(self): self.assertIsNone(token) conn.api_request.assert_called_once_with( - method='GET', - path='/projects', - query_params={'maxResults': 3, 'pageToken': TOKEN}) + method="GET", + path="/projects", + query_params={"maxResults": 3, "pageToken": TOKEN}, + ) def test_list_datasets_defaults(self): from google.cloud.bigquery.dataset import DatasetListItem - DATASET_1 = 'dataset_one' - DATASET_2 = 'dataset_two' - PATH = 'projects/%s/datasets' % self.PROJECT - TOKEN = 'TOKEN' + DATASET_1 = "dataset_one" + DATASET_2 = "dataset_two" + PATH = "projects/%s/datasets" % self.PROJECT + TOKEN = "TOKEN" DATA = { - 'nextPageToken': TOKEN, - 'datasets': [ - {'kind': 'bigquery#dataset', - 'id': '%s:%s' % (self.PROJECT, DATASET_1), - 'datasetReference': {'datasetId': DATASET_1, - 'projectId': self.PROJECT}, - 'friendlyName': None}, - {'kind': 'bigquery#dataset', - 'id': '%s:%s' % (self.PROJECT, DATASET_2), - 'datasetReference': {'datasetId': DATASET_2, - 'projectId': self.PROJECT}, - 'friendlyName': 'Two'}, - ] + "nextPageToken": TOKEN, + "datasets": [ + { + "kind": "bigquery#dataset", + "id": "%s:%s" % (self.PROJECT, DATASET_1), + "datasetReference": { + "datasetId": DATASET_1, + "projectId": self.PROJECT, + }, + "friendlyName": None, + }, + { + "kind": "bigquery#dataset", + "id": "%s:%s" % (self.PROJECT, DATASET_2), + "datasetReference": { + "datasetId": DATASET_2, + "projectId": self.PROJECT, + }, + "friendlyName": "Two", + }, + ], } creds = _make_credentials() client = self._make_one(self.PROJECT, creds) @@ -339,40 +341,40 @@ def test_list_datasets_defaults(self): datasets = list(page) token = iterator.next_page_token - self.assertEqual(len(datasets), len(DATA['datasets'])) - for found, expected in zip(datasets, DATA['datasets']): + self.assertEqual(len(datasets), len(DATA["datasets"])) + for found, expected in zip(datasets, DATA["datasets"]): self.assertIsInstance(found, DatasetListItem) - self.assertEqual(found.full_dataset_id, expected['id']) - self.assertEqual(found.friendly_name, expected['friendlyName']) + self.assertEqual(found.full_dataset_id, expected["id"]) + self.assertEqual(found.friendly_name, expected["friendlyName"]) self.assertEqual(token, TOKEN) conn.api_request.assert_called_once_with( - method='GET', path='/%s' % PATH, query_params={}) + method="GET", path="/%s" % PATH, query_params={} + ) def test_list_datasets_w_project(self): creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection({}) - list(client.list_datasets(project='other-project')) + list(client.list_datasets(project="other-project")) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/other-project/datasets', - query_params={}) + 
method="GET", path="/projects/other-project/datasets", query_params={} + ) def test_list_datasets_explicit_response_missing_datasets_key(self): - PATH = 'projects/%s/datasets' % self.PROJECT - TOKEN = 'TOKEN' - FILTER = 'FILTER' + PATH = "projects/%s/datasets" % self.PROJECT + TOKEN = "TOKEN" + FILTER = "FILTER" DATA = {} creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection(DATA) iterator = client.list_datasets( - include_all=True, filter=FILTER, - max_results=3, page_token=TOKEN) + include_all=True, filter=FILTER, max_results=3, page_token=TOKEN + ) page = six.next(iterator.pages) datasets = list(page) token = iterator.next_page_token @@ -381,22 +383,22 @@ def test_list_datasets_explicit_response_missing_datasets_key(self): self.assertIsNone(token) conn.api_request.assert_called_once_with( - method='GET', - path='/%s' % PATH, + method="GET", + path="/%s" % PATH, query_params={ - 'all': True, - 'filter': FILTER, - 'maxResults': 3, - 'pageToken': TOKEN, - }) + "all": True, + "filter": FILTER, + "maxResults": 3, + "pageToken": TOKEN, + }, + ) def test_dataset_with_specified_project(self): from google.cloud.bigquery.dataset import DatasetReference creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) dataset = client.dataset(self.DS_ID, self.PROJECT) self.assertIsInstance(dataset, DatasetReference) self.assertEqual(dataset.dataset_id, self.DS_ID) @@ -407,8 +409,7 @@ def test_dataset_with_default_project(self): creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) dataset = client.dataset(self.DS_ID) self.assertIsInstance(dataset, DatasetReference) self.assertEqual(dataset.dataset_id, self.DS_ID) @@ -417,74 +418,66 @@ def test_dataset_with_default_project(self): def test_get_dataset(self): from google.cloud.exceptions import ServerError - path = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) resource = { - 'id': '%s:%s' % (self.PROJECT, self.DS_ID), - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - }, + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, } conn = client._connection = _make_connection(resource) dataset_ref = client.dataset(self.DS_ID) dataset = client.get_dataset(dataset_ref) - conn.api_request.assert_called_once_with( - method='GET', path='/%s' % path) + conn.api_request.assert_called_once_with(method="GET", path="/%s" % path) self.assertEqual(dataset.dataset_id, self.DS_ID) # Test retry. # Not a cloud API exception (missing 'errors' field). - client._connection = _make_connection(Exception(''), resource) + client._connection = _make_connection(Exception(""), resource) with self.assertRaises(Exception): client.get_dataset(dataset_ref) # Zero-length errors field. 
- client._connection = _make_connection(ServerError(''), resource) + client._connection = _make_connection(ServerError(""), resource) with self.assertRaises(ServerError): client.get_dataset(dataset_ref) # Non-retryable reason. client._connection = _make_connection( - ServerError('', errors=[{'reason': 'serious'}]), - resource) + ServerError("", errors=[{"reason": "serious"}]), resource + ) with self.assertRaises(ServerError): client.get_dataset(dataset_ref) # Retryable reason, but retry is disabled. client._connection = _make_connection( - ServerError('', errors=[{'reason': 'backendError'}]), - resource) + ServerError("", errors=[{"reason": "backendError"}]), resource + ) with self.assertRaises(ServerError): client.get_dataset(dataset_ref, retry=None) # Retryable reason, default retry: success. client._connection = _make_connection( - ServerError('', errors=[{'reason': 'backendError'}]), - resource) + ServerError("", errors=[{"reason": "backendError"}]), resource + ) dataset = client.get_dataset( # Test with a string for dataset ID. - dataset_ref.dataset_id, + dataset_ref.dataset_id ) self.assertEqual(dataset.dataset_id, self.DS_ID) def test_create_dataset_minimal(self): from google.cloud.bigquery.dataset import Dataset - PATH = 'projects/%s/datasets' % self.PROJECT + PATH = "projects/%s/datasets" % self.PROJECT RESOURCE = { - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - }, - 'etag': "etag", - 'id': "%s:%s" % (self.PROJECT, self.DS_ID), + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": "etag", + "id": "%s:%s" % (self.PROJECT, self.DS_ID), } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -497,57 +490,52 @@ def test_create_dataset_minimal(self): self.assertEqual(after.dataset_id, self.DS_ID) self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE['etag']) - self.assertEqual(after.full_dataset_id, RESOURCE['id']) + self.assertEqual(after.etag, RESOURCE["etag"]) + self.assertEqual(after.full_dataset_id, RESOURCE["id"]) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, + method="POST", + path="/%s" % PATH, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'labels': {}, - }) + "labels": {}, + }, + ) def test_create_dataset_w_attrs(self): from google.cloud.bigquery.dataset import Dataset, AccessEntry - PATH = 'projects/%s/datasets' % self.PROJECT - DESCRIPTION = 'DESC' - FRIENDLY_NAME = 'FN' - LOCATION = 'US' - USER_EMAIL = 'phred@example.com' - LABELS = {'color': 'red'} + PATH = "projects/%s/datasets" % self.PROJECT + DESCRIPTION = "DESC" + FRIENDLY_NAME = "FN" + LOCATION = "US" + USER_EMAIL = "phred@example.com" + LABELS = {"color": "red"} VIEW = { - 'projectId': 'my-proj', - 'datasetId': 'starry-skies', - 'tableId': 'northern-hemisphere', + "projectId": "my-proj", + "datasetId": "starry-skies", + "tableId": "northern-hemisphere", } RESOURCE = { - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - }, - 'etag': "etag", - 'id': "%s:%s" % (self.PROJECT, self.DS_ID), - 'description': DESCRIPTION, - 'friendlyName': FRIENDLY_NAME, - 'location': LOCATION, - 'defaultTableExpirationMs': '3600', - 'labels': LABELS, - 'access': [ - {'role': 'OWNER', 'userByEmail': USER_EMAIL}, - {'view': VIEW}, - ], + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + 
"etag": "etag", + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "description": DESCRIPTION, + "friendlyName": FRIENDLY_NAME, + "location": LOCATION, + "defaultTableExpirationMs": "3600", + "labels": LABELS, + "access": [{"role": "OWNER", "userByEmail": USER_EMAIL}, {"view": VIEW}], } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = _make_connection(RESOURCE) entries = [ - AccessEntry('OWNER', 'userByEmail', USER_EMAIL), - AccessEntry(None, 'view', VIEW), + AccessEntry("OWNER", "userByEmail", USER_EMAIL), + AccessEntry(None, "view", VIEW), ] ds_ref = client.dataset(self.DS_ID) @@ -563,8 +551,8 @@ def test_create_dataset_w_attrs(self): self.assertEqual(after.dataset_id, self.DS_ID) self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE['etag']) - self.assertEqual(after.full_dataset_id, RESOURCE['id']) + self.assertEqual(after.etag, RESOURCE["etag"]) + self.assertEqual(after.full_dataset_id, RESOURCE["id"]) self.assertEqual(after.description, DESCRIPTION) self.assertEqual(after.friendly_name, FRIENDLY_NAME) self.assertEqual(after.location, LOCATION) @@ -572,34 +560,34 @@ def test_create_dataset_w_attrs(self): self.assertEqual(after.labels, LABELS) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, + method="POST", + path="/%s" % PATH, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'description': DESCRIPTION, - 'friendlyName': FRIENDLY_NAME, - 'location': LOCATION, - 'defaultTableExpirationMs': '3600', - 'access': [ - {'role': 'OWNER', 'userByEmail': USER_EMAIL}, - {'view': VIEW}, + "description": DESCRIPTION, + "friendlyName": FRIENDLY_NAME, + "location": LOCATION, + "defaultTableExpirationMs": "3600", + "access": [ + {"role": "OWNER", "userByEmail": USER_EMAIL}, + {"view": VIEW}, ], - 'labels': LABELS, - }) + "labels": LABELS, + }, + ) def test_create_dataset_w_custom_property(self): # The library should handle sending properties to the API that are not # yet part of the library from google.cloud.bigquery.dataset import Dataset - path = '/projects/%s/datasets' % self.PROJECT + path = "/projects/%s/datasets" % self.PROJECT resource = { - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'newAlphaProperty': 'unreleased property', + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "newAlphaProperty": "unreleased property", } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -607,42 +595,41 @@ def test_create_dataset_w_custom_property(self): ds_ref = client.dataset(self.DS_ID) before = Dataset(ds_ref) - before._properties['newAlphaProperty'] = 'unreleased property' + before._properties["newAlphaProperty"] = "unreleased property" after = client.create_dataset(before) self.assertEqual(after.dataset_id, self.DS_ID) self.assertEqual(after.project, self.PROJECT) - self.assertEqual( - after._properties['newAlphaProperty'], 'unreleased property') + self.assertEqual(after._properties["newAlphaProperty"], "unreleased property") conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=path, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'newAlphaProperty': 'unreleased property', - 'labels': {}, - } + 
"newAlphaProperty": "unreleased property", + "labels": {}, + }, ) def test_create_dataset_w_client_location_wo_dataset_location(self): from google.cloud.bigquery.dataset import Dataset - PATH = 'projects/%s/datasets' % self.PROJECT + PATH = "projects/%s/datasets" % self.PROJECT RESOURCE = { - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'etag': "etag", - 'id': "%s:%s" % (self.PROJECT, self.DS_ID), - 'location': self.LOCATION, + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": "etag", + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "location": self.LOCATION, } creds = _make_credentials() client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION) + project=self.PROJECT, credentials=creds, location=self.LOCATION + ) conn = client._connection = _make_connection(RESOURCE) ds_ref = client.dataset(self.DS_ID) @@ -652,39 +639,38 @@ def test_create_dataset_w_client_location_wo_dataset_location(self): self.assertEqual(after.dataset_id, self.DS_ID) self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE['etag']) - self.assertEqual(after.full_dataset_id, RESOURCE['id']) + self.assertEqual(after.etag, RESOURCE["etag"]) + self.assertEqual(after.full_dataset_id, RESOURCE["id"]) self.assertEqual(after.location, self.LOCATION) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, + method="POST", + path="/%s" % PATH, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'labels': {}, - 'location': self.LOCATION, - }) + "labels": {}, + "location": self.LOCATION, + }, + ) def test_create_dataset_w_client_location_w_dataset_location(self): from google.cloud.bigquery.dataset import Dataset - PATH = 'projects/%s/datasets' % self.PROJECT - OTHER_LOCATION = 'EU' + PATH = "projects/%s/datasets" % self.PROJECT + OTHER_LOCATION = "EU" RESOURCE = { - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - }, - 'etag': "etag", - 'id': "%s:%s" % (self.PROJECT, self.DS_ID), - 'location': OTHER_LOCATION, + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": "etag", + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "location": OTHER_LOCATION, } creds = _make_credentials() client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION) + project=self.PROJECT, credentials=creds, location=self.LOCATION + ) conn = client._connection = _make_connection(RESOURCE) ds_ref = client.dataset(self.DS_ID) @@ -695,139 +681,141 @@ def test_create_dataset_w_client_location_w_dataset_location(self): self.assertEqual(after.dataset_id, self.DS_ID) self.assertEqual(after.project, self.PROJECT) - self.assertEqual(after.etag, RESOURCE['etag']) - self.assertEqual(after.full_dataset_id, RESOURCE['id']) + self.assertEqual(after.etag, RESOURCE["etag"]) + self.assertEqual(after.full_dataset_id, RESOURCE["id"]) self.assertEqual(after.location, OTHER_LOCATION) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, + method="POST", + path="/%s" % PATH, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'labels': {}, - 'location': OTHER_LOCATION, - }) + "labels": {}, + "location": OTHER_LOCATION, + }, + ) def test_create_dataset_w_reference(self): - path = 
'/projects/%s/datasets' % self.PROJECT + path = "/projects/%s/datasets" % self.PROJECT resource = { - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'etag': "etag", - 'id': "%s:%s" % (self.PROJECT, self.DS_ID), - 'location': self.LOCATION, + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": "etag", + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "location": self.LOCATION, } creds = _make_credentials() client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION) + project=self.PROJECT, credentials=creds, location=self.LOCATION + ) conn = client._connection = _make_connection(resource) dataset = client.create_dataset(client.dataset(self.DS_ID)) self.assertEqual(dataset.dataset_id, self.DS_ID) self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.etag, resource['etag']) - self.assertEqual(dataset.full_dataset_id, resource['id']) + self.assertEqual(dataset.etag, resource["etag"]) + self.assertEqual(dataset.full_dataset_id, resource["id"]) self.assertEqual(dataset.location, self.LOCATION) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=path, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'labels': {}, - 'location': self.LOCATION, - }) + "labels": {}, + "location": self.LOCATION, + }, + ) def test_create_dataset_w_fully_qualified_string(self): - path = '/projects/%s/datasets' % self.PROJECT + path = "/projects/%s/datasets" % self.PROJECT resource = { - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'etag': "etag", - 'id': "%s:%s" % (self.PROJECT, self.DS_ID), - 'location': self.LOCATION, + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": "etag", + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "location": self.LOCATION, } creds = _make_credentials() client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION) + project=self.PROJECT, credentials=creds, location=self.LOCATION + ) conn = client._connection = _make_connection(resource) - dataset = client.create_dataset( - '{}.{}'.format(self.PROJECT, self.DS_ID)) + dataset = client.create_dataset("{}.{}".format(self.PROJECT, self.DS_ID)) self.assertEqual(dataset.dataset_id, self.DS_ID) self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.etag, resource['etag']) - self.assertEqual(dataset.full_dataset_id, resource['id']) + self.assertEqual(dataset.etag, resource["etag"]) + self.assertEqual(dataset.full_dataset_id, resource["id"]) self.assertEqual(dataset.location, self.LOCATION) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=path, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'labels': {}, - 'location': self.LOCATION, - }) + "labels": {}, + "location": self.LOCATION, + }, + ) def test_create_dataset_w_string(self): - path = '/projects/%s/datasets' % self.PROJECT + path = "/projects/%s/datasets" % self.PROJECT resource = { - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'etag': "etag", - 'id': "%s:%s" % (self.PROJECT, self.DS_ID), - 'location': self.LOCATION, + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": "etag", + "id": "%s:%s" % 
(self.PROJECT, self.DS_ID), + "location": self.LOCATION, } creds = _make_credentials() client = self._make_one( - project=self.PROJECT, credentials=creds, location=self.LOCATION) + project=self.PROJECT, credentials=creds, location=self.LOCATION + ) conn = client._connection = _make_connection(resource) dataset = client.create_dataset(self.DS_ID) self.assertEqual(dataset.dataset_id, self.DS_ID) self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual(dataset.etag, resource['etag']) - self.assertEqual(dataset.full_dataset_id, resource['id']) + self.assertEqual(dataset.etag, resource["etag"]) + self.assertEqual(dataset.full_dataset_id, resource["id"]) self.assertEqual(dataset.location, self.LOCATION) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=path, data={ - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, + "datasetReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, }, - 'labels': {}, - 'location': self.LOCATION, - }) + "labels": {}, + "location": self.LOCATION, + }, + ) def test_create_table_w_day_partition(self): from google.cloud.bigquery.table import Table from google.cloud.bigquery.table import TimePartitioning - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } conn = client._connection = _make_connection(resource) @@ -837,18 +825,19 @@ def test_create_table_w_day_partition(self): got = client.create_table(table) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'timePartitioning': {'type': 'DAY'}, - 'labels': {}, - }) - self.assertEqual(table.time_partitioning.type_, 'DAY') + "timePartitioning": {"type": "DAY"}, + "labels": {}, + }, + ) + self.assertEqual(table.time_partitioning.type_, "DAY") self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_custom_property(self): @@ -856,92 +845,91 @@ def test_create_table_w_custom_property(self): # yet part of the library from google.cloud.bigquery.table import Table - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'newAlphaProperty': 'unreleased property', + "newAlphaProperty": "unreleased property", } conn = client._connection = _make_connection(resource) table = 
Table(self.TABLE_REF) - table._properties['newAlphaProperty'] = 'unreleased property' + table._properties["newAlphaProperty"] = "unreleased property" got = client.create_table(table) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'newAlphaProperty': 'unreleased property', - 'labels': {}, - }) - self.assertEqual( - got._properties['newAlphaProperty'], 'unreleased property') + "newAlphaProperty": "unreleased property", + "labels": {}, + }, + ) + self.assertEqual(got._properties["newAlphaProperty"], "unreleased property") self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_encryption_configuration(self): from google.cloud.bigquery.table import EncryptionConfiguration from google.cloud.bigquery.table import Table - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } conn = client._connection = _make_connection(resource) table = Table(self.TABLE_REF) table.encryption_configuration = EncryptionConfiguration( - kms_key_name=self.KMS_KEY_NAME) + kms_key_name=self.KMS_KEY_NAME + ) got = client.create_table(table) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'labels': {}, - 'encryptionConfiguration': {'kmsKeyName': self.KMS_KEY_NAME}, - }) + "labels": {}, + "encryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME}, + }, + ) self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_day_partition_and_expire(self): from google.cloud.bigquery.table import Table from google.cloud.bigquery.table import TimePartitioning - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } conn = client._connection = _make_connection(resource) @@ -951,57 +939,57 @@ def test_create_table_w_day_partition_and_expire(self): got = client.create_table(table) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, 
- 'tableId': self.TABLE_ID + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'timePartitioning': {'type': 'DAY', 'expirationMs': '100'}, - 'labels': {}, - }) - self.assertEqual(table.time_partitioning.type_, 'DAY') + "timePartitioning": {"type": "DAY", "expirationMs": "100"}, + "labels": {}, + }, + ) + self.assertEqual(table.time_partitioning.type_, "DAY") self.assertEqual(table.time_partitioning.expiration_ms, 100) self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_schema_and_query(self): from google.cloud.bigquery.table import Table, SchemaField - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) - query = 'SELECT * from %s:%s' % (self.DS_ID, self.TABLE_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) + query = "SELECT * from %s:%s" % (self.DS_ID, self.TABLE_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'schema': { - 'fields': [ + "schema": { + "fields": [ { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None, + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, }, { - 'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None, + "name": "age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, }, - ], + ] }, - 'view': {'query': query}, + "view": {"query": query}, } schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] conn = client._connection = _make_connection(resource) table = Table(self.TABLE_REF, schema=schema) @@ -1010,33 +998,34 @@ def test_create_table_w_schema_and_query(self): got = client.create_table(table) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'schema': { - 'fields': [ + "schema": { + "fields": [ { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None, + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, }, { - 'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None, + "name": "age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, }, - ], + ] }, - 'view': {'query': query, 'useLegacySql': False}, - 'labels': {}, - }) + "view": {"query": query, "useLegacySql": False}, + "labels": {}, + }, + ) self.assertEqual(got.table_id, self.TABLE_ID) self.assertEqual(got.project, self.PROJECT) self.assertEqual(got.dataset_id, self.DS_ID) @@ -1048,63 +1037,63 @@ def test_create_table_w_external(self): from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.table import Table - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = 
"projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'externalDataConfiguration': { - 'sourceFormat': SourceFormat.CSV, - 'autodetect': True, + "externalDataConfiguration": { + "sourceFormat": SourceFormat.CSV, + "autodetect": True, }, } conn = client._connection = _make_connection(resource) table = Table(self.TABLE_REF) - ec = ExternalConfig('CSV') + ec = ExternalConfig("CSV") ec.autodetect = True table.external_data_configuration = ec got = client.create_table(table) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'externalDataConfiguration': { - 'sourceFormat': SourceFormat.CSV, - 'autodetect': True, + "externalDataConfiguration": { + "sourceFormat": SourceFormat.CSV, + "autodetect": True, }, - 'labels': {}, - }) + "labels": {}, + }, + ) self.assertEqual(got.table_id, self.TABLE_ID) self.assertEqual(got.project, self.PROJECT) self.assertEqual(got.dataset_id, self.DS_ID) - self.assertEqual(got.external_data_configuration.source_format, - SourceFormat.CSV) + self.assertEqual( + got.external_data_configuration.source_format, SourceFormat.CSV + ) self.assertEqual(got.external_data_configuration.autodetect, True) def test_create_table_w_reference(self): - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } conn = client._connection = _make_connection(resource) @@ -1112,99 +1101,102 @@ def test_create_table_w_reference(self): got = client.create_table(self.TABLE_REF) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'labels': {}, - }) + "labels": {}, + }, + ) self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_fully_qualified_string(self): - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': 
self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } conn = client._connection = _make_connection(resource) got = client.create_table( - '{}.{}.{}'.format(self.PROJECT, self.DS_ID, self.TABLE_ID)) + "{}.{}.{}".format(self.PROJECT, self.DS_ID, self.TABLE_ID) + ) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'labels': {}, - }) + "labels": {}, + }, + ) self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_string(self): - path = 'projects/%s/datasets/%s/tables' % ( - self.PROJECT, self.DS_ID) + path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } conn = client._connection = _make_connection(resource) - got = client.create_table('{}.{}'.format(self.DS_ID, self.TABLE_ID)) + got = client.create_table("{}.{}".format(self.DS_ID, self.TABLE_ID)) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % path, + method="POST", + path="/%s" % path, data={ - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'labels': {}, - }) + "labels": {}, + }, + ) self.assertEqual(got.table_id, self.TABLE_ID) def test_get_table(self): - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } conn = client._connection = _make_connection(resource) table = client.get_table(self.TABLE_REF) - conn.api_request.assert_called_once_with( - method='GET', path='/%s' % path) + conn.api_request.assert_called_once_with(method="GET", path="/%s" % path) self.assertEqual(table.table_id, self.TABLE_ID) def test_update_dataset_w_invalid_field(self): @@ -1218,25 +1210,22 @@ def test_update_dataset_w_invalid_field(self): def test_update_dataset(self): from google.cloud.bigquery.dataset import Dataset, AccessEntry - PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID) - DESCRIPTION = 'DESCRIPTION' - FRIENDLY_NAME = 'TITLE' - LOCATION = 'loc' - LABELS 
= {'priority': 'high'} - ACCESS = [ - {'role': 'OWNER', 'userByEmail': 'phred@example.com'}, - ] + PATH = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) + DESCRIPTION = "DESCRIPTION" + FRIENDLY_NAME = "TITLE" + LOCATION = "loc" + LABELS = {"priority": "high"} + ACCESS = [{"role": "OWNER", "userByEmail": "phred@example.com"}] EXP = 17 RESOURCE = { - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'etag': "etag", - 'description': DESCRIPTION, - 'friendlyName': FRIENDLY_NAME, - 'location': LOCATION, - 'defaultTableExpirationMs': EXP, - 'labels': LABELS, - 'access': ACCESS, + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": "etag", + "description": DESCRIPTION, + "friendlyName": FRIENDLY_NAME, + "location": LOCATION, + "defaultTableExpirationMs": EXP, + "labels": LABELS, + "access": ACCESS, } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -1247,22 +1236,22 @@ def test_update_dataset(self): ds.location = LOCATION ds.default_table_expiration_ms = EXP ds.labels = LABELS - ds.access_entries = [ - AccessEntry('OWNER', 'userByEmail', 'phred@example.com')] + ds.access_entries = [AccessEntry("OWNER", "userByEmail", "phred@example.com")] ds2 = client.update_dataset( - ds, ['description', 'friendly_name', 'location', 'labels', - 'access_entries']) + ds, ["description", "friendly_name", "location", "labels", "access_entries"] + ) conn.api_request.assert_called_once_with( - method='PATCH', + method="PATCH", data={ - 'description': DESCRIPTION, - 'friendlyName': FRIENDLY_NAME, - 'location': LOCATION, - 'labels': LABELS, - 'access': ACCESS, + "description": DESCRIPTION, + "friendlyName": FRIENDLY_NAME, + "location": LOCATION, + "labels": LABELS, + "access": ACCESS, }, - path='/' + PATH, - headers=None) + path="/" + PATH, + headers=None, + ) self.assertEqual(ds2.description, ds.description) self.assertEqual(ds2.friendly_name, ds.friendly_name) self.assertEqual(ds2.location, ds.location) @@ -1270,79 +1259,80 @@ def test_update_dataset(self): self.assertEqual(ds2.access_entries, ds.access_entries) # ETag becomes If-Match header. 
- ds._properties['etag'] = 'etag' + ds._properties["etag"] = "etag" client.update_dataset(ds, []) req = conn.api_request.call_args - self.assertEqual(req[1]['headers']['If-Match'], 'etag') + self.assertEqual(req[1]["headers"]["If-Match"], "etag") def test_update_dataset_w_custom_property(self): # The library should handle sending properties to the API that are not # yet part of the library from google.cloud.bigquery.dataset import Dataset - path = '/projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID) + path = "/projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) resource = { - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'newAlphaProperty': 'unreleased property', + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "newAlphaProperty": "unreleased property", } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = _make_connection(resource) dataset = Dataset(client.dataset(self.DS_ID)) - dataset._properties['newAlphaProperty'] = 'unreleased property' + dataset._properties["newAlphaProperty"] = "unreleased property" - dataset = client.update_dataset(dataset, ['newAlphaProperty']) + dataset = client.update_dataset(dataset, ["newAlphaProperty"]) conn.api_request.assert_called_once_with( - method='PATCH', - data={'newAlphaProperty': 'unreleased property'}, + method="PATCH", + data={"newAlphaProperty": "unreleased property"}, path=path, headers=None, ) self.assertEqual(dataset.dataset_id, self.DS_ID) self.assertEqual(dataset.project, self.PROJECT) - self.assertEqual( - dataset._properties['newAlphaProperty'], 'unreleased property') + self.assertEqual(dataset._properties["newAlphaProperty"], "unreleased property") def test_update_table(self): from google.cloud.bigquery.table import Table, SchemaField - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) - description = 'description' - title = 'title' + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) + description = "description" + title = "title" resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'schema': { - 'fields': [ + "schema": { + "fields": [ { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, }, { - 'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None + "name": "age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, }, - ], + ] }, - 'etag': 'etag', - 'description': description, - 'friendlyName': title, - 'labels': {'x': 'y'}, + "etag": "etag", + "description": description, + "friendlyName": title, + "labels": {"x": "y"}, } schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -1350,92 +1340,98 @@ def test_update_table(self): table = Table(self.TABLE_REF, schema=schema) table.description = description 
table.friendly_name = title - table.labels = {'x': 'y'} + table.labels = {"x": "y"} updated_table = client.update_table( - table, ['schema', 'description', 'friendly_name', 'labels']) + table, ["schema", "description", "friendly_name", "labels"] + ) sent = { - 'schema': { - 'fields': [ + "schema": { + "fields": [ { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, }, { - 'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None + "name": "age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, }, - ], + ] }, - 'description': description, - 'friendlyName': title, - 'labels': {'x': 'y'}, + "description": description, + "friendlyName": title, + "labels": {"x": "y"}, } conn.api_request.assert_called_once_with( - method='PATCH', - data=sent, - path='/' + path, - headers=None) + method="PATCH", data=sent, path="/" + path, headers=None + ) self.assertEqual(updated_table.description, table.description) self.assertEqual(updated_table.friendly_name, table.friendly_name) self.assertEqual(updated_table.schema, table.schema) self.assertEqual(updated_table.labels, table.labels) # ETag becomes If-Match header. - table._properties['etag'] = 'etag' + table._properties["etag"] = "etag" client.update_table(table, []) req = conn.api_request.call_args - self.assertEqual(req[1]['headers']['If-Match'], 'etag') + self.assertEqual(req[1]["headers"]["If-Match"], "etag") def test_update_table_w_custom_property(self): from google.cloud.bigquery.table import Table - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'newAlphaProperty': 'unreleased property', + "newAlphaProperty": "unreleased property", } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = _make_connection(resource) table = Table(self.TABLE_REF) - table._properties['newAlphaProperty'] = 'unreleased property' + table._properties["newAlphaProperty"] = "unreleased property" - updated_table = client.update_table(table, ['newAlphaProperty']) + updated_table = client.update_table(table, ["newAlphaProperty"]) conn.api_request.assert_called_once_with( - method='PATCH', - path='/%s' % path, - data={'newAlphaProperty': 'unreleased property'}, - headers=None) + method="PATCH", + path="/%s" % path, + data={"newAlphaProperty": "unreleased property"}, + headers=None, + ) self.assertEqual( - updated_table._properties['newAlphaProperty'], - 'unreleased property') + updated_table._properties["newAlphaProperty"], "unreleased property" + ) def test_update_table_only_use_legacy_sql(self): from google.cloud.bigquery.table import Table - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 
'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'view': {'useLegacySql': True} + "view": {"useLegacySql": True}, } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -1443,15 +1439,15 @@ def test_update_table_only_use_legacy_sql(self): table = Table(self.TABLE_REF) table.view_use_legacy_sql = True - updated_table = client.update_table(table, ['view_use_legacy_sql']) + updated_table = client.update_table(table, ["view_use_legacy_sql"]) conn.api_request.assert_called_once_with( - method='PATCH', - path='/%s' % path, - data={'view': {'useLegacySql': True}}, - headers=None) - self.assertEqual( - updated_table.view_use_legacy_sql, table.view_use_legacy_sql) + method="PATCH", + path="/%s" % path, + data={"view": {"useLegacySql": True}}, + headers=None, + ) + self.assertEqual(updated_table.view_use_legacy_sql, table.view_use_legacy_sql) def test_update_table_w_query(self): import datetime @@ -1459,45 +1455,45 @@ def test_update_table_w_query(self): from google.cloud._helpers import _millis from google.cloud.bigquery.table import Table, SchemaField - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) - query = 'select fullname, age from person_ages' - location = 'EU' + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) + query = "select fullname, age from person_ages" + location = "EU" exp_time = datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) schema_resource = { - 'fields': [ + "fields": [ { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, }, { - 'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None + "name": "age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, }, - ], + ] } schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] resource = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID - }, - 'schema': schema_resource, - 'view': { - 'query': query, - 'useLegacySql': True, + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'location': location, - 'expirationTime': _millis(exp_time) + "schema": schema_resource, + "view": {"query": query, "useLegacySql": True}, + "location": location, + "expirationTime": _millis(exp_time), } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -1506,28 +1502,23 @@ def test_update_table_w_query(self): table.expires = exp_time table.view_query = query table.view_use_legacy_sql = True - updated_properties = [ - 'schema', 'view_query', 'expires', 'view_use_legacy_sql'] + updated_properties = ["schema", "view_query", "expires", "view_use_legacy_sql"] updated_table = client.update_table(table, updated_properties) self.assertEqual(updated_table.schema, table.schema) self.assertEqual(updated_table.view_query, table.view_query) 
self.assertEqual(updated_table.expires, table.expires) - self.assertEqual( - updated_table.view_use_legacy_sql, table.view_use_legacy_sql) + self.assertEqual(updated_table.view_use_legacy_sql, table.view_use_legacy_sql) self.assertEqual(updated_table.location, location) conn.api_request.assert_called_once_with( - method='PATCH', - path='/%s' % path, + method="PATCH", + path="/%s" % path, data={ - 'view': { - 'query': query, - 'useLegacySql': True, - }, - 'expirationTime': str(_millis(exp_time)), - 'schema': schema_resource, + "view": {"query": query, "useLegacySql": True}, + "expirationTime": str(_millis(exp_time)), + "schema": schema_resource, }, headers=None, ) @@ -1535,24 +1526,31 @@ def test_update_table_w_query(self): def test_update_table_w_schema_None(self): # Simulate deleting schema: not sure if back-end will actually # allow this operation, but the spec says it is optional. - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) resource1 = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID}, - 'schema': {'fields': [ - {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, - {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]} + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, + }, + "schema": { + "fields": [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, + ] + }, } resource2 = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } creds = _make_credentials() @@ -1560,7 +1558,7 @@ def test_update_table_w_schema_None(self): conn = client._connection = _make_connection(resource1, resource2) table = client.get_table( # Test with string for table ID - '{}.{}.{}'.format( + "{}.{}.{}".format( self.TABLE_REF.project, self.TABLE_REF.dataset_id, self.TABLE_REF.table_id, @@ -1568,41 +1566,44 @@ def test_update_table_w_schema_None(self): ) table.schema = None - updated_table = client.update_table(table, ['schema']) + updated_table = client.update_table(table, ["schema"]) self.assertEqual(len(conn.api_request.call_args_list), 2) req = conn.api_request.call_args_list[1] - self.assertEqual(req[1]['method'], 'PATCH') - sent = {'schema': None} - self.assertEqual(req[1]['data'], sent) - self.assertEqual(req[1]['path'], '/%s' % path) + self.assertEqual(req[1]["method"], "PATCH") + sent = {"schema": None} + self.assertEqual(req[1]["data"], sent) + self.assertEqual(req[1]["path"], "/%s" % path) self.assertEqual(len(updated_table.schema), 0) def test_update_table_delete_property(self): from google.cloud.bigquery.table import Table - description = 'description' - title = 'title' - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + description = "description" + title = "title" + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) resource1 = { - 'id': '%s:%s:%s' % (self.PROJECT, 
self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'description': description, - 'friendlyName': title, + "description": description, + "friendlyName": title, } resource2 = { - 'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_ID), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID + "id": "%s:%s:%s" % (self.PROJECT, self.DS_ID, self.TABLE_ID), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'description': None, + "description": None, } creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) @@ -1610,22 +1611,21 @@ def test_update_table_delete_property(self): table = Table(self.TABLE_REF) table.description = description table.friendly_name = title - table2 = client.update_table(table, ['description', 'friendly_name']) + table2 = client.update_table(table, ["description", "friendly_name"]) self.assertEqual(table2.description, table.description) table2.description = None - table3 = client.update_table(table2, ['description']) + table3 = client.update_table(table2, ["description"]) self.assertEqual(len(conn.api_request.call_args_list), 2) req = conn.api_request.call_args_list[1] - self.assertEqual(req[1]['method'], 'PATCH') - self.assertEqual(req[1]['path'], '/%s' % path) - sent = {'description': None} - self.assertEqual(req[1]['data'], sent) + self.assertEqual(req[1]["method"], "PATCH") + self.assertEqual(req[1]["path"], "/%s" % path) + sent = {"description": None} + self.assertEqual(req[1]["data"], sent) self.assertIsNone(table3.description) def test_list_tables_empty(self): - path = '/projects/{}/datasets/{}/tables'.format( - self.PROJECT, self.DS_ID) + path = "/projects/{}/datasets/{}/tables".format(self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = _make_connection({}) @@ -1640,31 +1640,40 @@ def test_list_tables_empty(self): self.assertEqual(tables, []) self.assertIsNone(token) conn.api_request.assert_called_once_with( - method='GET', path=path, query_params={}) + method="GET", path=path, query_params={} + ) def test_list_tables_defaults(self): from google.cloud.bigquery.table import TableListItem - TABLE_1 = 'table_one' - TABLE_2 = 'table_two' - PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID) - TOKEN = 'TOKEN' + TABLE_1 = "table_one" + TABLE_2 = "table_two" + PATH = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) + TOKEN = "TOKEN" DATA = { - 'nextPageToken': TOKEN, - 'tables': [ - {'kind': 'bigquery#table', - 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_1), - 'tableReference': {'tableId': TABLE_1, - 'datasetId': self.DS_ID, - 'projectId': self.PROJECT}, - 'type': 'TABLE'}, - {'kind': 'bigquery#table', - 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_2), - 'tableReference': {'tableId': TABLE_2, - 'datasetId': self.DS_ID, - 'projectId': self.PROJECT}, - 'type': 'TABLE'}, - ] + "nextPageToken": TOKEN, + "tables": [ + { + "kind": "bigquery#table", + "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_1), + "tableReference": { + "tableId": TABLE_1, + "datasetId": self.DS_ID, + "projectId": self.PROJECT, + }, + "type": "TABLE", + }, + { + 
"kind": "bigquery#table", + "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_2), + "tableReference": { + "tableId": TABLE_2, + "datasetId": self.DS_ID, + "projectId": self.PROJECT, + }, + "type": "TABLE", + }, + ], } creds = _make_credentials() @@ -1678,37 +1687,46 @@ def test_list_tables_defaults(self): tables = list(page) token = iterator.next_page_token - self.assertEqual(len(tables), len(DATA['tables'])) - for found, expected in zip(tables, DATA['tables']): + self.assertEqual(len(tables), len(DATA["tables"])) + for found, expected in zip(tables, DATA["tables"]): self.assertIsInstance(found, TableListItem) - self.assertEqual(found.full_table_id, expected['id']) - self.assertEqual(found.table_type, expected['type']) + self.assertEqual(found.full_table_id, expected["id"]) + self.assertEqual(found.table_type, expected["type"]) self.assertEqual(token, TOKEN) conn.api_request.assert_called_once_with( - method='GET', path='/%s' % PATH, query_params={}) + method="GET", path="/%s" % PATH, query_params={} + ) def test_list_tables_explicit(self): from google.cloud.bigquery.table import TableListItem - TABLE_1 = 'table_one' - TABLE_2 = 'table_two' - PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID) - TOKEN = 'TOKEN' + TABLE_1 = "table_one" + TABLE_2 = "table_two" + PATH = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) + TOKEN = "TOKEN" DATA = { - 'tables': [ - {'kind': 'bigquery#dataset', - 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_1), - 'tableReference': {'tableId': TABLE_1, - 'datasetId': self.DS_ID, - 'projectId': self.PROJECT}, - 'type': 'TABLE'}, - {'kind': 'bigquery#dataset', - 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_2), - 'tableReference': {'tableId': TABLE_2, - 'datasetId': self.DS_ID, - 'projectId': self.PROJECT}, - 'type': 'TABLE'}, + "tables": [ + { + "kind": "bigquery#dataset", + "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_1), + "tableReference": { + "tableId": TABLE_1, + "datasetId": self.DS_ID, + "projectId": self.PROJECT, + }, + "type": "TABLE", + }, + { + "kind": "bigquery#dataset", + "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, TABLE_2), + "tableReference": { + "tableId": TABLE_2, + "datasetId": self.DS_ID, + "projectId": self.PROJECT, + }, + "type": "TABLE", + }, ] } @@ -1720,23 +1738,26 @@ def test_list_tables_explicit(self): iterator = client.list_tables( # Test with string for dataset ID. 
self.DS_ID, - max_results=3, page_token=TOKEN) + max_results=3, + page_token=TOKEN, + ) self.assertEqual(iterator.dataset, dataset) page = six.next(iterator.pages) tables = list(page) token = iterator.next_page_token - self.assertEqual(len(tables), len(DATA['tables'])) - for found, expected in zip(tables, DATA['tables']): + self.assertEqual(len(tables), len(DATA["tables"])) + for found, expected in zip(tables, DATA["tables"]): self.assertIsInstance(found, TableListItem) - self.assertEqual(found.full_table_id, expected['id']) - self.assertEqual(found.table_type, expected['type']) + self.assertEqual(found.full_table_id, expected["id"]) + self.assertEqual(found.table_type, expected["type"]) self.assertIsNone(token) conn.api_request.assert_called_once_with( - method='GET', - path='/%s' % PATH, - query_params={'maxResults': 3, 'pageToken': TOKEN}) + method="GET", + path="/%s" % PATH, + query_params={"maxResults": 3, "pageToken": TOKEN}, + ) def test_list_tables_wrong_type(self): creds = _make_credentials() @@ -1749,26 +1770,21 @@ def test_delete_dataset(self): from google.cloud.bigquery.dataset import DatasetReference ds_ref = DatasetReference(self.PROJECT, self.DS_ID) - datasets = ( - ds_ref, - Dataset(ds_ref), - '{}.{}'.format(self.PROJECT, self.DS_ID), - ) - PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID) + datasets = (ds_ref, Dataset(ds_ref), "{}.{}".format(self.PROJECT, self.DS_ID)) + PATH = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = _make_connection(*([{}] * len(datasets))) for arg in datasets: client.delete_dataset(arg) conn.api_request.assert_called_with( - method='DELETE', - path='/%s' % PATH, - query_params={}) + method="DELETE", path="/%s" % PATH, query_params={} + ) def test_delete_dataset_delete_contents(self): from google.cloud.bigquery.dataset import Dataset - PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID) + PATH = "projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = _make_connection({}, {}) @@ -1776,9 +1792,10 @@ def test_delete_dataset_delete_contents(self): for arg in (ds_ref, Dataset(ds_ref)): client.delete_dataset(arg, delete_contents=True) conn.api_request.assert_called_with( - method='DELETE', - path='/%s' % PATH, - query_params={'deleteContents': 'true'}) + method="DELETE", + path="/%s" % PATH, + query_params={"deleteContents": "true"}, + ) def test_delete_dataset_wrong_type(self): creds = _make_credentials() @@ -1792,24 +1809,25 @@ def test_delete_table(self): tables = ( self.TABLE_REF, Table(self.TABLE_REF), - '{}.{}.{}'.format( + "{}.{}.{}".format( self.TABLE_REF.project, self.TABLE_REF.dataset_id, self.TABLE_REF.table_id, ), ) - path = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + path = "projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(*([{}] * len(tables))) for arg in tables: client.delete_table(arg) - conn.api_request.assert_called_with( - method='DELETE', path='/%s' % path) + conn.api_request.assert_called_with(method="DELETE", path="/%s" % path) def 
test_delete_table_w_wrong_type(self): creds = _make_credentials() @@ -1819,6 +1837,7 @@ def test_delete_table_w_wrong_type(self): def test_job_from_resource_unknown_type(self): from google.cloud.bigquery.job import UnknownJob + creds = _make_credentials() client = self._make_one(self.PROJECT, creds) got = client.job_from_resource({}) # Can parse redacted job. @@ -1828,29 +1847,26 @@ def test_job_from_resource_unknown_type(self): def test_get_job_miss_w_explict_project(self): from google.cloud.exceptions import NotFound - OTHER_PROJECT = 'OTHER_PROJECT' - JOB_ID = 'NONESUCH' + OTHER_PROJECT = "OTHER_PROJECT" + JOB_ID = "NONESUCH" creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection() with self.assertRaises(NotFound): - client.get_job( - JOB_ID, project=OTHER_PROJECT, location=self.LOCATION) + client.get_job(JOB_ID, project=OTHER_PROJECT, location=self.LOCATION) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/OTHER_PROJECT/jobs/NONESUCH', - query_params={ - 'projection': 'full', - 'location': self.LOCATION, - }) + method="GET", + path="/projects/OTHER_PROJECT/jobs/NONESUCH", + query_params={"projection": "full", "location": self.LOCATION}, + ) def test_get_job_miss_w_client_location(self): from google.cloud.exceptions import NotFound - OTHER_PROJECT = 'OTHER_PROJECT' - JOB_ID = 'NONESUCH' + OTHER_PROJECT = "OTHER_PROJECT" + JOB_ID = "NONESUCH" creds = _make_credentials() client = self._make_one(self.PROJECT, creds, location=self.LOCATION) conn = client._connection = _make_connection() @@ -1859,38 +1875,33 @@ def test_get_job_miss_w_client_location(self): client.get_job(JOB_ID, project=OTHER_PROJECT) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/OTHER_PROJECT/jobs/NONESUCH', - query_params={ - 'projection': 'full', - 'location': self.LOCATION, - }) + method="GET", + path="/projects/OTHER_PROJECT/jobs/NONESUCH", + query_params={"projection": "full", "location": self.LOCATION}, + ) def test_get_job_hit(self): from google.cloud.bigquery.job import CreateDisposition from google.cloud.bigquery.job import QueryJob from google.cloud.bigquery.job import WriteDisposition - JOB_ID = 'query_job' - QUERY_DESTINATION_TABLE = 'query_destination_table' - QUERY = 'SELECT * from test_dataset:test_table' + JOB_ID = "query_job" + QUERY_DESTINATION_TABLE = "query_destination_table" + QUERY = "SELECT * from test_dataset:test_table" ASYNC_QUERY_DATA = { - 'id': '{}:{}'.format(self.PROJECT, JOB_ID), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'query_job', - }, - 'state': 'DONE', - 'configuration': { - 'query': { - 'query': QUERY, - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': QUERY_DESTINATION_TABLE, + "id": "{}:{}".format(self.PROJECT, JOB_ID), + "jobReference": {"projectId": self.PROJECT, "jobId": "query_job"}, + "state": "DONE", + "configuration": { + "query": { + "query": QUERY, + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": QUERY_DESTINATION_TABLE, }, - 'createDisposition': CreateDisposition.CREATE_IF_NEEDED, - 'writeDisposition': WriteDisposition.WRITE_TRUNCATE, + "createDisposition": CreateDisposition.CREATE_IF_NEEDED, + "writeDisposition": WriteDisposition.WRITE_TRUNCATE, } }, } @@ -1902,43 +1913,38 @@ def test_get_job_hit(self): self.assertIsInstance(job, QueryJob) self.assertEqual(job.job_id, JOB_ID) - self.assertEqual(job.create_disposition, - 
CreateDisposition.CREATE_IF_NEEDED) - self.assertEqual(job.write_disposition, - WriteDisposition.WRITE_TRUNCATE) + self.assertEqual(job.create_disposition, CreateDisposition.CREATE_IF_NEEDED) + self.assertEqual(job.write_disposition, WriteDisposition.WRITE_TRUNCATE) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/PROJECT/jobs/query_job', - query_params={'projection': 'full'}, + method="GET", + path="/projects/PROJECT/jobs/query_job", + query_params={"projection": "full"}, ) def test_cancel_job_miss_w_explict_project(self): from google.cloud.exceptions import NotFound - OTHER_PROJECT = 'OTHER_PROJECT' - JOB_ID = 'NONESUCH' + OTHER_PROJECT = "OTHER_PROJECT" + JOB_ID = "NONESUCH" creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection() with self.assertRaises(NotFound): - client.cancel_job( - JOB_ID, project=OTHER_PROJECT, location=self.LOCATION) + client.cancel_job(JOB_ID, project=OTHER_PROJECT, location=self.LOCATION) conn.api_request.assert_called_once_with( - method='POST', - path='/projects/OTHER_PROJECT/jobs/NONESUCH/cancel', - query_params={ - 'projection': 'full', - 'location': self.LOCATION, - }) + method="POST", + path="/projects/OTHER_PROJECT/jobs/NONESUCH/cancel", + query_params={"projection": "full", "location": self.LOCATION}, + ) def test_cancel_job_miss_w_client_location(self): from google.cloud.exceptions import NotFound - OTHER_PROJECT = 'OTHER_PROJECT' - JOB_ID = 'NONESUCH' + OTHER_PROJECT = "OTHER_PROJECT" + JOB_ID = "NONESUCH" creds = _make_credentials() client = self._make_one(self.PROJECT, creds, location=self.LOCATION) conn = client._connection = _make_connection() @@ -1947,34 +1953,23 @@ def test_cancel_job_miss_w_client_location(self): client.cancel_job(JOB_ID, project=OTHER_PROJECT) conn.api_request.assert_called_once_with( - method='POST', - path='/projects/OTHER_PROJECT/jobs/NONESUCH/cancel', - query_params={ - 'projection': 'full', - 'location': self.LOCATION, - }) + method="POST", + path="/projects/OTHER_PROJECT/jobs/NONESUCH/cancel", + query_params={"projection": "full", "location": self.LOCATION}, + ) def test_cancel_job_hit(self): from google.cloud.bigquery.job import QueryJob - JOB_ID = 'query_job' - QUERY = 'SELECT * from test_dataset:test_table' + JOB_ID = "query_job" + QUERY = "SELECT * from test_dataset:test_table" QUERY_JOB_RESOURCE = { - 'id': '{}:{}'.format(self.PROJECT, JOB_ID), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'query_job', - }, - 'state': 'RUNNING', - 'configuration': { - 'query': { - 'query': QUERY, - } - }, - } - RESOURCE = { - 'job': QUERY_JOB_RESOURCE, + "id": "{}:{}".format(self.PROJECT, JOB_ID), + "jobReference": {"projectId": self.PROJECT, "jobId": "query_job"}, + "state": "RUNNING", + "configuration": {"query": {"query": QUERY}}, } + RESOURCE = {"job": QUERY_JOB_RESOURCE} creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection(RESOURCE) @@ -1986,9 +1981,10 @@ def test_cancel_job_hit(self): self.assertEqual(job.query, QUERY) conn.api_request.assert_called_once_with( - method='POST', - path='/projects/PROJECT/jobs/query_job/cancel', - query_params={'projection': 'full'}) + method="POST", + path="/projects/PROJECT/jobs/query_job/cancel", + query_params={"projection": "full"}, + ) def test_list_jobs_defaults(self): from google.cloud.bigquery.job import CopyJob @@ -1998,106 +1994,91 @@ def test_list_jobs_defaults(self): from google.cloud.bigquery.job import QueryJob from 
google.cloud.bigquery.job import WriteDisposition - SOURCE_TABLE = 'source_table' - DESTINATION_TABLE = 'destination_table' - QUERY_DESTINATION_TABLE = 'query_destination_table' - SOURCE_URI = 'gs://test_bucket/src_object*' - DESTINATION_URI = 'gs://test_bucket/dst_object*' + SOURCE_TABLE = "source_table" + DESTINATION_TABLE = "destination_table" + QUERY_DESTINATION_TABLE = "query_destination_table" + SOURCE_URI = "gs://test_bucket/src_object*" + DESTINATION_URI = "gs://test_bucket/dst_object*" JOB_TYPES = { - 'load_job': LoadJob, - 'copy_job': CopyJob, - 'extract_job': ExtractJob, - 'query_job': QueryJob, + "load_job": LoadJob, + "copy_job": CopyJob, + "extract_job": ExtractJob, + "query_job": QueryJob, } - PATH = 'projects/%s/jobs' % self.PROJECT - TOKEN = 'TOKEN' - QUERY = 'SELECT * from test_dataset:test_table' + PATH = "projects/%s/jobs" % self.PROJECT + TOKEN = "TOKEN" + QUERY = "SELECT * from test_dataset:test_table" ASYNC_QUERY_DATA = { - 'id': '%s:%s' % (self.PROJECT, 'query_job'), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'query_job', - }, - 'state': 'DONE', - 'configuration': { - 'query': { - 'query': QUERY, - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': QUERY_DESTINATION_TABLE, + "id": "%s:%s" % (self.PROJECT, "query_job"), + "jobReference": {"projectId": self.PROJECT, "jobId": "query_job"}, + "state": "DONE", + "configuration": { + "query": { + "query": QUERY, + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": QUERY_DESTINATION_TABLE, }, - 'createDisposition': CreateDisposition.CREATE_IF_NEEDED, - 'writeDisposition': WriteDisposition.WRITE_TRUNCATE, + "createDisposition": CreateDisposition.CREATE_IF_NEEDED, + "writeDisposition": WriteDisposition.WRITE_TRUNCATE, } }, } EXTRACT_DATA = { - 'id': '%s:%s' % (self.PROJECT, 'extract_job'), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'extract_job', - }, - 'state': 'DONE', - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': SOURCE_TABLE, + "id": "%s:%s" % (self.PROJECT, "extract_job"), + "jobReference": {"projectId": self.PROJECT, "jobId": "extract_job"}, + "state": "DONE", + "configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE_TABLE, }, - 'destinationUris': [DESTINATION_URI], + "destinationUris": [DESTINATION_URI], } }, } COPY_DATA = { - 'id': '%s:%s' % (self.PROJECT, 'copy_job'), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'copy_job', - }, - 'state': 'DONE', - 'configuration': { - 'copy': { - 'sourceTables': [{ - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': SOURCE_TABLE, - }], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': DESTINATION_TABLE, + "id": "%s:%s" % (self.PROJECT, "copy_job"), + "jobReference": {"projectId": self.PROJECT, "jobId": "copy_job"}, + "state": "DONE", + "configuration": { + "copy": { + "sourceTables": [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE_TABLE, + } + ], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": DESTINATION_TABLE, }, } }, } LOAD_DATA = { - 'id': '%s:%s' % (self.PROJECT, 'load_job'), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'load_job', - }, - 'state': 'DONE', - 'configuration': { - 'load': { - 'destinationTable': { - 'projectId': self.PROJECT, - 
'datasetId': self.DS_ID, - 'tableId': SOURCE_TABLE, + "id": "%s:%s" % (self.PROJECT, "load_job"), + "jobReference": {"projectId": self.PROJECT, "jobId": "load_job"}, + "state": "DONE", + "configuration": { + "load": { + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE_TABLE, }, - 'sourceUris': [SOURCE_URI], + "sourceUris": [SOURCE_URI], } }, } DATA = { - 'nextPageToken': TOKEN, - 'jobs': [ - ASYNC_QUERY_DATA, - EXTRACT_DATA, - COPY_DATA, - LOAD_DATA, - ] + "nextPageToken": TOKEN, + "jobs": [ASYNC_QUERY_DATA, EXTRACT_DATA, COPY_DATA, LOAD_DATA], } creds = _make_credentials() client = self._make_one(self.PROJECT, creds) @@ -2108,50 +2089,39 @@ def test_list_jobs_defaults(self): jobs = list(page) token = iterator.next_page_token - self.assertEqual(len(jobs), len(DATA['jobs'])) - for found, expected in zip(jobs, DATA['jobs']): - name = expected['jobReference']['jobId'] + self.assertEqual(len(jobs), len(DATA["jobs"])) + for found, expected in zip(jobs, DATA["jobs"]): + name = expected["jobReference"]["jobId"] self.assertIsInstance(found, JOB_TYPES[name]) self.assertEqual(found.job_id, name) self.assertEqual(token, TOKEN) conn.api_request.assert_called_once_with( - method='GET', - path='/%s' % PATH, - query_params={'projection': 'full'}) + method="GET", path="/%s" % PATH, query_params={"projection": "full"} + ) def test_list_jobs_load_job_wo_sourceUris(self): from google.cloud.bigquery.job import LoadJob - SOURCE_TABLE = 'source_table' - JOB_TYPES = { - 'load_job': LoadJob, - } - PATH = 'projects/%s/jobs' % self.PROJECT - TOKEN = 'TOKEN' + SOURCE_TABLE = "source_table" + JOB_TYPES = {"load_job": LoadJob} + PATH = "projects/%s/jobs" % self.PROJECT + TOKEN = "TOKEN" LOAD_DATA = { - 'id': '%s:%s' % (self.PROJECT, 'load_job'), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'load_job', - }, - 'state': 'DONE', - 'configuration': { - 'load': { - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': SOURCE_TABLE, - }, + "id": "%s:%s" % (self.PROJECT, "load_job"), + "jobReference": {"projectId": self.PROJECT, "jobId": "load_job"}, + "state": "DONE", + "configuration": { + "load": { + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE_TABLE, + } } }, } - DATA = { - 'nextPageToken': TOKEN, - 'jobs': [ - LOAD_DATA, - ] - } + DATA = {"nextPageToken": TOKEN, "jobs": [LOAD_DATA]} creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection(DATA) @@ -2161,28 +2131,28 @@ def test_list_jobs_load_job_wo_sourceUris(self): jobs = list(page) token = iterator.next_page_token - self.assertEqual(len(jobs), len(DATA['jobs'])) - for found, expected in zip(jobs, DATA['jobs']): - name = expected['jobReference']['jobId'] + self.assertEqual(len(jobs), len(DATA["jobs"])) + for found, expected in zip(jobs, DATA["jobs"]): + name = expected["jobReference"]["jobId"] self.assertIsInstance(found, JOB_TYPES[name]) self.assertEqual(found.job_id, name) self.assertEqual(token, TOKEN) conn.api_request.assert_called_once_with( - method='GET', - path='/%s' % PATH, - query_params={'projection': 'full'}) + method="GET", path="/%s" % PATH, query_params={"projection": "full"} + ) def test_list_jobs_explicit_missing(self): - PATH = 'projects/%s/jobs' % self.PROJECT + PATH = "projects/%s/jobs" % self.PROJECT DATA = {} - TOKEN = 'TOKEN' + TOKEN = "TOKEN" creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = 
client._connection = _make_connection(DATA) - iterator = client.list_jobs(max_results=1000, page_token=TOKEN, - all_users=True, state_filter='done') + iterator = client.list_jobs( + max_results=1000, page_token=TOKEN, all_users=True, state_filter="done" + ) page = six.next(iterator.pages) jobs = list(page) token = iterator.next_page_token @@ -2191,29 +2161,29 @@ def test_list_jobs_explicit_missing(self): self.assertIsNone(token) conn.api_request.assert_called_once_with( - method='GET', - path='/%s' % PATH, + method="GET", + path="/%s" % PATH, query_params={ - 'projection': 'full', - 'maxResults': 1000, - 'pageToken': TOKEN, - 'allUsers': True, - 'stateFilter': 'done' - }) + "projection": "full", + "maxResults": 1000, + "pageToken": TOKEN, + "allUsers": True, + "stateFilter": "done", + }, + ) def test_list_jobs_w_project(self): creds = _make_credentials() client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection({}) - list(client.list_jobs(project='other-project')) + list(client.list_jobs(project="other-project")) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/other-project/jobs', - query_params={ - 'projection': 'full', - }) + method="GET", + path="/projects/other-project/jobs", + query_params={"projection": "full"}, + ) def test_list_jobs_w_time_filter(self): creds = _make_credentials() @@ -2226,44 +2196,40 @@ def test_list_jobs_w_time_filter(self): end_time = datetime.datetime(2038, 1, 19, 3, 14, 7, 1000) end_time_millis = (((2 ** 31) - 1) * 1000) + 1 - list(client.list_jobs( - min_creation_time=start_time, max_creation_time=end_time)) + list(client.list_jobs(min_creation_time=start_time, max_creation_time=end_time)) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/%s/jobs' % self.PROJECT, + method="GET", + path="/projects/%s/jobs" % self.PROJECT, query_params={ - 'projection': 'full', - 'minCreationTime': '1', - 'maxCreationTime': str(end_time_millis), - }) + "projection": "full", + "minCreationTime": "1", + "maxCreationTime": str(end_time_millis), + }, + ) def test_load_table_from_uri(self): from google.cloud.bigquery.job import LoadJob - JOB = 'job_name' - DESTINATION = 'destination_table' - SOURCE_URI = 'http://example.com/source.csv' + JOB = "job_name" + DESTINATION = "destination_table" + SOURCE_URI = "http://example.com/source.csv" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': JOB, - }, - 'configuration': { - 'load': { - 'sourceUris': [SOURCE_URI], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': DESTINATION, + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "load": { + "sourceUris": [SOURCE_URI], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": DESTINATION, }, - }, + } }, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) destination = client.dataset(self.DS_ID).table(DESTINATION) @@ -2271,9 +2237,8 @@ def test_load_table_from_uri(self): # Check that load_table_from_uri actually starts the job. 
conn.api_request.assert_called_once_with( - method='POST', - path='/projects/%s/jobs' % self.PROJECT, - data=RESOURCE) + method="POST", path="/projects/%s/jobs" % self.PROJECT, data=RESOURCE + ) self.assertIsInstance(job, LoadJob) self.assertIs(job._client, client) @@ -2291,97 +2256,100 @@ def test_load_table_from_uri(self): self.assertIs(job.destination, destination) def test_load_table_from_uri_w_explicit_project(self): - job_id = 'this-is-a-job-id' - destination_id = 'destination_table' - source_uri = 'gs://example/source.csv' + job_id = "this-is-a-job-id" + destination_id = "destination_table" + source_uri = "gs://example/source.csv" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, - 'configuration': { - 'load': { - 'sourceUris': [source_uri], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': destination_id, + "configuration": { + "load": { + "sourceUris": [source_uri], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": destination_id, }, - }, + } }, } creds = _make_credentials() http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(resource) destination = client.dataset(self.DS_ID).table(destination_id) client.load_table_from_uri( - source_uri, destination, job_id=job_id, project='other-project', - location=self.LOCATION) + source_uri, + destination, + job_id=job_id, + project="other-project", + location=self.LOCATION, + ) # Check that load_table_from_uri actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/other-project/jobs', - data=resource) + method="POST", path="/projects/other-project/jobs", data=resource + ) def test_load_table_from_uri_w_client_location(self): - job_id = 'this-is-a-job-id' - destination_id = 'destination_table' - source_uri = 'gs://example/source.csv' + job_id = "this-is-a-job-id" + destination_id = "destination_table" + source_uri = "gs://example/source.csv" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, - 'configuration': { - 'load': { - 'sourceUris': [source_uri], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': destination_id, + "configuration": { + "load": { + "sourceUris": [source_uri], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": destination_id, }, - }, + } }, } creds = _make_credentials() http = object() client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, - location=self.LOCATION) + project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION + ) conn = client._connection = _make_connection(resource) client.load_table_from_uri( source_uri, # Test with string for table ID. - '{}.{}'.format( - self.DS_ID, - destination_id, - ), + "{}.{}".format(self.DS_ID, destination_id), job_id=job_id, - project='other-project') + project="other-project", + ) # Check that load_table_from_uri actually starts the job. 
         conn.api_request.assert_called_once_with(
-            method='POST',
-            path='/projects/other-project/jobs',
-            data=resource)
+            method="POST", path="/projects/other-project/jobs", data=resource
+        )
 
     @staticmethod
-    def _mock_requests_response(status_code, headers, content=b''):
+    def _mock_requests_response(status_code, headers, content=b""):
         return mock.Mock(
-            content=content, headers=headers, status_code=status_code,
-            spec=['content', 'headers', 'status_code'])
+            content=content,
+            headers=headers,
+            status_code=status_code,
+            spec=["content", "headers", "status_code"],
+        )
 
-    def _mock_transport(self, status_code, headers, content=b''):
-        fake_transport = mock.Mock(spec=['request'])
+    def _mock_transport(self, status_code, headers, content=b""):
+        fake_transport = mock.Mock(spec=["request"])
         fake_response = self._mock_requests_response(
-            status_code, headers, content=content)
+            status_code, headers, content=content
+        )
         fake_transport.request.return_value = fake_response
         return fake_transport
 
@@ -2395,29 +2363,30 @@ def _initiate_resumable_upload_helper(self, num_retries=None):
         from google.cloud.bigquery.job import SourceFormat
 
         # Create mocks to be checked for doing transport.
-        resumable_url = 'http://test.invalid?upload_id=hey-you'
-        response_headers = {'location': resumable_url}
-        fake_transport = self._mock_transport(
-            http_client.OK, response_headers)
+        resumable_url = "http://test.invalid?upload_id=hey-you"
+        response_headers = {"location": resumable_url}
+        fake_transport = self._mock_transport(http_client.OK, response_headers)
         client = self._make_one(project=self.PROJECT, _http=fake_transport)
         conn = client._connection = _make_connection()
 
         # Create some mock arguments and call the method under test.
-        data = b'goodbye gudbi gootbee'
+        data = b"goodbye gudbi gootbee"
         stream = io.BytesIO(data)
         config = LoadJobConfig()
         config.source_format = SourceFormat.CSV
         job = LoadJob(None, None, self.TABLE_REF, client, job_config=config)
         metadata = job.to_api_repr()
         upload, transport = client._initiate_resumable_upload(
-            stream, metadata, num_retries)
+            stream, metadata, num_retries
+        )
 
         # Check the returned values.
         self.assertIsInstance(upload, ResumableUpload)
         upload_url = (
-            'https://www.googleapis.com/upload/bigquery/v2/projects/'
+            "https://www.googleapis.com/upload/bigquery/v2/projects/"
             + self.PROJECT
-            + '/jobs?uploadType=resumable')
+            + "/jobs?uploadType=resumable"
+        )
         self.assertEqual(upload.upload_url, upload_url)
         expected_headers = _get_upload_headers(conn.USER_AGENT)
         self.assertEqual(upload._headers, expected_headers)
@@ -2442,11 +2411,11 @@ def _initiate_resumable_upload_helper(self, num_retries=None):
         # Check the mocks.
request_headers = expected_headers.copy() - request_headers['x-upload-content-type'] = _GENERIC_CONTENT_TYPE + request_headers["x-upload-content-type"] = _GENERIC_CONTENT_TYPE fake_transport.request.assert_called_once_with( - 'POST', + "POST", upload_url, - data=json.dumps(metadata).encode('utf-8'), + data=json.dumps(metadata).encode("utf-8"), headers=request_headers, ) @@ -2456,8 +2425,7 @@ def test__initiate_resumable_upload(self): def test__initiate_resumable_upload_with_retry(self): self._initiate_resumable_upload_helper(num_retries=11) - def _do_multipart_upload_success_helper( - self, get_boundary, num_retries=None): + def _do_multipart_upload_success_helper(self, get_boundary, num_retries=None): from google.cloud.bigquery.client import _get_upload_headers from google.cloud.bigquery.job import LoadJob from google.cloud.bigquery.job import LoadJobConfig @@ -2468,15 +2436,14 @@ def _do_multipart_upload_success_helper( conn = client._connection = _make_connection() # Create some mock arguments. - data = b'Bzzzz-zap \x00\x01\xf4' + data = b"Bzzzz-zap \x00\x01\xf4" stream = io.BytesIO(data) config = LoadJobConfig() config.source_format = SourceFormat.CSV job = LoadJob(None, None, self.TABLE_REF, client, job_config=config) metadata = job.to_api_repr() size = len(data) - response = client._do_multipart_upload( - stream, metadata, size, num_retries) + response = client._do_multipart_upload(stream, metadata, size, num_retries) # Check the mocks and the returned value. self.assertIs(response, fake_transport.request.return_value) @@ -2484,68 +2451,63 @@ def _do_multipart_upload_success_helper( get_boundary.assert_called_once_with() upload_url = ( - 'https://www.googleapis.com/upload/bigquery/v2/projects/' + "https://www.googleapis.com/upload/bigquery/v2/projects/" + self.PROJECT - + '/jobs?uploadType=multipart') + + "/jobs?uploadType=multipart" + ) payload = ( - b'--==0==\r\n' - + b'content-type: application/json; charset=UTF-8\r\n\r\n' - + json.dumps(metadata).encode('utf-8') + b'\r\n' - + b'--==0==\r\n' - + b'content-type: */*\r\n\r\n' - + data + b'\r\n' - + b'--==0==--') + b"--==0==\r\n" + + b"content-type: application/json; charset=UTF-8\r\n\r\n" + + json.dumps(metadata).encode("utf-8") + + b"\r\n" + + b"--==0==\r\n" + + b"content-type: */*\r\n\r\n" + + data + + b"\r\n" + + b"--==0==--" + ) headers = _get_upload_headers(conn.USER_AGENT) - headers['content-type'] = b'multipart/related; boundary="==0=="' + headers["content-type"] = b'multipart/related; boundary="==0=="' fake_transport.request.assert_called_once_with( - 'POST', - upload_url, - data=payload, - headers=headers, + "POST", upload_url, data=payload, headers=headers ) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload(self, get_boundary): self._do_multipart_upload_success_helper(get_boundary) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_retry(self, get_boundary): self._do_multipart_upload_success_helper(get_boundary, num_retries=8) def test_copy_table(self): from google.cloud.bigquery.job import CopyJob - JOB = 'job_name' - SOURCE = 'source_table' - DESTINATION = 'destination_table' + JOB = "job_name" + SOURCE = "source_table" + DESTINATION = "destination_table" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 
'jobId': JOB, - }, - 'configuration': { - 'copy': { - 'sourceTables': [ + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "copy": { + "sourceTables": [ { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': SOURCE, - }, + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE, + } ], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': DESTINATION, + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": DESTINATION, }, - }, + } }, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) dataset = client.dataset(self.DS_ID) source = dataset.table(SOURCE) @@ -2555,9 +2517,8 @@ def test_copy_table(self): # Check that copy_table actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/%s/jobs' % self.PROJECT, - data=RESOURCE) + method="POST", path="/projects/%s/jobs" % self.PROJECT, data=RESOURCE + ) self.assertIsInstance(job, CopyJob) self.assertIs(job._client, client) @@ -2566,7 +2527,7 @@ def test_copy_table(self): self.assertIs(job.destination, destination) conn = client._connection = _make_connection(RESOURCE) - source2 = dataset.table(SOURCE + '2') + source2 = dataset.table(SOURCE + "2") job = client.copy_table([source, source2], destination, job_id=JOB) self.assertIsInstance(job, CopyJob) self.assertIs(job._client, client) @@ -2575,125 +2536,122 @@ def test_copy_table(self): self.assertIs(job.destination, destination) def test_copy_table_w_explicit_project(self): - job_id = 'this-is-a-job-id' - source_id = 'source_table' - destination_id = 'destination_table' + job_id = "this-is-a-job-id" + source_id = "source_table" + destination_id = "destination_table" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, - 'configuration': { - 'copy': { - 'sourceTables': [ + "configuration": { + "copy": { + "sourceTables": [ { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': source_id, - }, + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": source_id, + } ], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': destination_id, + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": destination_id, }, - }, + } }, } creds = _make_credentials() http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(resource) dataset = client.dataset(self.DS_ID) source = dataset.table(source_id) destination = dataset.table(destination_id) client.copy_table( - source, destination, job_id=job_id, project='other-project', - location=self.LOCATION) + source, + destination, + job_id=job_id, + project="other-project", + location=self.LOCATION, + ) # Check that copy_table actually starts the job. 
conn.api_request.assert_called_once_with( - method='POST', - path='/projects/other-project/jobs', - data=resource, + method="POST", path="/projects/other-project/jobs", data=resource ) def test_copy_table_w_client_location(self): - job_id = 'this-is-a-job-id' - source_id = 'source_table' - destination_id = 'destination_table' + job_id = "this-is-a-job-id" + source_id = "source_table" + destination_id = "destination_table" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, - 'configuration': { - 'copy': { - 'sourceTables': [ + "configuration": { + "copy": { + "sourceTables": [ { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': source_id, - }, + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": source_id, + } ], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': destination_id, + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": destination_id, }, - }, + } }, } creds = _make_credentials() http = object() client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, - location=self.LOCATION) + project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION + ) conn = client._connection = _make_connection(resource) client.copy_table( # Test with string for table IDs. - '{}.{}'.format(self.DS_ID, source_id), - '{}.{}'.format(self.DS_ID, destination_id), - job_id=job_id, project='other-project') + "{}.{}".format(self.DS_ID, source_id), + "{}.{}".format(self.DS_ID, destination_id), + job_id=job_id, + project="other-project", + ) # Check that copy_table actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/other-project/jobs', - data=resource, + method="POST", path="/projects/other-project/jobs", data=resource ) def test_extract_table(self): from google.cloud.bigquery.job import ExtractJob - JOB = 'job_id' - SOURCE = 'source_table' - DESTINATION = 'gs://bucket_name/object_name' + JOB = "job_id" + SOURCE = "source_table" + DESTINATION = "gs://bucket_name/object_name" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': JOB, - }, - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': SOURCE, + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE, }, - 'destinationUris': [DESTINATION], - }, + "destinationUris": [DESTINATION], + } }, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) dataset = client.dataset(self.DS_ID) source = dataset.table(SOURCE) @@ -2702,9 +2660,8 @@ def test_extract_table(self): # Check that extract_table actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/PROJECT/jobs', - data=RESOURCE) + method="POST", path="/projects/PROJECT/jobs", data=RESOURCE + ) # Check the job resource. 
self.assertIsInstance(job, ExtractJob) @@ -2714,83 +2671,85 @@ def test_extract_table(self): self.assertEqual(list(job.destination_uris), [DESTINATION]) def test_extract_table_w_explicit_project(self): - job_id = 'job_id' - source_id = 'source_table' - destination = 'gs://bucket_name/object_name' + job_id = "job_id" + source_id = "source_table" + destination = "gs://bucket_name/object_name" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': source_id, + "configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": source_id, }, - 'destinationUris': [destination], - }, + "destinationUris": [destination], + } }, } creds = _make_credentials() http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(resource) dataset = client.dataset(self.DS_ID) source = dataset.table(source_id) client.extract_table( - source, destination, job_id=job_id, project='other-project', - location=self.LOCATION) + source, + destination, + job_id=job_id, + project="other-project", + location=self.LOCATION, + ) # Check that extract_table actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/other-project/jobs', - data=resource, + method="POST", path="/projects/other-project/jobs", data=resource ) def test_extract_table_w_client_location(self): - job_id = 'job_id' - source_id = 'source_table' - destination = 'gs://bucket_name/object_name' + job_id = "job_id" + source_id = "source_table" + destination = "gs://bucket_name/object_name" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': source_id, + "configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": source_id, }, - 'destinationUris': [destination], - }, + "destinationUris": [destination], + } }, } creds = _make_credentials() http = object() client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, - location=self.LOCATION) + project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION + ) conn = client._connection = _make_connection(resource) client.extract_table( # Test with string for table ID. - '{}.{}'.format(self.DS_ID, source_id), - destination, job_id=job_id, project='other-project') + "{}.{}".format(self.DS_ID, source_id), + destination, + job_id=job_id, + project="other-project", + ) # Check that extract_table actually starts the job. 
conn.api_request.assert_called_once_with( - method='POST', - path='/projects/other-project/jobs', - data=resource, + method="POST", path="/projects/other-project/jobs", data=resource ) def test_extract_table_generated_job_id(self): @@ -2798,46 +2757,40 @@ def test_extract_table_generated_job_id(self): from google.cloud.bigquery.job import ExtractJobConfig from google.cloud.bigquery.job import DestinationFormat - JOB = 'job_id' - SOURCE = 'source_table' - DESTINATION = 'gs://bucket_name/object_name' + JOB = "job_id" + SOURCE = "source_table" + DESTINATION = "gs://bucket_name/object_name" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': JOB, - }, - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': SOURCE, + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE, }, - 'destinationUris': [DESTINATION], - 'destinationFormat': 'NEWLINE_DELIMITED_JSON', - }, + "destinationUris": [DESTINATION], + "destinationFormat": "NEWLINE_DELIMITED_JSON", + } }, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) dataset = client.dataset(self.DS_ID) source = dataset.table(SOURCE) job_config = ExtractJobConfig() - job_config.destination_format = ( - DestinationFormat.NEWLINE_DELIMITED_JSON) + job_config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON job = client.extract_table(source, DESTINATION, job_config=job_config) # Check that extract_table actually starts the job. conn.api_request.assert_called_once() _, req = conn.api_request.call_args - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/projects/PROJECT/jobs') - self.assertIsInstance( - req['data']['jobReference']['jobId'], six.string_types) + self.assertEqual(req["method"], "POST") + self.assertEqual(req["path"], "/projects/PROJECT/jobs") + self.assertIsInstance(req["data"]["jobReference"]["jobId"], six.string_types) # Check the job resource. 
self.assertIsInstance(job, ExtractJob) @@ -2848,74 +2801,56 @@ def test_extract_table_generated_job_id(self): def test_extract_table_w_destination_uris(self): from google.cloud.bigquery.job import ExtractJob - JOB = 'job_id' - SOURCE = 'source_table' - DESTINATION1 = 'gs://bucket_name/object_one' - DESTINATION2 = 'gs://bucket_name/object_two' + JOB = "job_id" + SOURCE = "source_table" + DESTINATION1 = "gs://bucket_name/object_one" + DESTINATION2 = "gs://bucket_name/object_two" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': JOB, - }, - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': SOURCE, + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE, }, - 'destinationUris': [ - DESTINATION1, - DESTINATION2, - ], - }, + "destinationUris": [DESTINATION1, DESTINATION2], + } }, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) dataset = client.dataset(self.DS_ID) source = dataset.table(SOURCE) - job = client.extract_table( - source, [DESTINATION1, DESTINATION2], job_id=JOB) + job = client.extract_table(source, [DESTINATION1, DESTINATION2], job_id=JOB) # Check that extract_table actually starts the job. conn.api_request.assert_called_once() _, req = conn.api_request.call_args - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/projects/PROJECT/jobs') + self.assertEqual(req["method"], "POST") + self.assertEqual(req["path"], "/projects/PROJECT/jobs") # Check the job resource. self.assertIsInstance(job, ExtractJob) self.assertIs(job._client, client) self.assertEqual(job.job_id, JOB) self.assertEqual(job.source, source) - self.assertEqual( - list(job.destination_uris), [DESTINATION1, DESTINATION2]) + self.assertEqual(list(job.destination_uris), [DESTINATION1, DESTINATION2]) def test_query_defaults(self): from google.cloud.bigquery.job import QueryJob - QUERY = 'select count(*) from persons' + QUERY = "select count(*) from persons" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': 'some-random-id', - }, - 'configuration': { - 'query': { - 'query': QUERY, - 'useLegacySql': False, - }, - }, + "jobReference": {"projectId": self.PROJECT, "jobId": "some-random-id"}, + "configuration": {"query": {"query": QUERY, "useLegacySql": False}}, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) job = client.query(QUERY) @@ -2930,68 +2865,59 @@ def test_query_defaults(self): # Check that query actually starts the job. 
conn.api_request.assert_called_once() _, req = conn.api_request.call_args - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/projects/PROJECT/jobs') - sent = req['data'] - self.assertIsInstance( - sent['jobReference']['jobId'], six.string_types) - sent_config = sent['configuration']['query'] - self.assertEqual(sent_config['query'], QUERY) - self.assertFalse(sent_config['useLegacySql']) + self.assertEqual(req["method"], "POST") + self.assertEqual(req["path"], "/projects/PROJECT/jobs") + sent = req["data"] + self.assertIsInstance(sent["jobReference"]["jobId"], six.string_types) + sent_config = sent["configuration"]["query"] + self.assertEqual(sent_config["query"], QUERY) + self.assertFalse(sent_config["useLegacySql"]) def test_query_w_explicit_project(self): - job_id = 'some-job-id' - query = 'select count(*) from persons' + job_id = "some-job-id" + query = "select count(*) from persons" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, - }, - 'configuration': { - 'query': { - 'query': query, - 'useLegacySql': False, - }, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, + "configuration": {"query": {"query": query, "useLegacySql": False}}, } creds = _make_credentials() http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(resource) client.query( - query, job_id=job_id, project='other-project', - location=self.LOCATION) + query, job_id=job_id, project="other-project", location=self.LOCATION + ) # Check that query actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/other-project/jobs', - data=resource, + method="POST", path="/projects/other-project/jobs", data=resource ) def test_query_w_explicit_job_config(self): - job_id = 'some-job-id' - query = 'select count(*) from persons' + job_id = "some-job-id" + query = "select count(*) from persons" resource = { - 'jobReference': { - 'jobId': job_id, - 'projectId': self.PROJECT, - 'location': self.LOCATION, + "jobReference": { + "jobId": job_id, + "projectId": self.PROJECT, + "location": self.LOCATION, }, - 'configuration': { - 'query': { - 'query': query, - 'defaultDataset': { - 'projectId': self.PROJECT, - 'datasetId': 'some-dataset', + "configuration": { + "query": { + "query": query, + "defaultDataset": { + "projectId": self.PROJECT, + "datasetId": "some-dataset", }, - 'useLegacySql': False, - 'useQueryCache': True, - 'maximumBytesBilled': '2000', - }, + "useLegacySql": False, + "useQueryCache": True, + "maximumBytesBilled": "2000", + } }, } @@ -2999,14 +2925,19 @@ def test_query_w_explicit_job_config(self): http = object() from google.cloud.bigquery import QueryJobConfig, DatasetReference + default_job_config = QueryJobConfig() default_job_config.default_dataset = DatasetReference( - self.PROJECT, 'some-dataset') + self.PROJECT, "some-dataset" + ) default_job_config.maximum_bytes_billed = 1000 client = self._make_one( - project=self.PROJECT, credentials=creds, - _http=http, default_query_job_config=default_job_config) + project=self.PROJECT, + credentials=creds, + _http=http, + default_query_job_config=default_job_config, + ) conn = client._connection = _make_connection(resource) job_config = QueryJobConfig() @@ -3014,33 +2945,31 @@ def test_query_w_explicit_job_config(self): 
job_config.maximum_bytes_billed = 2000 client.query( - query, job_id=job_id, location=self.LOCATION, - job_config=job_config) + query, job_id=job_id, location=self.LOCATION, job_config=job_config + ) # Check that query actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/PROJECT/jobs', - data=resource, + method="POST", path="/projects/PROJECT/jobs", data=resource ) def test_query_w_explicit_job_config_override(self): - job_id = 'some-job-id' - query = 'select count(*) from persons' + job_id = "some-job-id" + query = "select count(*) from persons" resource = { - 'jobReference': { - 'jobId': job_id, - 'projectId': self.PROJECT, - 'location': self.LOCATION, + "jobReference": { + "jobId": job_id, + "projectId": self.PROJECT, + "location": self.LOCATION, }, - 'configuration': { - 'query': { - 'query': query, - 'defaultDataset': None, - 'useLegacySql': False, - 'useQueryCache': True, - 'maximumBytesBilled': '2000', - }, + "configuration": { + "query": { + "query": query, + "defaultDataset": None, + "useLegacySql": False, + "useQueryCache": True, + "maximumBytesBilled": "2000", + } }, } @@ -3048,14 +2977,19 @@ def test_query_w_explicit_job_config_override(self): http = object() from google.cloud.bigquery import QueryJobConfig, DatasetReference + default_job_config = QueryJobConfig() default_job_config.default_dataset = DatasetReference( - self.PROJECT, 'some-dataset') + self.PROJECT, "some-dataset" + ) default_job_config.maximum_bytes_billed = 1000 client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, - default_query_job_config=default_job_config) + project=self.PROJECT, + credentials=creds, + _http=http, + default_query_job_config=default_job_config, + ) conn = client._connection = _make_connection(resource) job_config = QueryJobConfig() @@ -3064,32 +2998,29 @@ def test_query_w_explicit_job_config_override(self): job_config.default_dataset = None client.query( - query, job_id=job_id, location=self.LOCATION, - job_config=job_config, + query, job_id=job_id, location=self.LOCATION, job_config=job_config ) # Check that query actually starts the job. 
conn.api_request.assert_called_once_with( - method='POST', - path='/projects/PROJECT/jobs', - data=resource, + method="POST", path="/projects/PROJECT/jobs", data=resource ) def test_query_w_client_default_config_no_incoming(self): - job_id = 'some-job-id' - query = 'select count(*) from persons' + job_id = "some-job-id" + query = "select count(*) from persons" resource = { - 'jobReference': { - 'jobId': job_id, - 'projectId': self.PROJECT, - 'location': self.LOCATION, + "jobReference": { + "jobId": job_id, + "projectId": self.PROJECT, + "location": self.LOCATION, }, - 'configuration': { - 'query': { - 'query': query, - 'useLegacySql': False, - 'maximumBytesBilled': '1000', - }, + "configuration": { + "query": { + "query": query, + "useLegacySql": False, + "maximumBytesBilled": "1000", + } }, } @@ -3097,78 +3028,65 @@ def test_query_w_client_default_config_no_incoming(self): http = object() from google.cloud.bigquery import QueryJobConfig + default_job_config = QueryJobConfig() default_job_config.maximum_bytes_billed = 1000 client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, - default_query_job_config=default_job_config) + project=self.PROJECT, + credentials=creds, + _http=http, + default_query_job_config=default_job_config, + ) conn = client._connection = _make_connection(resource) - client.query( - query, job_id=job_id, location=self.LOCATION) + client.query(query, job_id=job_id, location=self.LOCATION) # Check that query actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/PROJECT/jobs', - data=resource, + method="POST", path="/projects/PROJECT/jobs", data=resource ) def test_query_w_client_location(self): - job_id = 'some-job-id' - query = 'select count(*) from persons' + job_id = "some-job-id" + query = "select count(*) from persons" resource = { - 'jobReference': { - 'projectId': 'other-project', - 'location': self.LOCATION, - 'jobId': job_id, - }, - 'configuration': { - 'query': { - 'query': query, - 'useLegacySql': False, - }, + "jobReference": { + "projectId": "other-project", + "location": self.LOCATION, + "jobId": job_id, }, + "configuration": {"query": {"query": query, "useLegacySql": False}}, } creds = _make_credentials() http = object() client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http, - location=self.LOCATION) + project=self.PROJECT, credentials=creds, _http=http, location=self.LOCATION + ) conn = client._connection = _make_connection(resource) - client.query( - query, job_id=job_id, project='other-project') + client.query(query, job_id=job_id, project="other-project") # Check that query actually starts the job. conn.api_request.assert_called_once_with( - method='POST', - path='/projects/other-project/jobs', - data=resource, + method="POST", path="/projects/other-project/jobs", data=resource ) def test_query_detect_location(self): - query = 'select count(*) from persons' - resource_location = 'EU' + query = "select count(*) from persons" + resource_location = "EU" resource = { - 'jobReference': { - 'projectId': self.PROJECT, + "jobReference": { + "projectId": self.PROJECT, # Location not set in request, but present in the response. 
- 'location': resource_location, - 'jobId': 'some-random-id', - }, - 'configuration': { - 'query': { - 'query': query, - 'useLegacySql': False, - }, + "location": resource_location, + "jobId": "some-random-id", }, + "configuration": {"query": {"query": query, "useLegacySql": False}}, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(resource) job = client.query(query) @@ -3178,36 +3096,30 @@ def test_query_detect_location(self): # Check that request did not contain a location. conn.api_request.assert_called_once() _, req = conn.api_request.call_args - sent = req['data'] - self.assertIsNone(sent['jobReference'].get('location')) + sent = req["data"] + self.assertIsNone(sent["jobReference"].get("location")) def test_query_w_udf_resources(self): from google.cloud.bigquery.job import QueryJob from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.query import UDFResource - RESOURCE_URI = 'gs://some-bucket/js/lib.js' - JOB = 'job_name' - QUERY = 'select count(*) from persons' + RESOURCE_URI = "gs://some-bucket/js/lib.js" + JOB = "job_name" + QUERY = "select count(*) from persons" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': JOB, - }, - 'configuration': { - 'query': { - 'query': QUERY, - 'useLegacySql': True, - 'userDefinedFunctionResources': [ - {'resourceUri': RESOURCE_URI}, - ], - }, + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "query": { + "query": QUERY, + "useLegacySql": True, + "userDefinedFunctionResources": [{"resourceUri": RESOURCE_URI}], + } }, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) udf_resources = [UDFResource("resourceUri", RESOURCE_URI)] config = QueryJobConfig() @@ -3226,50 +3138,46 @@ def test_query_w_udf_resources(self): # Check that query actually starts the job. 
conn.api_request.assert_called_once() _, req = conn.api_request.call_args - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/projects/PROJECT/jobs') - sent = req['data'] - self.assertIsInstance( - sent['jobReference']['jobId'], six.string_types) - sent_config = sent['configuration']['query'] - self.assertEqual(sent_config['query'], QUERY) - self.assertTrue(sent_config['useLegacySql']) + self.assertEqual(req["method"], "POST") + self.assertEqual(req["path"], "/projects/PROJECT/jobs") + sent = req["data"] + self.assertIsInstance(sent["jobReference"]["jobId"], six.string_types) + sent_config = sent["configuration"]["query"] + self.assertEqual(sent_config["query"], QUERY) + self.assertTrue(sent_config["useLegacySql"]) self.assertEqual( - sent_config['userDefinedFunctionResources'][0], - {'resourceUri': RESOURCE_URI}) + sent_config["userDefinedFunctionResources"][0], + {"resourceUri": RESOURCE_URI}, + ) def test_query_w_query_parameters(self): from google.cloud.bigquery.job import QueryJob from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.query import ScalarQueryParameter - JOB = 'job_name' - QUERY = 'select count(*) from persons' + JOB = "job_name" + QUERY = "select count(*) from persons" RESOURCE = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': JOB, - }, - 'configuration': { - 'query': { - 'query': QUERY, - 'useLegacySql': False, - 'queryParameters': [ + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "query": { + "query": QUERY, + "useLegacySql": False, + "queryParameters": [ { - 'name': 'foo', - 'parameterType': {'type': 'INT64'}, - 'parameterValue': {'value': '123'} - }, + "name": "foo", + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": "123"}, + } ], - }, + } }, } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESOURCE) - query_parameters = [ScalarQueryParameter('foo', 'INT64', 123)] + query_parameters = [ScalarQueryParameter("foo", "INT64", 123)] config = QueryJobConfig() config.query_parameters = query_parameters @@ -3285,34 +3193,34 @@ def test_query_w_query_parameters(self): # Check that query actually starts the job. 
conn.api_request.assert_called_once() _, req = conn.api_request.call_args - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/projects/PROJECT/jobs') - sent = req['data'] - self.assertEqual(sent['jobReference']['jobId'], JOB) - sent_config = sent['configuration']['query'] - self.assertEqual(sent_config['query'], QUERY) - self.assertFalse(sent_config['useLegacySql']) + self.assertEqual(req["method"], "POST") + self.assertEqual(req["path"], "/projects/PROJECT/jobs") + sent = req["data"] + self.assertEqual(sent["jobReference"]["jobId"], JOB) + sent_config = sent["configuration"]["query"] + self.assertEqual(sent_config["query"], QUERY) + self.assertFalse(sent_config["useLegacySql"]) self.assertEqual( - sent_config['queryParameters'][0], + sent_config["queryParameters"][0], { - 'name': 'foo', - 'parameterType': {'type': 'INT64'}, - 'parameterValue': {'value': '123'} - }) + "name": "foo", + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": "123"}, + }, + ) def test_insert_rows_wo_schema(self): from google.cloud.bigquery.table import Table, _TABLE_HAS_NO_SCHEMA creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) table = Table(self.TABLE_REF) ROWS = [ - ('Phred Phlyntstone', 32), - ('Bharney Rhubble', 33), - ('Wylma Phlyntstone', 29), - ('Bhettye Rhubble', 27), + ("Phred Phlyntstone", 32), + ("Bharney Rhubble", 33), + ("Wylma Phlyntstone", 29), + ("Bhettye Rhubble", 27), ] with self.assertRaises(ValueError) as exc: @@ -3328,55 +3236,53 @@ def test_insert_rows_w_schema(self): from google.cloud.bigquery.table import SchemaField WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) + PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection({}) schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED'), - SchemaField('joined', 'TIMESTAMP', mode='NULLABLE'), + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + SchemaField("joined", "TIMESTAMP", mode="NULLABLE"), ] ROWS = [ - ('Phred Phlyntstone', 32, _datetime_to_rfc3339(WHEN)), - ('Bharney Rhubble', 33, WHEN + datetime.timedelta(seconds=1)), - ('Wylma Phlyntstone', 29, WHEN + datetime.timedelta(seconds=2)), - ('Bhettye Rhubble', 27, None), + ("Phred Phlyntstone", 32, _datetime_to_rfc3339(WHEN)), + ("Bharney Rhubble", 33, WHEN + datetime.timedelta(seconds=1)), + ("Wylma Phlyntstone", 29, WHEN + datetime.timedelta(seconds=2)), + ("Bhettye Rhubble", 27, None), ] def _row_data(row): joined = row[2] if isinstance(row[2], datetime.datetime): joined = _microseconds_from_datetime(joined) * 1e-6 - return {'full_name': row[0], - 'age': str(row[1]), - 'joined': joined} + return {"full_name": row[0], "age": str(row[1]), "joined": joined} SENT = { - 'rows': [{ - 'json': _row_data(row), - 'insertId': str(i), - } for i, row in enumerate(ROWS)], + "rows": [ + 
{"json": _row_data(row), "insertId": str(i)} + for i, row in enumerate(ROWS) + ] } - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(ROWS)))): + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): # Test with using string IDs for the table. errors = client.insert_rows( - '{}.{}'.format(self.DS_ID, self.TABLE_ID), - ROWS, - selected_fields=schema) + "{}.{}".format(self.DS_ID, self.TABLE_ID), ROWS, selected_fields=schema + ) self.assertEqual(len(errors), 0) conn.api_request.assert_called_once() _, req = conn.api_request.call_args - self.assertEqual(req['method'], 'POST') - self.assertEqual(req['path'], '/%s' % PATH) - self.assertEqual(req['data'], SENT) + self.assertEqual(req["method"], "POST") + self.assertEqual(req["path"], "/%s" % PATH) + self.assertEqual(req["data"], SENT) def test_insert_rows_w_list_of_dictionaries(self): import datetime @@ -3386,152 +3292,163 @@ def test_insert_rows_w_list_of_dictionaries(self): from google.cloud.bigquery.table import Table, SchemaField WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) + PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection({}) schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED'), - SchemaField('joined', 'TIMESTAMP', mode='NULLABLE'), + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + SchemaField("joined", "TIMESTAMP", mode="NULLABLE"), ] table = Table(self.TABLE_REF, schema=schema) ROWS = [ { - 'full_name': 'Phred Phlyntstone', 'age': 32, - 'joined': _datetime_to_rfc3339(WHEN) - }, - { - 'full_name': 'Bharney Rhubble', 'age': 33, - 'joined': WHEN + datetime.timedelta(seconds=1) + "full_name": "Phred Phlyntstone", + "age": 32, + "joined": _datetime_to_rfc3339(WHEN), }, { - 'full_name': 'Wylma Phlyntstone', 'age': 29, - 'joined': WHEN + datetime.timedelta(seconds=2) + "full_name": "Bharney Rhubble", + "age": 33, + "joined": WHEN + datetime.timedelta(seconds=1), }, { - 'full_name': 'Bhettye Rhubble', 'age': 27, 'joined': None + "full_name": "Wylma Phlyntstone", + "age": 29, + "joined": WHEN + datetime.timedelta(seconds=2), }, + {"full_name": "Bhettye Rhubble", "age": 27, "joined": None}, ] def _row_data(row): - joined = row['joined'] + joined = row["joined"] if isinstance(joined, datetime.datetime): - row['joined'] = _microseconds_from_datetime(joined) * 1e-6 - row['age'] = str(row['age']) + row["joined"] = _microseconds_from_datetime(joined) * 1e-6 + row["age"] = str(row["age"]) return row SENT = { - 'rows': [{ - 'json': _row_data(row), - 'insertId': str(i), - } for i, row in enumerate(ROWS)], + "rows": [ + {"json": _row_data(row), "insertId": str(i)} + for i, row in enumerate(ROWS) + ] } - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(ROWS)))): + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): errors = client.insert_rows(table, ROWS) self.assertEqual(len(errors), 0) conn.api_request.assert_called_once_with( - method='POST', - 
path='/%s' % PATH, - data=SENT) + method="POST", path="/%s" % PATH, data=SENT + ) def test_insert_rows_w_list_of_Rows(self): from google.cloud.bigquery.table import Table from google.cloud.bigquery.table import SchemaField from google.cloud.bigquery.table import Row - PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection({}) schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED'), + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] table = Table(self.TABLE_REF, schema=schema) - f2i = {'full_name': 0, 'age': 1} + f2i = {"full_name": 0, "age": 1} ROWS = [ - Row(('Phred Phlyntstone', 32), f2i), - Row(('Bharney Rhubble', 33), f2i), - Row(('Wylma Phlyntstone', 29), f2i), - Row(('Bhettye Rhubble', 27), f2i), + Row(("Phred Phlyntstone", 32), f2i), + Row(("Bharney Rhubble", 33), f2i), + Row(("Wylma Phlyntstone", 29), f2i), + Row(("Bhettye Rhubble", 27), f2i), ] def _row_data(row): - return {'full_name': row[0], 'age': str(row[1])} + return {"full_name": row[0], "age": str(row[1])} SENT = { - 'rows': [{ - 'json': _row_data(row), - 'insertId': str(i), - } for i, row in enumerate(ROWS)], + "rows": [ + {"json": _row_data(row), "insertId": str(i)} + for i, row in enumerate(ROWS) + ] } - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(ROWS)))): + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): errors = client.insert_rows(table, ROWS) self.assertEqual(len(errors), 0) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, - data=SENT) + method="POST", path="/%s" % PATH, data=SENT + ) def test_insert_rows_w_skip_invalid_and_ignore_unknown(self): from google.cloud.bigquery.table import Table, SchemaField - PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) RESPONSE = { - 'insertErrors': [ - {'index': 1, - 'errors': [ - {'reason': 'REASON', - 'location': 'LOCATION', - 'debugInfo': 'INFO', - 'message': 'MESSAGE'} - ]}, - ]} + "insertErrors": [ + { + "index": 1, + "errors": [ + { + "reason": "REASON", + "location": "LOCATION", + "debugInfo": "INFO", + "message": "MESSAGE", + } + ], + } + ] + } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(RESPONSE) schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED'), - SchemaField('voter', 'BOOLEAN', mode='NULLABLE'), + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + SchemaField("voter", "BOOLEAN", mode="NULLABLE"), ] table = Table(self.TABLE_REF, schema=schema) ROWS = [ - ('Phred Phlyntstone', 32, True), - ('Bharney Rhubble', 33, False), - ('Wylma Phlyntstone', 29, True), - ('Bhettye Rhubble', 27, True), + ("Phred Phlyntstone", 32, 
True), + ("Bharney Rhubble", 33, False), + ("Wylma Phlyntstone", 29, True), + ("Bhettye Rhubble", 27, True), ] def _row_data(row): return { - 'full_name': row[0], - 'age': str(row[1]), - 'voter': row[2] and 'true' or 'false', + "full_name": row[0], + "age": str(row[1]), + "voter": row[2] and "true" or "false", } SENT = { - 'skipInvalidRows': True, - 'ignoreUnknownValues': True, - 'templateSuffix': '20160303', - 'rows': [{'insertId': index, 'json': _row_data(row)} - for index, row in enumerate(ROWS)], + "skipInvalidRows": True, + "ignoreUnknownValues": True, + "templateSuffix": "20160303", + "rows": [ + {"insertId": index, "json": _row_data(row)} + for index, row in enumerate(ROWS) + ], } errors = client.insert_rows( @@ -3540,119 +3457,119 @@ def _row_data(row): row_ids=[index for index, _ in enumerate(ROWS)], skip_invalid_rows=True, ignore_unknown_values=True, - template_suffix='20160303', + template_suffix="20160303", ) self.assertEqual(len(errors), 1) - self.assertEqual(errors[0]['index'], 1) - self.assertEqual(len(errors[0]['errors']), 1) - self.assertEqual(errors[0]['errors'][0], - RESPONSE['insertErrors'][0]['errors'][0]) + self.assertEqual(errors[0]["index"], 1) + self.assertEqual(len(errors[0]["errors"]), 1) + self.assertEqual( + errors[0]["errors"][0], RESPONSE["insertErrors"][0]["errors"][0] + ) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, - data=SENT) + method="POST", path="/%s" % PATH, data=SENT + ) def test_insert_rows_w_repeated_fields(self): from google.cloud.bigquery.table import Table, SchemaField - PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection({}) - full_name = SchemaField('color', 'STRING', mode='REPEATED') - index = SchemaField('index', 'INTEGER', 'REPEATED') - score = SchemaField('score', 'FLOAT', 'REPEATED') - struct = SchemaField('struct', 'RECORD', mode='REPEATED', - fields=[index, score]) + full_name = SchemaField("color", "STRING", mode="REPEATED") + index = SchemaField("index", "INTEGER", "REPEATED") + score = SchemaField("score", "FLOAT", "REPEATED") + struct = SchemaField("struct", "RECORD", mode="REPEATED", fields=[index, score]) table = Table(self.TABLE_REF, schema=[full_name, struct]) - ROWS = [ - (['red', 'green'], [{'index': [1, 2], 'score': [3.1415, 1.414]}]), - ] + ROWS = [(["red", "green"], [{"index": [1, 2], "score": [3.1415, 1.414]}])] def _row_data(row): - return {'color': row[0], - 'struct': row[1]} + return {"color": row[0], "struct": row[1]} SENT = { - 'rows': [{ - 'json': _row_data(row), - 'insertId': str(i), - } for i, row in enumerate(ROWS)], + "rows": [ + {"json": _row_data(row), "insertId": str(i)} + for i, row in enumerate(ROWS) + ] } - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(ROWS)))): + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): errors = client.insert_rows(table, ROWS) self.assertEqual(len(errors), 0) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, - data=SENT) + method="POST", path="/%s" % PATH, data=SENT + ) def test_insert_rows_w_record_schema(self): from google.cloud.bigquery.table import SchemaField - PATH = 
'projects/%s/datasets/%s/tables/%s/insertAll' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection({}) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - area_code = SchemaField('area_code', 'STRING', 'REQUIRED') - local_number = SchemaField('local_number', 'STRING', 'REQUIRED') - rank = SchemaField('rank', 'INTEGER', 'REQUIRED') - phone = SchemaField('phone', 'RECORD', mode='NULLABLE', - fields=[area_code, local_number, rank]) + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + area_code = SchemaField("area_code", "STRING", "REQUIRED") + local_number = SchemaField("local_number", "STRING", "REQUIRED") + rank = SchemaField("rank", "INTEGER", "REQUIRED") + phone = SchemaField( + "phone", "RECORD", mode="NULLABLE", fields=[area_code, local_number, rank] + ) ROWS = [ - ('Phred Phlyntstone', {'area_code': '800', - 'local_number': '555-1212', - 'rank': 1}), - ('Bharney Rhubble', {'area_code': '877', - 'local_number': '768-5309', - 'rank': 2}), - ('Wylma Phlyntstone', None), + ( + "Phred Phlyntstone", + {"area_code": "800", "local_number": "555-1212", "rank": 1}, + ), + ( + "Bharney Rhubble", + {"area_code": "877", "local_number": "768-5309", "rank": 2}, + ), + ("Wylma Phlyntstone", None), ] def _row_data(row): - return {'full_name': row[0], - 'phone': row[1]} + return {"full_name": row[0], "phone": row[1]} SENT = { - 'rows': [{ - 'json': _row_data(row), - 'insertId': str(i), - } for i, row in enumerate(ROWS)], + "rows": [ + {"json": _row_data(row), "insertId": str(i)} + for i, row in enumerate(ROWS) + ] } - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(ROWS)))): - errors = client.insert_rows(self.TABLE_REF, ROWS, - selected_fields=[full_name, phone]) + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): + errors = client.insert_rows( + self.TABLE_REF, ROWS, selected_fields=[full_name, phone] + ) self.assertEqual(len(errors), 0) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, - data=SENT) + method="POST", path="/%s" % PATH, data=SENT + ) def test_insert_rows_errors(self): from google.cloud.bigquery.table import Table ROWS = [ - ('Phred Phlyntstone', 32, True), - ('Bharney Rhubble', 33, False), - ('Wylma Phlyntstone', 29, True), - ('Bhettye Rhubble', 27, True), + ("Phred Phlyntstone", 32, True), + ("Bharney Rhubble", 33, False), + ("Wylma Phlyntstone", 29, True), + ("Bhettye Rhubble", 27, True), ] creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) # table ref with no selected fields with self.assertRaises(ValueError): @@ -3669,170 +3586,154 @@ def test_insert_rows_errors(self): def test_insert_rows_w_numeric(self): from google.cloud.bigquery import table - project = 'PROJECT' - ds_id = 'DS_ID' - table_id = 'TABLE_ID' + project = "PROJECT" + ds_id = "DS_ID" + table_id = "TABLE_ID" creds = _make_credentials() http = object() client = self._make_one(project=project, credentials=creds, _http=http) conn = client._connection = _make_connection({}) table_ref = DatasetReference(project, ds_id).table(table_id) 
schema = [ - table.SchemaField('account', 'STRING'), - table.SchemaField('balance', 'NUMERIC'), + table.SchemaField("account", "STRING"), + table.SchemaField("balance", "NUMERIC"), ] insert_table = table.Table(table_ref, schema=schema) rows = [ - ('Savings', decimal.Decimal('23.47')), - ('Checking', decimal.Decimal('1.98')), - ('Mortgage', decimal.Decimal('-12345678909.87654321')), + ("Savings", decimal.Decimal("23.47")), + ("Checking", decimal.Decimal("1.98")), + ("Mortgage", decimal.Decimal("-12345678909.87654321")), ] - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(rows)))): + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(rows)))): errors = client.insert_rows(insert_table, rows) self.assertEqual(len(errors), 0) rows_json = [ - {'account': 'Savings', 'balance': '23.47'}, - {'account': 'Checking', 'balance': '1.98'}, - { - 'account': 'Mortgage', - 'balance': '-12345678909.87654321', - }, + {"account": "Savings", "balance": "23.47"}, + {"account": "Checking", "balance": "1.98"}, + {"account": "Mortgage", "balance": "-12345678909.87654321"}, ] sent = { - 'rows': [{ - 'json': row, - 'insertId': str(i), - } for i, row in enumerate(rows_json)], + "rows": [ + {"json": row, "insertId": str(i)} for i, row in enumerate(rows_json) + ] } conn.api_request.assert_called_once_with( - method='POST', - path='/projects/{}/datasets/{}/tables/{}/insertAll'.format( - project, ds_id, table_id), - data=sent) + method="POST", + path="/projects/{}/datasets/{}/tables/{}/insertAll".format( + project, ds_id, table_id + ), + data=sent, + ) def test_insert_rows_json(self): from google.cloud.bigquery.table import Table, SchemaField from google.cloud.bigquery.dataset import DatasetReference - PROJECT = 'PROJECT' - DS_ID = 'DS_ID' - TABLE_ID = 'TABLE_ID' - PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % ( - PROJECT, DS_ID, TABLE_ID) + PROJECT = "PROJECT" + DS_ID = "DS_ID" + TABLE_ID = "TABLE_ID" + PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( + PROJECT, + DS_ID, + TABLE_ID, + ) creds = _make_credentials() http = object() client = self._make_one(project=PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection({}) table_ref = DatasetReference(PROJECT, DS_ID).table(TABLE_ID) schema = [ - SchemaField('full_name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED'), - SchemaField('joined', 'TIMESTAMP', mode='NULLABLE'), + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + SchemaField("joined", "TIMESTAMP", mode="NULLABLE"), ] table = Table(table_ref, schema=schema) ROWS = [ { - 'full_name': 'Phred Phlyntstone', 'age': '32', - 'joined': '2015-07-24T19:53:19.006000Z' - }, - { - 'full_name': 'Bharney Rhubble', 'age': '33', - 'joined': 1437767600.006 - }, - { - 'full_name': 'Wylma Phlyntstone', 'age': '29', - 'joined': 1437767601.006 - }, - { - 'full_name': 'Bhettye Rhubble', 'age': '27', 'joined': None + "full_name": "Phred Phlyntstone", + "age": "32", + "joined": "2015-07-24T19:53:19.006000Z", }, + {"full_name": "Bharney Rhubble", "age": "33", "joined": 1437767600.006}, + {"full_name": "Wylma Phlyntstone", "age": "29", "joined": 1437767601.006}, + {"full_name": "Bhettye Rhubble", "age": "27", "joined": None}, ] SENT = { - 'rows': [{ - 'json': row, - 'insertId': str(i), - } for i, row in enumerate(ROWS)], + "rows": [{"json": row, "insertId": str(i)} for i, row in enumerate(ROWS)] } - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(ROWS)))): + with 
mock.patch("uuid.uuid4", side_effect=map(str, range(len(ROWS)))): errors = client.insert_rows_json(table, ROWS) self.assertEqual(len(errors), 0) conn.api_request.assert_called_once_with( - method='POST', - path='/%s' % PATH, - data=SENT) + method="POST", path="/%s" % PATH, data=SENT + ) def test_insert_rows_json_with_string_id(self): - rows = [{'col1': 'val1'}] + rows = [{"col1": "val1"}] creds = _make_credentials() http = object() client = self._make_one( - project='default-project', credentials=creds, _http=http) + project="default-project", credentials=creds, _http=http + ) conn = client._connection = _make_connection({}) - with mock.patch('uuid.uuid4', side_effect=map(str, range(len(rows)))): - errors = client.insert_rows_json('proj.dset.tbl', rows) + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(rows)))): + errors = client.insert_rows_json("proj.dset.tbl", rows) self.assertEqual(len(errors), 0) expected = { - 'rows': [{ - 'json': row, - 'insertId': str(i), - } for i, row in enumerate(rows)], + "rows": [{"json": row, "insertId": str(i)} for i, row in enumerate(rows)] } conn.api_request.assert_called_once_with( - method='POST', - path='/projects/proj/datasets/dset/tables/tbl/insertAll', - data=expected) + method="POST", + path="/projects/proj/datasets/dset/tables/tbl/insertAll", + data=expected, + ) def test_list_partitions(self): from google.cloud.bigquery.table import Table rows = 3 meta_info = _make_list_partitons_meta_info( - self.PROJECT, self.DS_ID, self.TABLE_ID, rows) + self.PROJECT, self.DS_ID, self.TABLE_ID, rows + ) data = { - 'totalRows': str(rows), - 'rows': [ - {'f': [ - {'v': '20180101'}, - ]}, - {'f': [ - {'v': '20180102'}, - ]}, - {'f': [ - {'v': '20180103'}, - ]}, - ] + "totalRows": str(rows), + "rows": [ + {"f": [{"v": "20180101"}]}, + {"f": [{"v": "20180102"}]}, + {"f": [{"v": "20180103"}]}, + ], } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) client._connection = _make_connection(meta_info, data) table = Table(self.TABLE_REF) partition_list = client.list_partitions(table) self.assertEqual(len(partition_list), rows) - self.assertIn('20180102', partition_list) + self.assertIn("20180102", partition_list) def test_list_partitions_with_string_id(self): meta_info = _make_list_partitons_meta_info( - self.PROJECT, self.DS_ID, self.TABLE_ID, 0) + self.PROJECT, self.DS_ID, self.TABLE_ID, 0 + ) creds = _make_credentials() http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) client._connection = _make_connection(meta_info, {}) partition_list = client.list_partitions( - '{}.{}'.format(self.DS_ID, self.TABLE_ID)) + "{}.{}".format(self.DS_ID, self.TABLE_ID) + ) self.assertEqual(len(partition_list), 0) @@ -3843,54 +3744,57 @@ def test_list_rows(self): from google.cloud.bigquery.table import SchemaField from google.cloud.bigquery.table import Row - PATH = 'projects/%s/datasets/%s/tables/%s/data' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + PATH = "projects/%s/datasets/%s/tables/%s/data" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) + WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) WHEN_1 = WHEN + datetime.timedelta(seconds=1) WHEN_2 = WHEN + 
datetime.timedelta(seconds=2) ROWS = 1234 - TOKEN = 'TOKEN' + TOKEN = "TOKEN" def _bigquery_timestamp_float_repr(ts_float): # Preserve microsecond precision for E+09 timestamps - return '%0.15E' % (ts_float,) + return "%0.15E" % (ts_float,) DATA = { - 'totalRows': str(ROWS), - 'pageToken': TOKEN, - 'rows': [ - {'f': [ - {'v': 'Phred Phlyntstone'}, - {'v': '32'}, - {'v': _bigquery_timestamp_float_repr(WHEN_TS)}, - ]}, - {'f': [ - {'v': 'Bharney Rhubble'}, - {'v': '33'}, - {'v': _bigquery_timestamp_float_repr(WHEN_TS + 1)}, - ]}, - {'f': [ - {'v': 'Wylma Phlyntstone'}, - {'v': '29'}, - {'v': _bigquery_timestamp_float_repr(WHEN_TS + 2)}, - ]}, - {'f': [ - {'v': 'Bhettye Rhubble'}, - {'v': None}, - {'v': None}, - ]}, - ] + "totalRows": str(ROWS), + "pageToken": TOKEN, + "rows": [ + { + "f": [ + {"v": "Phred Phlyntstone"}, + {"v": "32"}, + {"v": _bigquery_timestamp_float_repr(WHEN_TS)}, + ] + }, + { + "f": [ + {"v": "Bharney Rhubble"}, + {"v": "33"}, + {"v": _bigquery_timestamp_float_repr(WHEN_TS + 1)}, + ] + }, + { + "f": [ + {"v": "Wylma Phlyntstone"}, + {"v": "29"}, + {"v": _bigquery_timestamp_float_repr(WHEN_TS + 2)}, + ] + }, + {"f": [{"v": "Bhettye Rhubble"}, {"v": None}, {"v": None}]}, + ], } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(DATA, DATA) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='NULLABLE') - joined = SchemaField('joined', 'TIMESTAMP', mode='NULLABLE') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="NULLABLE") + joined = SchemaField("joined", "TIMESTAMP", mode="NULLABLE") table = Table(self.TABLE_REF, schema=[full_name, age, joined]) iterator = client.list_rows(table) @@ -3899,36 +3803,31 @@ def _bigquery_timestamp_float_repr(ts_float): total_rows = iterator.total_rows page_token = iterator.next_page_token - f2i = {'full_name': 0, 'age': 1, 'joined': 2} + f2i = {"full_name": 0, "age": 1, "joined": 2} self.assertEqual(len(rows), 4) - self.assertEqual(rows[0], Row(('Phred Phlyntstone', 32, WHEN), f2i)) - self.assertEqual(rows[1], Row(('Bharney Rhubble', 33, WHEN_1), f2i)) - self.assertEqual(rows[2], Row(('Wylma Phlyntstone', 29, WHEN_2), f2i)) - self.assertEqual(rows[3], Row(('Bhettye Rhubble', None, None), f2i)) + self.assertEqual(rows[0], Row(("Phred Phlyntstone", 32, WHEN), f2i)) + self.assertEqual(rows[1], Row(("Bharney Rhubble", 33, WHEN_1), f2i)) + self.assertEqual(rows[2], Row(("Wylma Phlyntstone", 29, WHEN_2), f2i)) + self.assertEqual(rows[3], Row(("Bhettye Rhubble", None, None), f2i)) self.assertEqual(total_rows, ROWS) self.assertEqual(page_token, TOKEN) conn.api_request.assert_called_once_with( - method='GET', - path='/%s' % PATH, - query_params={}) + method="GET", path="/%s" % PATH, query_params={} + ) def test_list_rows_empty_table(self): - response = { - 'totalRows': '0', - 'rows': [], - } + response = {"totalRows": "0", "rows": []} creds = _make_credentials() http = object() - client = self._make_one( - project=self.PROJECT, credentials=creds, _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) client._connection = _make_connection(response, response) # Table that has no schema because it's an empty table. rows = tuple( client.list_rows( # Test with using a string for the table ID. 
- '{}.{}.{}'.format( + "{}.{}.{}".format( self.TABLE_REF.project, self.TABLE_REF.dataset_id, self.TABLE_REF.table_id, @@ -3943,114 +3842,123 @@ def test_list_rows_query_params(self): creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) - table = Table(self.TABLE_REF, - schema=[SchemaField('age', 'INTEGER', mode='NULLABLE')]) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + table = Table( + self.TABLE_REF, schema=[SchemaField("age", "INTEGER", mode="NULLABLE")] + ) tests = [ ({}, {}), - ({'start_index': 1}, {'startIndex': 1}), - ({'max_results': 2}, {'maxResults': 2}), - ({'start_index': 1, 'max_results': 2}, - {'startIndex': 1, 'maxResults': 2}), + ({"start_index": 1}, {"startIndex": 1}), + ({"max_results": 2}, {"maxResults": 2}), + ({"start_index": 1, "max_results": 2}, {"startIndex": 1, "maxResults": 2}), ] conn = client._connection = _make_connection(*len(tests) * [{}]) for i, test in enumerate(tests): iterator = client.list_rows(table, **test[0]) six.next(iterator.pages) req = conn.api_request.call_args_list[i] - self.assertEqual(req[1]['query_params'], test[1], - 'for kwargs %s' % test[0]) + self.assertEqual(req[1]["query_params"], test[1], "for kwargs %s" % test[0]) def test_list_rows_repeated_fields(self): from google.cloud.bigquery.table import SchemaField - PATH = 'projects/%s/datasets/%s/tables/%s/data' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + PATH = "projects/%s/datasets/%s/tables/%s/data" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) ROWS = 1234 - TOKEN = 'TOKEN' + TOKEN = "TOKEN" DATA = { - 'totalRows': ROWS, - 'pageToken': TOKEN, - 'rows': [ - {'f': [ - {'v': [{'v': 'red'}, {'v': 'green'}]}, - {'v': [{ - 'v': { - 'f': [ - {'v': [{'v': '1'}, {'v': '2'}]}, - {'v': [{'v': '3.1415'}, {'v': '1.414'}]}, - ]} - }]}, - ]}, - ] + "totalRows": ROWS, + "pageToken": TOKEN, + "rows": [ + { + "f": [ + {"v": [{"v": "red"}, {"v": "green"}]}, + { + "v": [ + { + "v": { + "f": [ + {"v": [{"v": "1"}, {"v": "2"}]}, + {"v": [{"v": "3.1415"}, {"v": "1.414"}]}, + ] + } + } + ] + }, + ] + } + ], } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(DATA) - color = SchemaField('color', 'STRING', mode='REPEATED') - index = SchemaField('index', 'INTEGER', 'REPEATED') - score = SchemaField('score', 'FLOAT', 'REPEATED') - struct = SchemaField('struct', 'RECORD', mode='REPEATED', - fields=[index, score]) - - iterator = client.list_rows(self.TABLE_REF, - selected_fields=[color, struct]) + color = SchemaField("color", "STRING", mode="REPEATED") + index = SchemaField("index", "INTEGER", "REPEATED") + score = SchemaField("score", "FLOAT", "REPEATED") + struct = SchemaField("struct", "RECORD", mode="REPEATED", fields=[index, score]) + + iterator = client.list_rows(self.TABLE_REF, selected_fields=[color, struct]) page = six.next(iterator.pages) rows = list(page) total_rows = iterator.total_rows page_token = iterator.next_page_token self.assertEqual(len(rows), 1) - self.assertEqual(rows[0][0], ['red', 'green']) - self.assertEqual(rows[0][1], [{'index': [1, 2], - 'score': [3.1415, 1.414]}]) + self.assertEqual(rows[0][0], ["red", "green"]) + self.assertEqual(rows[0][1], [{"index": [1, 2], "score": [3.1415, 1.414]}]) self.assertEqual(total_rows, ROWS) self.assertEqual(page_token, TOKEN) 
conn.api_request.assert_called_once_with( - method='GET', - path='/%s' % PATH, - query_params={'selectedFields': 'color,struct'}) + method="GET", + path="/%s" % PATH, + query_params={"selectedFields": "color,struct"}, + ) def test_list_rows_w_record_schema(self): from google.cloud.bigquery.table import Table, SchemaField - PATH = 'projects/%s/datasets/%s/tables/%s/data' % ( - self.PROJECT, self.DS_ID, self.TABLE_ID) + PATH = "projects/%s/datasets/%s/tables/%s/data" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_ID, + ) ROWS = 1234 - TOKEN = 'TOKEN' + TOKEN = "TOKEN" DATA = { - 'totalRows': ROWS, - 'pageToken': TOKEN, - 'rows': [ - {'f': [ - {'v': 'Phred Phlyntstone'}, - {'v': {'f': [{'v': '800'}, {'v': '555-1212'}, {'v': 1}]}}, - ]}, - {'f': [ - {'v': 'Bharney Rhubble'}, - {'v': {'f': [{'v': '877'}, {'v': '768-5309'}, {'v': 2}]}}, - ]}, - {'f': [ - {'v': 'Wylma Phlyntstone'}, - {'v': None}, - ]}, - ] + "totalRows": ROWS, + "pageToken": TOKEN, + "rows": [ + { + "f": [ + {"v": "Phred Phlyntstone"}, + {"v": {"f": [{"v": "800"}, {"v": "555-1212"}, {"v": 1}]}}, + ] + }, + { + "f": [ + {"v": "Bharney Rhubble"}, + {"v": {"f": [{"v": "877"}, {"v": "768-5309"}, {"v": 2}]}}, + ] + }, + {"f": [{"v": "Wylma Phlyntstone"}, {"v": None}]}, + ], } creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = _make_connection(DATA) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - area_code = SchemaField('area_code', 'STRING', 'REQUIRED') - local_number = SchemaField('local_number', 'STRING', 'REQUIRED') - rank = SchemaField('rank', 'INTEGER', 'REQUIRED') - phone = SchemaField('phone', 'RECORD', mode='NULLABLE', - fields=[area_code, local_number, rank]) + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + area_code = SchemaField("area_code", "STRING", "REQUIRED") + local_number = SchemaField("local_number", "STRING", "REQUIRED") + rank = SchemaField("rank", "INTEGER", "REQUIRED") + phone = SchemaField( + "phone", "RECORD", mode="NULLABLE", fields=[area_code, local_number, rank] + ) table = Table(self.TABLE_REF, schema=[full_name, phone]) iterator = client.list_rows(table) @@ -4060,29 +3968,29 @@ def test_list_rows_w_record_schema(self): page_token = iterator.next_page_token self.assertEqual(len(rows), 3) - self.assertEqual(rows[0][0], 'Phred Phlyntstone') - self.assertEqual(rows[0][1], {'area_code': '800', - 'local_number': '555-1212', - 'rank': 1}) - self.assertEqual(rows[1][0], 'Bharney Rhubble') - self.assertEqual(rows[1][1], {'area_code': '877', - 'local_number': '768-5309', - 'rank': 2}) - self.assertEqual(rows[2][0], 'Wylma Phlyntstone') + self.assertEqual(rows[0][0], "Phred Phlyntstone") + self.assertEqual( + rows[0][1], {"area_code": "800", "local_number": "555-1212", "rank": 1} + ) + self.assertEqual(rows[1][0], "Bharney Rhubble") + self.assertEqual( + rows[1][1], {"area_code": "877", "local_number": "768-5309", "rank": 2} + ) + self.assertEqual(rows[2][0], "Wylma Phlyntstone") self.assertIsNone(rows[2][1]) self.assertEqual(total_rows, ROWS) self.assertEqual(page_token, TOKEN) conn.api_request.assert_called_once_with( - method='GET', path='/%s' % PATH, query_params={}) + method="GET", path="/%s" % PATH, query_params={} + ) def test_list_rows_errors(self): from google.cloud.bigquery.table import Table creds = _make_credentials() http = object() - client = self._make_one(project=self.PROJECT, 
credentials=creds, - _http=http) + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) # table ref with no selected fields with self.assertRaises(ValueError): @@ -4104,26 +4012,26 @@ def _call_fut(self, job_id, prefix=None): return _make_job_id(job_id, prefix=prefix) def test__make_job_id_wo_suffix(self): - job_id = self._call_fut('job_id') + job_id = self._call_fut("job_id") - self.assertEqual(job_id, 'job_id') + self.assertEqual(job_id, "job_id") def test__make_job_id_w_suffix(self): - with mock.patch('uuid.uuid4', side_effect=['212345']): - job_id = self._call_fut(None, prefix='job_id') + with mock.patch("uuid.uuid4", side_effect=["212345"]): + job_id = self._call_fut(None, prefix="job_id") - self.assertEqual(job_id, 'job_id212345') + self.assertEqual(job_id, "job_id212345") def test__make_random_job_id(self): - with mock.patch('uuid.uuid4', side_effect=['212345']): + with mock.patch("uuid.uuid4", side_effect=["212345"]): job_id = self._call_fut(None) - self.assertEqual(job_id, '212345') + self.assertEqual(job_id, "212345") def test__make_job_id_w_job_id_overrides_prefix(self): - job_id = self._call_fut('job_id', prefix='unused_prefix') + job_id = self._call_fut("job_id", prefix="unused_prefix") - self.assertEqual(job_id, 'job_id') + self.assertEqual(job_id, "job_id") class TestClientUpload(object): @@ -4131,69 +4039,73 @@ class TestClientUpload(object): # "load_table_from_file" portions of `Client`. It also uses # `pytest`-style tests rather than `unittest`-style. from google.cloud.bigquery.job import SourceFormat - TABLE_REF = DatasetReference( - 'project_id', 'test_dataset').table('test_table') - LOCATION = 'us-central' + TABLE_REF = DatasetReference("project_id", "test_dataset").table("test_table") + + LOCATION = "us-central" @staticmethod def _make_client(transport=None, location=None): from google.cloud.bigquery import _http from google.cloud.bigquery import client - cl = client.Client(project='project_id', - credentials=_make_credentials(), - _http=transport, location=location) + cl = client.Client( + project="project_id", + credentials=_make_credentials(), + _http=transport, + location=location, + ) cl._connection = mock.create_autospec(_http.Connection, instance=True) return cl @staticmethod - def _make_response(status_code, content='', headers={}): + def _make_response(status_code, content="", headers={}): """Make a mock HTTP response.""" import requests + response = requests.Response() - response.request = requests.Request( - 'POST', 'http://example.com').prepare() - response._content = content.encode('utf-8') + response.request = requests.Request("POST", "http://example.com").prepare() + response._content = content.encode("utf-8") response.headers.update(headers) response.status_code = status_code return response @classmethod - def _make_do_upload_patch(cls, client, method, - resource={}, side_effect=None): + def _make_do_upload_patch(cls, client, method, resource={}, side_effect=None): """Patches the low-level upload helpers.""" if side_effect is None: - side_effect = [cls._make_response( - http_client.OK, - json.dumps(resource), - {'Content-Type': 'application/json'})] - return mock.patch.object( - client, method, side_effect=side_effect, autospec=True) + side_effect = [ + cls._make_response( + http_client.OK, + json.dumps(resource), + {"Content-Type": "application/json"}, + ) + ] + return mock.patch.object(client, method, side_effect=side_effect, autospec=True) EXPECTED_CONFIGURATION = { - 'jobReference': {'projectId': 'project_id', 'jobId': 
'job_id'}, - 'configuration': { - 'load': { - 'sourceFormat': SourceFormat.CSV, - 'destinationTable': { - 'projectId': 'project_id', - 'datasetId': 'test_dataset', - 'tableId': 'test_table' - } + "jobReference": {"projectId": "project_id", "jobId": "job_id"}, + "configuration": { + "load": { + "sourceFormat": SourceFormat.CSV, + "destinationTable": { + "projectId": "project_id", + "datasetId": "test_dataset", + "tableId": "test_table", + }, } - } + }, } @staticmethod def _make_file_obj(): - return io.BytesIO(b'hello, is it me you\'re looking for?') + return io.BytesIO(b"hello, is it me you're looking for?") def _make_gzip_file_obj(self, writable): if writable: - return gzip.GzipFile(mode='w', fileobj=io.BytesIO()) + return gzip.GzipFile(mode="w", fileobj=io.BytesIO()) else: - return gzip.GzipFile(mode='r', fileobj=self._make_file_obj()) + return gzip.GzipFile(mode="r", fileobj=self._make_file_obj()) @staticmethod def _make_config(): @@ -4213,16 +4125,19 @@ def test_load_table_from_file_resumable(self): file_obj = self._make_file_obj() do_upload_patch = self._make_do_upload_patch( - client, '_do_resumable_upload', self.EXPECTED_CONFIGURATION) + client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION + ) with do_upload_patch as do_upload: - client.load_table_from_file(file_obj, self.TABLE_REF, - job_id='job_id', - job_config=self._make_config()) + client.load_table_from_file( + file_obj, + self.TABLE_REF, + job_id="job_id", + job_config=self._make_config(), + ) do_upload.assert_called_once_with( - file_obj, - self.EXPECTED_CONFIGURATION, - _DEFAULT_NUM_RETRIES) + file_obj, self.EXPECTED_CONFIGURATION, _DEFAULT_NUM_RETRIES + ) def test_load_table_from_file_w_explicit_project(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES @@ -4231,20 +4146,24 @@ def test_load_table_from_file_w_explicit_project(self): file_obj = self._make_file_obj() do_upload_patch = self._make_do_upload_patch( - client, '_do_resumable_upload', self.EXPECTED_CONFIGURATION) + client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION + ) with do_upload_patch as do_upload: client.load_table_from_file( - file_obj, self.TABLE_REF, job_id='job_id', - project='other-project', location=self.LOCATION, - job_config=self._make_config()) + file_obj, + self.TABLE_REF, + job_id="job_id", + project="other-project", + location=self.LOCATION, + job_config=self._make_config(), + ) expected_resource = copy.deepcopy(self.EXPECTED_CONFIGURATION) - expected_resource['jobReference']['location'] = self.LOCATION - expected_resource['jobReference']['projectId'] = 'other-project' + expected_resource["jobReference"]["location"] = self.LOCATION + expected_resource["jobReference"]["projectId"] = "other-project" do_upload.assert_called_once_with( - file_obj, - expected_resource, - _DEFAULT_NUM_RETRIES) + file_obj, expected_resource, _DEFAULT_NUM_RETRIES + ) def test_load_table_from_file_w_client_location(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES @@ -4253,27 +4172,28 @@ def test_load_table_from_file_w_client_location(self): file_obj = self._make_file_obj() do_upload_patch = self._make_do_upload_patch( - client, '_do_resumable_upload', self.EXPECTED_CONFIGURATION) + client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION + ) with do_upload_patch as do_upload: client.load_table_from_file( file_obj, # Test with string for table ID. 
- '{}.{}.{}'.format( + "{}.{}.{}".format( self.TABLE_REF.project, self.TABLE_REF.dataset_id, self.TABLE_REF.table_id, ), - job_id='job_id', - project='other-project', - job_config=self._make_config()) + job_id="job_id", + project="other-project", + job_config=self._make_config(), + ) expected_resource = copy.deepcopy(self.EXPECTED_CONFIGURATION) - expected_resource['jobReference']['location'] = self.LOCATION - expected_resource['jobReference']['projectId'] = 'other-project' + expected_resource["jobReference"]["location"] = self.LOCATION + expected_resource["jobReference"]["projectId"] = "other-project" do_upload.assert_called_once_with( - file_obj, - expected_resource, - _DEFAULT_NUM_RETRIES) + file_obj, expected_resource, _DEFAULT_NUM_RETRIES + ) def test_load_table_from_file_resumable_metadata(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES @@ -4287,50 +4207,51 @@ def test_load_table_from_file_resumable_metadata(self): config.allow_jagged_rows = False config.allow_quoted_newlines = False config.create_disposition = CreateDisposition.CREATE_IF_NEEDED - config.encoding = 'utf8' - config.field_delimiter = ',' + config.encoding = "utf8" + config.field_delimiter = "," config.ignore_unknown_values = False config.max_bad_records = 0 config.quote_character = '"' config.skip_leading_rows = 1 config.write_disposition = WriteDisposition.WRITE_APPEND - config.null_marker = r'\N' + config.null_marker = r"\N" expected_config = { - 'jobReference': {'projectId': 'project_id', 'jobId': 'job_id'}, - 'configuration': { - 'load': { - 'destinationTable': { - 'projectId': self.TABLE_REF.project, - 'datasetId': self.TABLE_REF.dataset_id, - 'tableId': self.TABLE_REF.table_id, + "jobReference": {"projectId": "project_id", "jobId": "job_id"}, + "configuration": { + "load": { + "destinationTable": { + "projectId": self.TABLE_REF.project, + "datasetId": self.TABLE_REF.dataset_id, + "tableId": self.TABLE_REF.table_id, }, - 'sourceFormat': config.source_format, - 'allowJaggedRows': config.allow_jagged_rows, - 'allowQuotedNewlines': config.allow_quoted_newlines, - 'createDisposition': config.create_disposition, - 'encoding': config.encoding, - 'fieldDelimiter': config.field_delimiter, - 'ignoreUnknownValues': config.ignore_unknown_values, - 'maxBadRecords': config.max_bad_records, - 'quote': config.quote_character, - 'skipLeadingRows': str(config.skip_leading_rows), - 'writeDisposition': config.write_disposition, - 'nullMarker': config.null_marker, - }, + "sourceFormat": config.source_format, + "allowJaggedRows": config.allow_jagged_rows, + "allowQuotedNewlines": config.allow_quoted_newlines, + "createDisposition": config.create_disposition, + "encoding": config.encoding, + "fieldDelimiter": config.field_delimiter, + "ignoreUnknownValues": config.ignore_unknown_values, + "maxBadRecords": config.max_bad_records, + "quote": config.quote_character, + "skipLeadingRows": str(config.skip_leading_rows), + "writeDisposition": config.write_disposition, + "nullMarker": config.null_marker, + } }, } do_upload_patch = self._make_do_upload_patch( - client, '_do_resumable_upload', expected_config) + client, "_do_resumable_upload", expected_config + ) with do_upload_patch as do_upload: client.load_table_from_file( - file_obj, self.TABLE_REF, job_id='job_id', job_config=config) + file_obj, self.TABLE_REF, job_id="job_id", job_config=config + ) do_upload.assert_called_once_with( - file_obj, - expected_config, - _DEFAULT_NUM_RETRIES) + file_obj, expected_config, _DEFAULT_NUM_RETRIES + ) def 
test_load_table_from_file_multipart(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES @@ -4341,17 +4262,20 @@ def test_load_table_from_file_multipart(self): config = self._make_config() do_upload_patch = self._make_do_upload_patch( - client, '_do_multipart_upload', self.EXPECTED_CONFIGURATION) + client, "_do_multipart_upload", self.EXPECTED_CONFIGURATION + ) with do_upload_patch as do_upload: client.load_table_from_file( - file_obj, self.TABLE_REF, job_id='job_id', job_config=config, - size=file_obj_size) + file_obj, + self.TABLE_REF, + job_id="job_id", + job_config=config, + size=file_obj_size, + ) do_upload.assert_called_once_with( - file_obj, - self.EXPECTED_CONFIGURATION, - file_obj_size, - _DEFAULT_NUM_RETRIES) + file_obj, self.EXPECTED_CONFIGURATION, file_obj_size, _DEFAULT_NUM_RETRIES + ) def test_load_table_from_file_with_retries(self): client = self._make_client() @@ -4359,16 +4283,20 @@ def test_load_table_from_file_with_retries(self): num_retries = 20 do_upload_patch = self._make_do_upload_patch( - client, '_do_resumable_upload', self.EXPECTED_CONFIGURATION) + client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION + ) with do_upload_patch as do_upload: client.load_table_from_file( - file_obj, self.TABLE_REF, num_retries=num_retries, - job_id='job_id', job_config=self._make_config()) + file_obj, + self.TABLE_REF, + num_retries=num_retries, + job_id="job_id", + job_config=self._make_config(), + ) do_upload.assert_called_once_with( - file_obj, - self.EXPECTED_CONFIGURATION, - num_retries) + file_obj, self.EXPECTED_CONFIGURATION, num_retries + ) def test_load_table_from_file_with_rewind(self): client = self._make_client() @@ -4376,9 +4304,9 @@ def test_load_table_from_file_with_rewind(self): file_obj.seek(2) with self._make_do_upload_patch( - client, '_do_resumable_upload', self.EXPECTED_CONFIGURATION): - client.load_table_from_file( - file_obj, self.TABLE_REF, rewind=True) + client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION + ): + client.load_table_from_file(file_obj, self.TABLE_REF, rewind=True) assert file_obj.tell() == 0 @@ -4389,16 +4317,19 @@ def test_load_table_from_file_with_readable_gzip(self): gzip_file = self._make_gzip_file_obj(writable=False) do_upload_patch = self._make_do_upload_patch( - client, '_do_resumable_upload', self.EXPECTED_CONFIGURATION) + client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION + ) with do_upload_patch as do_upload: client.load_table_from_file( - gzip_file, self.TABLE_REF, job_id='job_id', - job_config=self._make_config()) + gzip_file, + self.TABLE_REF, + job_id="job_id", + job_config=self._make_config(), + ) do_upload.assert_called_once_with( - gzip_file, - self.EXPECTED_CONFIGURATION, - _DEFAULT_NUM_RETRIES) + gzip_file, self.EXPECTED_CONFIGURATION, _DEFAULT_NUM_RETRIES + ) def test_load_table_from_file_with_writable_gzip(self): client = self._make_client() @@ -4406,8 +4337,11 @@ def test_load_table_from_file_with_writable_gzip(self): with pytest.raises(ValueError): client.load_table_from_file( - gzip_file, self.TABLE_REF, job_id='job_id', - job_config=self._make_config()) + gzip_file, + self.TABLE_REF, + job_id="job_id", + job_config=self._make_config(), + ) def test_load_table_from_file_failure(self): from google.resumable_media import InvalidResponse @@ -4417,83 +4351,87 @@ def test_load_table_from_file_failure(self): file_obj = self._make_file_obj() response = self._make_response( - content='Someone is already in this spot.', - status_code=http_client.CONFLICT) + content="Someone is already in this 
spot.", status_code=http_client.CONFLICT + ) do_upload_patch = self._make_do_upload_patch( - client, '_do_resumable_upload', - side_effect=InvalidResponse(response)) + client, "_do_resumable_upload", side_effect=InvalidResponse(response) + ) with do_upload_patch, pytest.raises(exceptions.Conflict) as exc_info: - client.load_table_from_file( - file_obj, self.TABLE_REF, rewind=True) + client.load_table_from_file(file_obj, self.TABLE_REF, rewind=True) assert response.text in exc_info.value.message assert exc_info.value.errors == [] def test_load_table_from_file_bad_mode(self): client = self._make_client() - file_obj = mock.Mock(spec=['mode']) - file_obj.mode = 'x' + file_obj = mock.Mock(spec=["mode"]) + file_obj.mode = "x" with pytest.raises(ValueError): client.load_table_from_file(file_obj, self.TABLE_REF) - @unittest.skipIf(pandas is None, 'Requires `pandas`') - @unittest.skipIf(pyarrow is None, 'Requires `pyarrow`') + @unittest.skipIf(pandas is None, "Requires `pandas`") + @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job client = self._make_client() - records = [ - {'name': 'Monty', 'age': 100}, - {'name': 'Python', 'age': 60}, - ] + records = [{"name": "Monty", "age": 100}, {"name": "Python", "age": 60}] dataframe = pandas.DataFrame(records) load_patch = mock.patch( - 'google.cloud.bigquery.client.Client.load_table_from_file', - autospec=True) + "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True + ) with load_patch as load_table_from_file: client.load_table_from_dataframe(dataframe, self.TABLE_REF) load_table_from_file.assert_called_once_with( - client, mock.ANY, self.TABLE_REF, num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, job_id=None, job_id_prefix=None, location=None, - project=None, job_config=mock.ANY) + client, + mock.ANY, + self.TABLE_REF, + num_retries=_DEFAULT_NUM_RETRIES, + rewind=True, + job_id=None, + job_id_prefix=None, + location=None, + project=None, + job_config=mock.ANY, + ) sent_file = load_table_from_file.mock_calls[0][1][1] sent_bytes = sent_file.getvalue() assert isinstance(sent_bytes, bytes) assert len(sent_bytes) > 0 - sent_config = load_table_from_file.mock_calls[0][2]['job_config'] + sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, 'Requires `pandas`') - @unittest.skipIf(pyarrow is None, 'Requires `pyarrow`') + @unittest.skipIf(pandas is None, "Requires `pandas`") + @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_client_location(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job client = self._make_client(location=self.LOCATION) - records = [ - {'name': 'Monty', 'age': 100}, - {'name': 'Python', 'age': 60}, - ] + records = [{"name": "Monty", "age": 100}, {"name": "Python", "age": 60}] dataframe = pandas.DataFrame(records) load_patch = mock.patch( - 'google.cloud.bigquery.client.Client.load_table_from_file', - autospec=True) + "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True + ) with load_patch as load_table_from_file: client.load_table_from_dataframe(dataframe, self.TABLE_REF) load_table_from_file.assert_called_once_with( - client, mock.ANY, self.TABLE_REF, + client, + mock.ANY, + self.TABLE_REF, num_retries=_DEFAULT_NUM_RETRIES, - rewind=True, job_id=None, 
+ rewind=True, + job_id=None, job_id_prefix=None, location=self.LOCATION, project=None, @@ -4505,34 +4443,32 @@ def test_load_table_from_dataframe_w_client_location(self): assert isinstance(sent_bytes, bytes) assert len(sent_bytes) > 0 - sent_config = load_table_from_file.mock_calls[0][2]['job_config'] + sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config.source_format == job.SourceFormat.PARQUET - @unittest.skipIf(pandas is None, 'Requires `pandas`') - @unittest.skipIf(pyarrow is None, 'Requires `pyarrow`') + @unittest.skipIf(pandas is None, "Requires `pandas`") + @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_custom_job_config(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job client = self._make_client() - records = [ - {'name': 'Monty', 'age': 100}, - {'name': 'Python', 'age': 60}, - ] + records = [{"name": "Monty", "age": 100}, {"name": "Python", "age": 60}] dataframe = pandas.DataFrame(records) job_config = job.LoadJobConfig() load_patch = mock.patch( - 'google.cloud.bigquery.client.Client.load_table_from_file', - autospec=True) + "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True + ) with load_patch as load_table_from_file: client.load_table_from_dataframe( - dataframe, self.TABLE_REF, - job_config=job_config, - location=self.LOCATION) + dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION + ) load_table_from_file.assert_called_once_with( - client, mock.ANY, self.TABLE_REF, + client, + mock.ANY, + self.TABLE_REF, num_retries=_DEFAULT_NUM_RETRIES, rewind=True, job_id=None, @@ -4542,7 +4478,7 @@ def test_load_table_from_dataframe_w_custom_job_config(self): job_config=mock.ANY, ) - sent_config = load_table_from_file.mock_calls[0][2]['job_config'] + sent_config = load_table_from_file.mock_calls[0][2]["job_config"] assert sent_config is job_config assert sent_config.source_format == job.SourceFormat.PARQUET @@ -4553,16 +4489,20 @@ def _make_resumable_upload_responses(cls, size): """Make a series of responses for a successful resumable upload.""" from google import resumable_media - resumable_url = 'http://test.invalid?upload_id=and-then-there-was-1' + resumable_url = "http://test.invalid?upload_id=and-then-there-was-1" initial_response = cls._make_response( - http_client.OK, '', {'location': resumable_url}) + http_client.OK, "", {"location": resumable_url} + ) data_response = cls._make_response( resumable_media.PERMANENT_REDIRECT, - '', {'range': 'bytes=0-{:d}'.format(size - 1)}) + "", + {"range": "bytes=0-{:d}".format(size - 1)}, + ) final_response = cls._make_response( http_client.OK, - json.dumps({'size': size}), - {'Content-Type': 'application/json'}) + json.dumps({"size": size}), + {"Content-Type": "application/json"}, + ) return [initial_response, data_response, final_response] @staticmethod @@ -4570,7 +4510,8 @@ def _make_transport(responses=None): import google.auth.transport.requests transport = mock.create_autospec( - google.auth.transport.requests.AuthorizedSession, instance=True) + google.auth.transport.requests.AuthorizedSession, instance=True + ) transport.request.side_effect = responses return transport @@ -4578,24 +4519,25 @@ def test__do_resumable_upload(self): file_obj = self._make_file_obj() file_obj_len = len(file_obj.getvalue()) transport = self._make_transport( - self._make_resumable_upload_responses(file_obj_len)) + self._make_resumable_upload_responses(file_obj_len) + ) client = 
self._make_client(transport) result = client._do_resumable_upload( - file_obj, - self.EXPECTED_CONFIGURATION, - None) + file_obj, self.EXPECTED_CONFIGURATION, None + ) - content = result.content.decode('utf-8') - assert json.loads(content) == {'size': file_obj_len} + content = result.content.decode("utf-8") + assert json.loads(content) == {"size": file_obj_len} # Verify that configuration data was passed in with the initial # request. transport.request.assert_any_call( - 'POST', + "POST", mock.ANY, - data=json.dumps(self.EXPECTED_CONFIGURATION).encode('utf-8'), - headers=mock.ANY) + data=json.dumps(self.EXPECTED_CONFIGURATION).encode("utf-8"), + headers=mock.ANY, + ) def test__do_multipart_upload(self): transport = self._make_transport([self._make_response(http_client.OK)]) @@ -4604,28 +4546,27 @@ def test__do_multipart_upload(self): file_obj_len = len(file_obj.getvalue()) client._do_multipart_upload( - file_obj, - self.EXPECTED_CONFIGURATION, - file_obj_len, - None) + file_obj, self.EXPECTED_CONFIGURATION, file_obj_len, None + ) # Verify that configuration data was passed in with the initial # request. request_args = transport.request.mock_calls[0][2] - request_data = request_args['data'].decode('utf-8') - request_headers = request_args['headers'] + request_data = request_args["data"].decode("utf-8") + request_headers = request_args["headers"] request_content = email.message_from_string( - 'Content-Type: {}\r\n{}'.format( - request_headers['content-type'].decode('utf-8'), - request_data)) + "Content-Type: {}\r\n{}".format( + request_headers["content-type"].decode("utf-8"), request_data + ) + ) # There should be two payloads: the configuration and the binary daya. configuration_data = request_content.get_payload(0).get_payload() binary_data = request_content.get_payload(1).get_payload() assert json.loads(configuration_data) == self.EXPECTED_CONFIGURATION - assert binary_data.encode('utf-8') == file_obj.getvalue() + assert binary_data.encode("utf-8") == file_obj.getvalue() def test__do_multipart_upload_wrong_size(self): client = self._make_client() @@ -4633,8 +4574,4 @@ def test__do_multipart_upload_wrong_size(self): file_obj_len = len(file_obj.getvalue()) with pytest.raises(ValueError): - client._do_multipart_upload( - file_obj, - {}, - file_obj_len + 1, - None) + client._do_multipart_upload(file_obj, {}, file_obj_len + 1, None) diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py index d438c1d478a9..f477904c2f7d 100644 --- a/bigquery/tests/unit/test_dataset.py +++ b/bigquery/tests/unit/test_dataset.py @@ -18,7 +18,6 @@ class TestAccessEntry(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.dataset import AccessEntry @@ -29,24 +28,24 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - entry = self._make_one('OWNER', 'userByEmail', 'phred@example.com') - self.assertEqual(entry.role, 'OWNER') - self.assertEqual(entry.entity_type, 'userByEmail') - self.assertEqual(entry.entity_id, 'phred@example.com') + entry = self._make_one("OWNER", "userByEmail", "phred@example.com") + self.assertEqual(entry.role, "OWNER") + self.assertEqual(entry.entity_type, "userByEmail") + self.assertEqual(entry.entity_id, "phred@example.com") def test_ctor_bad_entity_type(self): with self.assertRaises(ValueError): - self._make_one(None, 'unknown', None) + self._make_one(None, "unknown", None) def test_ctor_view_with_role(self): - role = 'READER' - entity_type = 'view' + role = "READER" 
+ entity_type = "view" with self.assertRaises(ValueError): self._make_one(role, entity_type, None) def test_ctor_view_success(self): role = None - entity_type = 'view' + entity_type = "view" entity_id = object() entry = self._make_one(role, entity_type, entity_id) self.assertEqual(entry.role, role) @@ -55,76 +54,75 @@ def test_ctor_view_success(self): def test_ctor_nonview_without_role(self): role = None - entity_type = 'userByEmail' + entity_type = "userByEmail" with self.assertRaises(ValueError): self._make_one(role, entity_type, None) def test___eq___role_mismatch(self): - entry = self._make_one('OWNER', 'userByEmail', 'phred@example.com') - other = self._make_one('WRITER', 'userByEmail', 'phred@example.com') + entry = self._make_one("OWNER", "userByEmail", "phred@example.com") + other = self._make_one("WRITER", "userByEmail", "phred@example.com") self.assertNotEqual(entry, other) def test___eq___entity_type_mismatch(self): - entry = self._make_one('OWNER', 'userByEmail', 'phred@example.com') - other = self._make_one('OWNER', 'groupByEmail', 'phred@example.com') + entry = self._make_one("OWNER", "userByEmail", "phred@example.com") + other = self._make_one("OWNER", "groupByEmail", "phred@example.com") self.assertNotEqual(entry, other) def test___eq___entity_id_mismatch(self): - entry = self._make_one('OWNER', 'userByEmail', 'phred@example.com') - other = self._make_one('OWNER', 'userByEmail', 'bharney@example.com') + entry = self._make_one("OWNER", "userByEmail", "phred@example.com") + other = self._make_one("OWNER", "userByEmail", "bharney@example.com") self.assertNotEqual(entry, other) def test___eq___hit(self): - entry = self._make_one('OWNER', 'userByEmail', 'phred@example.com') - other = self._make_one('OWNER', 'userByEmail', 'phred@example.com') + entry = self._make_one("OWNER", "userByEmail", "phred@example.com") + other = self._make_one("OWNER", "userByEmail", "phred@example.com") self.assertEqual(entry, other) def test__eq___type_mismatch(self): - entry = self._make_one('OWNER', 'userByEmail', 'silly@example.com') + entry = self._make_one("OWNER", "userByEmail", "silly@example.com") self.assertNotEqual(entry, object()) self.assertEqual(entry, mock.ANY) def test_to_api_repr(self): - entry = self._make_one('OWNER', 'userByEmail', 'salmon@example.com') + entry = self._make_one("OWNER", "userByEmail", "salmon@example.com") resource = entry.to_api_repr() - exp_resource = {'role': 'OWNER', 'userByEmail': 'salmon@example.com'} + exp_resource = {"role": "OWNER", "userByEmail": "salmon@example.com"} self.assertEqual(resource, exp_resource) def test_to_api_repr_view(self): view = { - 'projectId': 'my-project', - 'datasetId': 'my_dataset', - 'tableId': 'my_table' + "projectId": "my-project", + "datasetId": "my_dataset", + "tableId": "my_table", } - entry = self._make_one(None, 'view', view) + entry = self._make_one(None, "view", view) resource = entry.to_api_repr() - exp_resource = {'view': view} + exp_resource = {"view": view} self.assertEqual(resource, exp_resource) def test_from_api_repr(self): - resource = {'role': 'OWNER', 'userByEmail': 'salmon@example.com'} + resource = {"role": "OWNER", "userByEmail": "salmon@example.com"} entry = self._get_target_class().from_api_repr(resource) - self.assertEqual(entry.role, 'OWNER') - self.assertEqual(entry.entity_type, 'userByEmail') - self.assertEqual(entry.entity_id, 'salmon@example.com') + self.assertEqual(entry.role, "OWNER") + self.assertEqual(entry.entity_type, "userByEmail") + self.assertEqual(entry.entity_id, "salmon@example.com") def 
test_from_api_repr_w_unknown_entity_type(self): - resource = {'role': 'READER', 'unknown': 'UNKNOWN'} + resource = {"role": "READER", "unknown": "UNKNOWN"} with self.assertRaises(ValueError): self._get_target_class().from_api_repr(resource) def test_from_api_repr_entries_w_extra_keys(self): resource = { - 'role': 'READER', - 'specialGroup': 'projectReaders', - 'userByEmail': 'salmon@example.com', + "role": "READER", + "specialGroup": "projectReaders", + "userByEmail": "salmon@example.com", } with self.assertRaises(ValueError): self._get_target_class().from_api_repr(resource) class TestDatasetReference(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.dataset import DatasetReference @@ -135,116 +133,107 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - dataset_ref = self._make_one('some-project-1', 'dataset_1') - self.assertEqual(dataset_ref.project, 'some-project-1') - self.assertEqual(dataset_ref.dataset_id, 'dataset_1') + dataset_ref = self._make_one("some-project-1", "dataset_1") + self.assertEqual(dataset_ref.project, "some-project-1") + self.assertEqual(dataset_ref.dataset_id, "dataset_1") def test_ctor_bad_args(self): with self.assertRaises(ValueError): - self._make_one(1, 'd') + self._make_one(1, "d") with self.assertRaises(ValueError): - self._make_one('p', 2) + self._make_one("p", 2) def test_table(self): - dataset_ref = self._make_one('some-project-1', 'dataset_1') - table_ref = dataset_ref.table('table_1') - self.assertEqual(table_ref.dataset_id, 'dataset_1') - self.assertEqual(table_ref.project, 'some-project-1') - self.assertEqual(table_ref.table_id, 'table_1') + dataset_ref = self._make_one("some-project-1", "dataset_1") + table_ref = dataset_ref.table("table_1") + self.assertEqual(table_ref.dataset_id, "dataset_1") + self.assertEqual(table_ref.project, "some-project-1") + self.assertEqual(table_ref.table_id, "table_1") def test_to_api_repr(self): - dataset = self._make_one('project_1', 'dataset_1') + dataset = self._make_one("project_1", "dataset_1") resource = dataset.to_api_repr() - self.assertEqual( - resource, - { - 'projectId': 'project_1', - 'datasetId': 'dataset_1', - }) + self.assertEqual(resource, {"projectId": "project_1", "datasetId": "dataset_1"}) def test_from_api_repr(self): cls = self._get_target_class() - expected = self._make_one('project_1', 'dataset_1') + expected = self._make_one("project_1", "dataset_1") - got = cls.from_api_repr( - { - 'projectId': 'project_1', - 'datasetId': 'dataset_1', - }) + got = cls.from_api_repr({"projectId": "project_1", "datasetId": "dataset_1"}) self.assertEqual(expected, got) def test_from_string(self): cls = self._get_target_class() - got = cls.from_string('string-project.string_dataset') - self.assertEqual(got.project, 'string-project') - self.assertEqual(got.dataset_id, 'string_dataset') + got = cls.from_string("string-project.string_dataset") + self.assertEqual(got.project, "string-project") + self.assertEqual(got.dataset_id, "string_dataset") def test_from_string_legacy_string(self): cls = self._get_target_class() with self.assertRaises(ValueError): - cls.from_string('string-project:string_dataset') + cls.from_string("string-project:string_dataset") def test_from_string_not_fully_qualified(self): cls = self._get_target_class() with self.assertRaises(ValueError): - cls.from_string('string_dataset') + cls.from_string("string_dataset") with self.assertRaises(ValueError): - cls.from_string('a.b.c') + cls.from_string("a.b.c") 
def test_from_string_with_default_project(self): cls = self._get_target_class() - got = cls.from_string( - 'string_dataset', default_project='default-project') - self.assertEqual(got.project, 'default-project') - self.assertEqual(got.dataset_id, 'string_dataset') + got = cls.from_string("string_dataset", default_project="default-project") + self.assertEqual(got.project, "default-project") + self.assertEqual(got.dataset_id, "string_dataset") def test_from_string_ignores_default_project(self): cls = self._get_target_class() got = cls.from_string( - 'string-project.string_dataset', default_project='default-project') - self.assertEqual(got.project, 'string-project') - self.assertEqual(got.dataset_id, 'string_dataset') + "string-project.string_dataset", default_project="default-project" + ) + self.assertEqual(got.project, "string-project") + self.assertEqual(got.dataset_id, "string_dataset") def test___eq___wrong_type(self): - dataset = self._make_one('project_1', 'dataset_1') + dataset = self._make_one("project_1", "dataset_1") other = object() self.assertNotEqual(dataset, other) self.assertEqual(dataset, mock.ANY) def test___eq___project_mismatch(self): - dataset = self._make_one('project_1', 'dataset_1') - other = self._make_one('project_2', 'dataset_1') + dataset = self._make_one("project_1", "dataset_1") + other = self._make_one("project_2", "dataset_1") self.assertNotEqual(dataset, other) def test___eq___dataset_mismatch(self): - dataset = self._make_one('project_1', 'dataset_1') - other = self._make_one('project_1', 'dataset_2') + dataset = self._make_one("project_1", "dataset_1") + other = self._make_one("project_1", "dataset_2") self.assertNotEqual(dataset, other) def test___eq___equality(self): - dataset = self._make_one('project_1', 'dataset_1') - other = self._make_one('project_1', 'dataset_1') + dataset = self._make_one("project_1", "dataset_1") + other = self._make_one("project_1", "dataset_1") self.assertEqual(dataset, other) def test___hash__set_equality(self): - dataset1 = self._make_one('project_1', 'dataset_1') - dataset2 = self._make_one('project_1', 'dataset_2') + dataset1 = self._make_one("project_1", "dataset_1") + dataset2 = self._make_one("project_1", "dataset_2") set_one = {dataset1, dataset2} set_two = {dataset1, dataset2} self.assertEqual(set_one, set_two) def test___hash__not_equals(self): - dataset1 = self._make_one('project_1', 'dataset_1') - dataset2 = self._make_one('project_1', 'dataset_2') + dataset1 = self._make_one("project_1", "dataset_1") + dataset2 = self._make_one("project_1", "dataset_2") set_one = {dataset1} set_two = {dataset2} self.assertNotEqual(set_one, set_two) def test___repr__(self): - dataset = self._make_one('project1', 'dataset1') + dataset = self._make_one("project1", "dataset1") expected = "DatasetReference('project1', 'dataset1')" self.assertEqual(repr(dataset), expected) @@ -252,8 +241,8 @@ def test___repr__(self): class TestDataset(unittest.TestCase): from google.cloud.bigquery.dataset import DatasetReference - PROJECT = 'project' - DS_ID = 'dataset-id' + PROJECT = "project" + DS_ID = "dataset-id" DS_REF = DatasetReference(PROJECT, DS_ID) @staticmethod @@ -270,48 +259,46 @@ def _setUpConstants(self): from google.cloud._helpers import UTC self.WHEN_TS = 1437767599.006 - self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace( - tzinfo=UTC) - self.ETAG = 'ETAG' - self.DS_FULL_ID = '%s:%s' % (self.PROJECT, self.DS_ID) - self.RESOURCE_URL = 'http://example.com/path/to/resource' + self.WHEN = 
datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(tzinfo=UTC) + self.ETAG = "ETAG" + self.DS_FULL_ID = "%s:%s" % (self.PROJECT, self.DS_ID) + self.RESOURCE_URL = "http://example.com/path/to/resource" def _make_resource(self): self._setUpConstants() - USER_EMAIL = 'phred@example.com' - GROUP_EMAIL = 'group-name@lists.example.com' + USER_EMAIL = "phred@example.com" + GROUP_EMAIL = "group-name@lists.example.com" return { - 'creationTime': self.WHEN_TS * 1000, - 'datasetReference': - {'projectId': self.PROJECT, 'datasetId': self.DS_ID}, - 'etag': self.ETAG, - 'id': self.DS_FULL_ID, - 'lastModifiedTime': self.WHEN_TS * 1000, - 'location': 'US', - 'selfLink': self.RESOURCE_URL, - 'defaultTableExpirationMs': 3600, - 'access': [ - {'role': 'OWNER', 'userByEmail': USER_EMAIL}, - {'role': 'OWNER', 'groupByEmail': GROUP_EMAIL}, - {'role': 'WRITER', 'specialGroup': 'projectWriters'}, - {'role': 'READER', 'specialGroup': 'projectReaders'}], + "creationTime": self.WHEN_TS * 1000, + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, + "etag": self.ETAG, + "id": self.DS_FULL_ID, + "lastModifiedTime": self.WHEN_TS * 1000, + "location": "US", + "selfLink": self.RESOURCE_URL, + "defaultTableExpirationMs": 3600, + "access": [ + {"role": "OWNER", "userByEmail": USER_EMAIL}, + {"role": "OWNER", "groupByEmail": GROUP_EMAIL}, + {"role": "WRITER", "specialGroup": "projectWriters"}, + {"role": "READER", "specialGroup": "projectReaders"}, + ], } def _verify_access_entry(self, access_entries, resource): r_entries = [] - for r_entry in resource['access']: - role = r_entry.pop('role') + for r_entry in resource["access"]: + role = r_entry.pop("role") for entity_type, entity_id in sorted(r_entry.items()): - r_entries.append({ - 'role': role, - 'entity_type': entity_type, - 'entity_id': entity_id}) + r_entries.append( + {"role": role, "entity_type": entity_type, "entity_id": entity_id} + ) self.assertEqual(len(access_entries), len(r_entries)) for a_entry, r_entry in zip(access_entries, r_entries): - self.assertEqual(a_entry.role, r_entry['role']) - self.assertEqual(a_entry.entity_type, r_entry['entity_type']) - self.assertEqual(a_entry.entity_id, r_entry['entity_id']) + self.assertEqual(a_entry.role, r_entry["role"]) + self.assertEqual(a_entry.entity_type, r_entry["entity_type"]) + self.assertEqual(a_entry.entity_id, r_entry["entity_id"]) def _verify_readonly_resource_properties(self, dataset, resource): @@ -320,19 +307,19 @@ def _verify_readonly_resource_properties(self, dataset, resource): self.assertEqual(dataset.reference.project, self.PROJECT) self.assertEqual(dataset.reference.dataset_id, self.DS_ID) - if 'creationTime' in resource: + if "creationTime" in resource: self.assertEqual(dataset.created, self.WHEN) else: self.assertIsNone(dataset.created) - if 'etag' in resource: + if "etag" in resource: self.assertEqual(dataset.etag, self.ETAG) else: self.assertIsNone(dataset.etag) - if 'lastModifiedTime' in resource: + if "lastModifiedTime" in resource: self.assertEqual(dataset.modified, self.WHEN) else: self.assertIsNone(dataset.modified) - if 'selfLink' in resource: + if "selfLink" in resource: self.assertEqual(dataset.self_link, self.RESOURCE_URL) else: self.assertIsNone(dataset.self_link) @@ -341,16 +328,18 @@ def _verify_resource_properties(self, dataset, resource): self._verify_readonly_resource_properties(dataset, resource) - if 'defaultTableExpirationMs' in resource: - self.assertEqual(dataset.default_table_expiration_ms, - int(resource.get('defaultTableExpirationMs'))) + if 
"defaultTableExpirationMs" in resource: + self.assertEqual( + dataset.default_table_expiration_ms, + int(resource.get("defaultTableExpirationMs")), + ) else: self.assertIsNone(dataset.default_table_expiration_ms) - self.assertEqual(dataset.description, resource.get('description')) - self.assertEqual(dataset.friendly_name, resource.get('friendlyName')) - self.assertEqual(dataset.location, resource.get('location')) + self.assertEqual(dataset.description, resource.get("description")) + self.assertEqual(dataset.friendly_name, resource.get("friendlyName")) + self.assertEqual(dataset.location, resource.get("location")) - if 'access' in resource: + if "access" in resource: self._verify_access_entry(dataset.access_entries, resource) else: self.assertEqual(dataset.access_entries, []) @@ -360,8 +349,8 @@ def test_ctor_defaults(self): self.assertEqual(dataset.dataset_id, self.DS_ID) self.assertEqual(dataset.project, self.PROJECT) self.assertEqual( - dataset.path, - '/projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)) + dataset.path, "/projects/%s/datasets/%s" % (self.PROJECT, self.DS_ID) + ) self.assertEqual(dataset.access_entries, []) self.assertIsNone(dataset.created) @@ -378,17 +367,17 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): from google.cloud.bigquery.dataset import DatasetReference, AccessEntry - phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com') - bharney = AccessEntry('OWNER', 'userByEmail', 'bharney@example.com') + phred = AccessEntry("OWNER", "userByEmail", "phred@example.com") + bharney = AccessEntry("OWNER", "userByEmail", "bharney@example.com") entries = [phred, bharney] - OTHER_PROJECT = 'foo-bar-123' + OTHER_PROJECT = "foo-bar-123" dataset = self._make_one(DatasetReference(OTHER_PROJECT, self.DS_ID)) dataset.access_entries = entries self.assertEqual(dataset.dataset_id, self.DS_ID) self.assertEqual(dataset.project, OTHER_PROJECT) self.assertEqual( - dataset.path, - '/projects/%s/datasets/%s' % (OTHER_PROJECT, self.DS_ID)) + dataset.path, "/projects/%s/datasets/%s" % (OTHER_PROJECT, self.DS_ID) + ) self.assertEqual(dataset.access_entries, entries) self.assertIsNone(dataset.created) @@ -411,7 +400,7 @@ def test_access_entries_setter_invalid_field(self): from google.cloud.bigquery.dataset import AccessEntry dataset = self._make_one(self.DS_REF) - phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com') + phred = AccessEntry("OWNER", "userByEmail", "phred@example.com") with self.assertRaises(ValueError): dataset.access_entries = [phred, object()] @@ -419,15 +408,15 @@ def test_access_entries_setter(self): from google.cloud.bigquery.dataset import AccessEntry dataset = self._make_one(self.DS_REF) - phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com') - bharney = AccessEntry('OWNER', 'userByEmail', 'bharney@example.com') + phred = AccessEntry("OWNER", "userByEmail", "phred@example.com") + bharney = AccessEntry("OWNER", "userByEmail", "bharney@example.com") dataset.access_entries = [phred, bharney] self.assertEqual(dataset.access_entries, [phred, bharney]) def test_default_table_expiration_ms_setter_bad_value(self): dataset = self._make_one(self.DS_REF) with self.assertRaises(ValueError): - dataset.default_table_expiration_ms = 'bogus' + dataset.default_table_expiration_ms = "bogus" def test_default_table_expiration_ms_setter(self): dataset = self._make_one(self.DS_REF) @@ -441,8 +430,8 @@ def test_description_setter_bad_value(self): def test_description_setter(self): dataset = self._make_one(self.DS_REF) - dataset.description = 
'DESCRIPTION' - self.assertEqual(dataset.description, 'DESCRIPTION') + dataset.description = "DESCRIPTION" + self.assertEqual(dataset.description, "DESCRIPTION") def test_friendly_name_setter_bad_value(self): dataset = self._make_one(self.DS_REF) @@ -451,8 +440,8 @@ def test_friendly_name_setter_bad_value(self): def test_friendly_name_setter(self): dataset = self._make_one(self.DS_REF) - dataset.friendly_name = 'FRIENDLY' - self.assertEqual(dataset.friendly_name, 'FRIENDLY') + dataset.friendly_name = "FRIENDLY" + self.assertEqual(dataset.friendly_name, "FRIENDLY") def test_location_setter_bad_value(self): dataset = self._make_one(self.DS_REF) @@ -461,20 +450,20 @@ def test_location_setter_bad_value(self): def test_location_setter(self): dataset = self._make_one(self.DS_REF) - dataset.location = 'LOCATION' - self.assertEqual(dataset.location, 'LOCATION') + dataset.location = "LOCATION" + self.assertEqual(dataset.location, "LOCATION") def test_labels_update_in_place(self): dataset = self._make_one(self.DS_REF) - del dataset._properties['labels'] # don't start w/ existing dict + del dataset._properties["labels"] # don't start w/ existing dict labels = dataset.labels - labels['foo'] = 'bar' # update in place - self.assertEqual(dataset.labels, {'foo': 'bar'}) + labels["foo"] = "bar" # update in place + self.assertEqual(dataset.labels, {"foo": "bar"}) def test_labels_setter(self): dataset = self._make_one(self.DS_REF) - dataset.labels = {'color': 'green'} - self.assertEqual(dataset.labels, {'color': 'green'}) + dataset.labels = {"color": "green"} + self.assertEqual(dataset.labels, {"color": "green"}) def test_labels_setter_bad_value(self): dataset = self._make_one(self.DS_REF) @@ -495,11 +484,8 @@ def test_from_api_repr_missing_identity(self): def test_from_api_repr_bare(self): self._setUpConstants() RESOURCE = { - 'id': '%s:%s' % (self.PROJECT, self.DS_ID), - 'datasetReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - } + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "datasetReference": {"projectId": self.PROJECT, "datasetId": self.DS_ID}, } klass = self._get_target_class() dataset = klass.from_api_repr(RESOURCE) @@ -513,62 +499,60 @@ def test_from_api_repr_w_properties(self): def test_to_api_repr_w_custom_field(self): dataset = self._make_one(self.DS_REF) - dataset._properties['newAlphaProperty'] = 'unreleased property' + dataset._properties["newAlphaProperty"] = "unreleased property" resource = dataset.to_api_repr() exp_resource = { - 'datasetReference': self.DS_REF.to_api_repr(), - 'labels': {}, - 'newAlphaProperty': 'unreleased property', + "datasetReference": self.DS_REF.to_api_repr(), + "labels": {}, + "newAlphaProperty": "unreleased property", } self.assertEqual(resource, exp_resource) def test_from_string(self): cls = self._get_target_class() - got = cls.from_string('string-project.string_dataset') - self.assertEqual(got.project, 'string-project') - self.assertEqual(got.dataset_id, 'string_dataset') + got = cls.from_string("string-project.string_dataset") + self.assertEqual(got.project, "string-project") + self.assertEqual(got.dataset_id, "string_dataset") def test_from_string_legacy_string(self): cls = self._get_target_class() with self.assertRaises(ValueError): - cls.from_string('string-project:string_dataset') + cls.from_string("string-project:string_dataset") def test__build_resource_w_custom_field(self): dataset = self._make_one(self.DS_REF) - dataset._properties['newAlphaProperty'] = 'unreleased property' - resource = 
dataset._build_resource(['newAlphaProperty']) + dataset._properties["newAlphaProperty"] = "unreleased property" + resource = dataset._build_resource(["newAlphaProperty"]) - exp_resource = { - 'newAlphaProperty': 'unreleased property' - } + exp_resource = {"newAlphaProperty": "unreleased property"} self.assertEqual(resource, exp_resource) def test__build_resource_w_custom_field_not_in__properties(self): dataset = self._make_one(self.DS_REF) - dataset.bad = 'value' + dataset.bad = "value" with self.assertRaises(ValueError): - dataset._build_resource(['bad']) + dataset._build_resource(["bad"]) def test_table(self): from google.cloud.bigquery.table import TableReference dataset = self._make_one(self.DS_REF) - table = dataset.table('table_id') + table = dataset.table("table_id") self.assertIsInstance(table, TableReference) - self.assertEqual(table.table_id, 'table_id') + self.assertEqual(table.table_id, "table_id") self.assertEqual(table.dataset_id, self.DS_ID) self.assertEqual(table.project, self.PROJECT) def test___repr__(self): from google.cloud.bigquery.dataset import DatasetReference - dataset = self._make_one(DatasetReference('project1', 'dataset1')) + + dataset = self._make_one(DatasetReference("project1", "dataset1")) expected = "Dataset(DatasetReference('project1', 'dataset1'))" self.assertEqual(repr(dataset), expected) class TestDatasetListItem(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.dataset import DatasetListItem @@ -579,61 +563,43 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - project = 'test-project' - dataset_id = 'test_dataset' + project = "test-project" + dataset_id = "test_dataset" resource = { - 'kind': 'bigquery#dataset', - 'id': '{}:{}'.format(project, dataset_id), - 'datasetReference': { - 'projectId': project, - 'datasetId': dataset_id, - }, - 'friendlyName': 'Data of the Test', - 'labels': { - 'some-stuff': 'this-is-a-label', - }, + "kind": "bigquery#dataset", + "id": "{}:{}".format(project, dataset_id), + "datasetReference": {"projectId": project, "datasetId": dataset_id}, + "friendlyName": "Data of the Test", + "labels": {"some-stuff": "this-is-a-label"}, } dataset = self._make_one(resource) self.assertEqual(dataset.project, project) self.assertEqual(dataset.dataset_id, dataset_id) - self.assertEqual( - dataset.full_dataset_id, - '{}:{}'.format(project, dataset_id)) + self.assertEqual(dataset.full_dataset_id, "{}:{}".format(project, dataset_id)) self.assertEqual(dataset.reference.project, project) self.assertEqual(dataset.reference.dataset_id, dataset_id) - self.assertEqual(dataset.friendly_name, 'Data of the Test') - self.assertEqual(dataset.labels['some-stuff'], 'this-is-a-label') + self.assertEqual(dataset.friendly_name, "Data of the Test") + self.assertEqual(dataset.labels["some-stuff"], "this-is-a-label") def test_ctor_missing_properties(self): resource = { - 'datasetReference': { - 'projectId': 'testproject', - 'datasetId': 'testdataset', - }, + "datasetReference": {"projectId": "testproject", "datasetId": "testdataset"} } dataset = self._make_one(resource) - self.assertEqual(dataset.project, 'testproject') - self.assertEqual(dataset.dataset_id, 'testdataset') + self.assertEqual(dataset.project, "testproject") + self.assertEqual(dataset.dataset_id, "testdataset") self.assertIsNone(dataset.full_dataset_id) self.assertIsNone(dataset.friendly_name) self.assertEqual(dataset.labels, {}) def test_ctor_wo_project(self): - resource = { - 'datasetReference': { - 
'datasetId': 'testdataset', - }, - } + resource = {"datasetReference": {"datasetId": "testdataset"}} with self.assertRaises(ValueError): self._make_one(resource) def test_ctor_wo_dataset(self): - resource = { - 'datasetReference': { - 'projectId': 'testproject', - }, - } + resource = {"datasetReference": {"projectId": "testproject"}} with self.assertRaises(ValueError): self._make_one(resource) @@ -643,30 +609,22 @@ def test_ctor_wo_reference(self): def test_labels_update_in_place(self): resource = { - 'datasetReference': { - 'projectId': 'testproject', - 'datasetId': 'testdataset', - }, + "datasetReference": {"projectId": "testproject", "datasetId": "testdataset"} } dataset = self._make_one(resource) labels = dataset.labels - labels['foo'] = 'bar' # update in place - self.assertEqual(dataset.labels, {'foo': 'bar'}) + labels["foo"] = "bar" # update in place + self.assertEqual(dataset.labels, {"foo": "bar"}) def test_table(self): from google.cloud.bigquery.table import TableReference - project = 'test-project' - dataset_id = 'test_dataset' - resource = { - 'datasetReference': { - 'projectId': project, - 'datasetId': dataset_id, - }, - } + project = "test-project" + dataset_id = "test_dataset" + resource = {"datasetReference": {"projectId": project, "datasetId": dataset_id}} dataset = self._make_one(resource) - table = dataset.table('table_id') + table = dataset.table("table_id") self.assertIsInstance(table, TableReference) - self.assertEqual(table.table_id, 'table_id') + self.assertEqual(table.table_id, "table_id") self.assertEqual(table.dataset_id, dataset_id) self.assertEqual(table.project, project) diff --git a/bigquery/tests/unit/test_dbapi__helpers.py b/bigquery/tests/unit/test_dbapi__helpers.py index f0430f06a1e5..bcc3e0879f87 100644 --- a/bigquery/tests/unit/test_dbapi__helpers.py +++ b/bigquery/tests/unit/test_dbapi__helpers.py @@ -23,77 +23,72 @@ class TestQueryParameters(unittest.TestCase): - def test_scalar_to_query_parameter(self): expected_types = [ - (True, 'BOOL'), - (False, 'BOOL'), - (123, 'INT64'), - (-123456789, 'INT64'), - (1.25, 'FLOAT64'), - (decimal.Decimal('1.25'), 'NUMERIC'), - (b'I am some bytes', 'BYTES'), - (u'I am a string', 'STRING'), - (datetime.date(2017, 4, 1), 'DATE'), - (datetime.time(12, 34, 56), 'TIME'), - (datetime.datetime(2012, 3, 4, 5, 6, 7), 'DATETIME'), + (True, "BOOL"), + (False, "BOOL"), + (123, "INT64"), + (-123456789, "INT64"), + (1.25, "FLOAT64"), + (decimal.Decimal("1.25"), "NUMERIC"), + (b"I am some bytes", "BYTES"), + (u"I am a string", "STRING"), + (datetime.date(2017, 4, 1), "DATE"), + (datetime.time(12, 34, 56), "TIME"), + (datetime.datetime(2012, 3, 4, 5, 6, 7), "DATETIME"), ( datetime.datetime( - 2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC), - 'TIMESTAMP', + 2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC + ), + "TIMESTAMP", ), ] for value, expected_type in expected_types: - msg = 'value: {} expected_type: {}'.format(value, expected_type) + msg = "value: {} expected_type: {}".format(value, expected_type) parameter = _helpers.scalar_to_query_parameter(value) self.assertIsNone(parameter.name, msg=msg) self.assertEqual(parameter.type_, expected_type, msg=msg) self.assertEqual(parameter.value, value, msg=msg) - named_parameter = _helpers.scalar_to_query_parameter( - value, name='myvar') - self.assertEqual(named_parameter.name, 'myvar', msg=msg) + named_parameter = _helpers.scalar_to_query_parameter(value, name="myvar") + self.assertEqual(named_parameter.name, "myvar", msg=msg) self.assertEqual(named_parameter.type_, 
expected_type, msg=msg) self.assertEqual(named_parameter.value, value, msg=msg) def test_scalar_to_query_parameter_w_unexpected_type(self): with self.assertRaises(exceptions.ProgrammingError): - _helpers.scalar_to_query_parameter(value={'a': 'dictionary'}) + _helpers.scalar_to_query_parameter(value={"a": "dictionary"}) def test_scalar_to_query_parameter_w_special_floats(self): - nan_parameter = _helpers.scalar_to_query_parameter(float('nan')) + nan_parameter = _helpers.scalar_to_query_parameter(float("nan")) self.assertTrue(math.isnan(nan_parameter.value)) - self.assertEqual(nan_parameter.type_, 'FLOAT64') - inf_parameter = _helpers.scalar_to_query_parameter(float('inf')) + self.assertEqual(nan_parameter.type_, "FLOAT64") + inf_parameter = _helpers.scalar_to_query_parameter(float("inf")) self.assertTrue(math.isinf(inf_parameter.value)) - self.assertEqual(inf_parameter.type_, 'FLOAT64') + self.assertEqual(inf_parameter.type_, "FLOAT64") def test_to_query_parameters_w_dict(self): - parameters = { - 'somebool': True, - 'somestring': u'a-string-value', - } + parameters = {"somebool": True, "somestring": u"a-string-value"} query_parameters = _helpers.to_query_parameters(parameters) query_parameter_tuples = [] for param in query_parameters: - query_parameter_tuples.append( - (param.name, param.type_, param.value)) + query_parameter_tuples.append((param.name, param.type_, param.value)) self.assertSequenceEqual( sorted(query_parameter_tuples), - sorted([ - ('somebool', 'BOOL', True), - ('somestring', 'STRING', u'a-string-value'), - ])) + sorted( + [ + ("somebool", "BOOL", True), + ("somestring", "STRING", u"a-string-value"), + ] + ), + ) def test_to_query_parameters_w_list(self): - parameters = [True, u'a-string-value'] + parameters = [True, u"a-string-value"] query_parameters = _helpers.to_query_parameters(parameters) query_parameter_tuples = [] for param in query_parameters: - query_parameter_tuples.append( - (param.name, param.type_, param.value)) + query_parameter_tuples.append((param.name, param.type_, param.value)) self.assertSequenceEqual( sorted(query_parameter_tuples), - sorted([ - (None, 'BOOL', True), - (None, 'STRING', u'a-string-value'), - ])) + sorted([(None, "BOOL", True), (None, "STRING", u"a-string-value")]), + ) diff --git a/bigquery/tests/unit/test_dbapi_connection.py b/bigquery/tests/unit/test_dbapi_connection.py index 176d5f989b41..19acec05bd34 100644 --- a/bigquery/tests/unit/test_dbapi_connection.py +++ b/bigquery/tests/unit/test_dbapi_connection.py @@ -18,10 +18,10 @@ class TestConnection(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.dbapi import Connection + return Connection def _make_one(self, *args, **kw): @@ -29,20 +29,23 @@ def _make_one(self, *args, **kw): def _mock_client(self, rows=None, schema=None): from google.cloud.bigquery import client + mock_client = mock.create_autospec(client.Client) return mock_client def test_ctor(self): from google.cloud.bigquery.dbapi import Connection + mock_client = self._mock_client() connection = self._make_one(client=mock_client) self.assertIsInstance(connection, Connection) self.assertIs(connection._client, mock_client) - @mock.patch('google.cloud.bigquery.Client', autospec=True) + @mock.patch("google.cloud.bigquery.Client", autospec=True) def test_connect_wo_client(self, mock_client): from google.cloud.bigquery.dbapi import connect from google.cloud.bigquery.dbapi import Connection + connection = connect() self.assertIsInstance(connection, Connection) 
self.assertIsNotNone(connection._client) @@ -50,6 +53,7 @@ def test_connect_wo_client(self, mock_client): def test_connect_w_client(self): from google.cloud.bigquery.dbapi import connect from google.cloud.bigquery.dbapi import Connection + mock_client = self._mock_client() connection = connect(client=mock_client) self.assertIsInstance(connection, Connection) @@ -67,6 +71,7 @@ def test_commit(self): def test_cursor(self): from google.cloud.bigquery.dbapi import Cursor + connection = self._make_one(client=self._mock_client()) cursor = connection.cursor() self.assertIsInstance(cursor, Cursor) diff --git a/bigquery/tests/unit/test_dbapi_cursor.py b/bigquery/tests/unit/test_dbapi_cursor.py index d0ad5401cf85..4a675c73958d 100644 --- a/bigquery/tests/unit/test_dbapi_cursor.py +++ b/bigquery/tests/unit/test_dbapi_cursor.py @@ -18,17 +18,16 @@ class TestCursor(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.dbapi import Cursor + return Cursor def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def _mock_client( - self, rows=None, schema=None, num_dml_affected_rows=None): + def _mock_client(self, rows=None, schema=None, num_dml_affected_rows=None): from google.cloud.bigquery import client if rows is None: @@ -40,31 +39,34 @@ def _mock_client( mock_client.query.return_value = self._mock_job( total_rows=total_rows, schema=schema, - num_dml_affected_rows=num_dml_affected_rows) + num_dml_affected_rows=num_dml_affected_rows, + ) mock_client.list_rows.return_value = rows return mock_client - def _mock_job( - self, total_rows=0, schema=None, num_dml_affected_rows=None): + def _mock_job(self, total_rows=0, schema=None, num_dml_affected_rows=None): from google.cloud.bigquery import job + mock_job = mock.create_autospec(job.QueryJob) mock_job.error_result = None - mock_job.state = 'DONE' + mock_job.state = "DONE" mock_job.result.return_value = mock_job mock_job._query_results = self._mock_results( - total_rows=total_rows, schema=schema, - num_dml_affected_rows=num_dml_affected_rows) + total_rows=total_rows, + schema=schema, + num_dml_affected_rows=num_dml_affected_rows, + ) if num_dml_affected_rows is None: mock_job.statement_type = None # API sends back None for SELECT else: - mock_job.statement_type = 'UPDATE' + mock_job.statement_type = "UPDATE" return mock_job - def _mock_results( - self, total_rows=0, schema=None, num_dml_affected_rows=None): + def _mock_results(self, total_rows=0, schema=None, num_dml_affected_rows=None): from google.cloud.bigquery import query + mock_results = mock.create_autospec(query._QueryResults) mock_results.schema = schema mock_results.num_dml_affected_rows = num_dml_affected_rows @@ -74,6 +76,7 @@ def _mock_results( def test_ctor(self): from google.cloud.bigquery.dbapi import connect from google.cloud.bigquery.dbapi import Cursor + connection = connect(self._mock_client()) cursor = self._make_one(connection) self.assertIsInstance(cursor, Cursor) @@ -81,6 +84,7 @@ def test_ctor(self): def test_close(self): from google.cloud.bigquery.dbapi import connect + connection = connect(self._mock_client()) cursor = connection.cursor() # close() is a no-op, there is nothing to test. 
@@ -88,47 +92,46 @@ def test_close(self): def test_fetchone_wo_execute_raises_error(self): from google.cloud.bigquery import dbapi + connection = dbapi.connect(self._mock_client()) cursor = connection.cursor() self.assertRaises(dbapi.Error, cursor.fetchone) def test_fetchone_w_row(self): from google.cloud.bigquery import dbapi - connection = dbapi.connect( - self._mock_client(rows=[(1,)])) + + connection = dbapi.connect(self._mock_client(rows=[(1,)])) cursor = connection.cursor() - cursor.execute('SELECT 1;') + cursor.execute("SELECT 1;") row = cursor.fetchone() self.assertEqual(row, (1,)) self.assertIsNone(cursor.fetchone()) def test_fetchmany_wo_execute_raises_error(self): from google.cloud.bigquery import dbapi + connection = dbapi.connect(self._mock_client()) cursor = connection.cursor() self.assertRaises(dbapi.Error, cursor.fetchmany) def test_fetchmany_w_row(self): from google.cloud.bigquery import dbapi - connection = dbapi.connect( - self._mock_client(rows=[(1,)])) + + connection = dbapi.connect(self._mock_client(rows=[(1,)])) cursor = connection.cursor() - cursor.execute('SELECT 1;') + cursor.execute("SELECT 1;") rows = cursor.fetchmany() self.assertEqual(len(rows), 1) self.assertEqual(rows[0], (1,)) def test_fetchmany_w_size(self): from google.cloud.bigquery import dbapi + connection = dbapi.connect( - self._mock_client( - rows=[ - (1, 2, 3), - (4, 5, 6), - (7, 8, 9), - ])) + self._mock_client(rows=[(1, 2, 3), (4, 5, 6), (7, 8, 9)]) + ) cursor = connection.cursor() - cursor.execute('SELECT a, b, c;') + cursor.execute("SELECT a, b, c;") rows = cursor.fetchmany(size=2) self.assertEqual(len(rows), 2) self.assertEqual(rows[0], (1, 2, 3)) @@ -141,15 +144,12 @@ def test_fetchmany_w_size(self): def test_fetchmany_w_arraysize(self): from google.cloud.bigquery import dbapi + connection = dbapi.connect( - self._mock_client( - rows=[ - (1, 2, 3), - (4, 5, 6), - (7, 8, 9), - ])) + self._mock_client(rows=[(1, 2, 3), (4, 5, 6), (7, 8, 9)]) + ) cursor = connection.cursor() - cursor.execute('SELECT a, b, c;') + cursor.execute("SELECT a, b, c;") cursor.arraysize = 2 rows = cursor.fetchmany() self.assertEqual(len(rows), 2) @@ -163,16 +163,17 @@ def test_fetchmany_w_arraysize(self): def test_fetchall_wo_execute_raises_error(self): from google.cloud.bigquery import dbapi + connection = dbapi.connect(self._mock_client()) cursor = connection.cursor() self.assertRaises(dbapi.Error, cursor.fetchall) def test_fetchall_w_row(self): from google.cloud.bigquery import dbapi - connection = dbapi.connect( - self._mock_client(rows=[(1,)])) + + connection = dbapi.connect(self._mock_client(rows=[(1,)])) cursor = connection.cursor() - cursor.execute('SELECT 1;') + cursor.execute("SELECT 1;") self.assertIsNone(cursor.description) self.assertEqual(cursor.rowcount, 1) rows = cursor.fetchall() @@ -181,20 +182,21 @@ def test_fetchall_w_row(self): def test_execute_custom_job_id(self): from google.cloud.bigquery.dbapi import connect + client = self._mock_client(rows=[], num_dml_affected_rows=0) connection = connect(client) cursor = connection.cursor() - cursor.execute('SELECT 1;', job_id='foo') + cursor.execute("SELECT 1;", job_id="foo") args, kwargs = client.query.call_args - self.assertEqual(args[0], 'SELECT 1;') - self.assertEqual(kwargs['job_id'], 'foo') + self.assertEqual(args[0], "SELECT 1;") + self.assertEqual(kwargs["job_id"], "foo") def test_execute_w_dml(self): from google.cloud.bigquery.dbapi import connect - connection = connect( - self._mock_client(rows=[], num_dml_affected_rows=12)) + + connection = 
connect(self._mock_client(rows=[], num_dml_affected_rows=12)) cursor = connection.cursor() - cursor.execute('DELETE FROM UserSessions WHERE user_id = \'test\';') + cursor.execute("DELETE FROM UserSessions WHERE user_id = 'test';") rows = cursor.fetchall() self.assertIsNone(cursor.description) self.assertEqual(cursor.rowcount, 12) @@ -204,39 +206,43 @@ def test_execute_w_query(self): from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery import dbapi - connection = dbapi.connect(self._mock_client( - rows=[('hello', 'world', 1), ('howdy', 'y\'all', 2)], - schema=[ - SchemaField('a', 'STRING', mode='NULLABLE'), - SchemaField('b', 'STRING', mode='REQUIRED'), - SchemaField('c', 'INTEGER', mode='NULLABLE')])) + connection = dbapi.connect( + self._mock_client( + rows=[("hello", "world", 1), ("howdy", "y'all", 2)], + schema=[ + SchemaField("a", "STRING", mode="NULLABLE"), + SchemaField("b", "STRING", mode="REQUIRED"), + SchemaField("c", "INTEGER", mode="NULLABLE"), + ], + ) + ) cursor = connection.cursor() - cursor.execute('SELECT a, b, c FROM hello_world WHERE d > 3;') + cursor.execute("SELECT a, b, c FROM hello_world WHERE d > 3;") # Verify the description. self.assertEqual(len(cursor.description), 3) a_name, a_type, _, _, _, _, a_null_ok = cursor.description[0] - self.assertEqual(a_name, 'a') - self.assertEqual(a_type, 'STRING') + self.assertEqual(a_name, "a") + self.assertEqual(a_type, "STRING") self.assertEqual(a_type, dbapi.STRING) self.assertTrue(a_null_ok) b_name, b_type, _, _, _, _, b_null_ok = cursor.description[1] - self.assertEqual(b_name, 'b') - self.assertEqual(b_type, 'STRING') + self.assertEqual(b_name, "b") + self.assertEqual(b_type, "STRING") self.assertEqual(b_type, dbapi.STRING) self.assertFalse(b_null_ok) c_name, c_type, _, _, _, _, c_null_ok = cursor.description[2] - self.assertEqual(c_name, 'c') - self.assertEqual(c_type, 'INTEGER') + self.assertEqual(c_name, "c") + self.assertEqual(c_type, "INTEGER") self.assertEqual(c_type, dbapi.NUMBER) self.assertTrue(c_null_ok) # Verify the results. 
self.assertEqual(cursor.rowcount, 2) row = cursor.fetchone() - self.assertEqual(row, ('hello', 'world', 1)) + self.assertEqual(row, ("hello", "world", 1)) row = cursor.fetchone() - self.assertEqual(row, ('howdy', 'y\'all', 2)) + self.assertEqual(row, ("howdy", "y'all", 2)) row = cursor.fetchone() self.assertIsNone(row) @@ -249,60 +255,64 @@ def test_execute_raises_if_result_raises(self): from google.cloud.bigquery.dbapi import exceptions job = mock.create_autospec(job.QueryJob) - job.result.side_effect = google.cloud.exceptions.GoogleCloudError('') + job.result.side_effect = google.cloud.exceptions.GoogleCloudError("") client = mock.create_autospec(client.Client) client.query.return_value = job connection = connect(client) cursor = connection.cursor() with self.assertRaises(exceptions.DatabaseError): - cursor.execute('SELECT 1') + cursor.execute("SELECT 1") def test_executemany_w_dml(self): from google.cloud.bigquery.dbapi import connect - connection = connect( - self._mock_client(rows=[], num_dml_affected_rows=12)) + + connection = connect(self._mock_client(rows=[], num_dml_affected_rows=12)) cursor = connection.cursor() cursor.executemany( - 'DELETE FROM UserSessions WHERE user_id = %s;', - (('test',), ('anothertest',))) + "DELETE FROM UserSessions WHERE user_id = %s;", + (("test",), ("anothertest",)), + ) self.assertIsNone(cursor.description) self.assertEqual(cursor.rowcount, 12) def test__format_operation_w_dict(self): from google.cloud.bigquery.dbapi import cursor + formatted_operation = cursor._format_operation( - 'SELECT %(somevalue)s, %(a `weird` one)s;', - { - 'somevalue': 'hi', - 'a `weird` one': 'world', - }) + "SELECT %(somevalue)s, %(a `weird` one)s;", + {"somevalue": "hi", "a `weird` one": "world"}, + ) self.assertEqual( - formatted_operation, 'SELECT @`somevalue`, @`a \\`weird\\` one`;') + formatted_operation, "SELECT @`somevalue`, @`a \\`weird\\` one`;" + ) def test__format_operation_w_wrong_dict(self): from google.cloud.bigquery import dbapi from google.cloud.bigquery.dbapi import cursor + self.assertRaises( dbapi.ProgrammingError, cursor._format_operation, - 'SELECT %(somevalue)s, %(othervalue)s;', - { - 'somevalue-not-here': 'hi', - 'othervalue': 'world', - }) + "SELECT %(somevalue)s, %(othervalue)s;", + {"somevalue-not-here": "hi", "othervalue": "world"}, + ) def test__format_operation_w_sequence(self): from google.cloud.bigquery.dbapi import cursor + formatted_operation = cursor._format_operation( - 'SELECT %s, %s;', ('hello', 'world')) - self.assertEqual(formatted_operation, 'SELECT ?, ?;') + "SELECT %s, %s;", ("hello", "world") + ) + self.assertEqual(formatted_operation, "SELECT ?, ?;") def test__format_operation_w_too_short_sequence(self): from google.cloud.bigquery import dbapi from google.cloud.bigquery.dbapi import cursor + self.assertRaises( dbapi.ProgrammingError, cursor._format_operation, - 'SELECT %s, %s;', - ('hello',)) + "SELECT %s, %s;", + ("hello",), + ) diff --git a/bigquery/tests/unit/test_dbapi_types.py b/bigquery/tests/unit/test_dbapi_types.py index 1803ea6d3c10..e05660ffed14 100644 --- a/bigquery/tests/unit/test_dbapi_types.py +++ b/bigquery/tests/unit/test_dbapi_types.py @@ -21,20 +21,22 @@ class TestTypes(unittest.TestCase): def test_binary_type(self): - self.assertEqual('BYTES', types.BINARY) - self.assertEqual('RECORD', types.BINARY) - self.assertEqual('STRUCT', types.BINARY) - self.assertNotEqual('STRING', types.BINARY) + self.assertEqual("BYTES", types.BINARY) + self.assertEqual("RECORD", types.BINARY) + self.assertEqual("STRUCT", types.BINARY) 
+ self.assertNotEqual("STRING", types.BINARY) def test_binary_constructor(self): - self.assertEqual(types.Binary(u'hello'), b'hello') - self.assertEqual(types.Binary(u'\u1f60'), u'\u1f60'.encode('utf-8')) + self.assertEqual(types.Binary(u"hello"), b"hello") + self.assertEqual(types.Binary(u"\u1f60"), u"\u1f60".encode("utf-8")) def test_timefromticks(self): somedatetime = datetime.datetime( - 2017, 2, 18, 12, 47, 26, tzinfo=google.cloud._helpers.UTC) + 2017, 2, 18, 12, 47, 26, tzinfo=google.cloud._helpers.UTC + ) epoch = datetime.datetime(1970, 1, 1, tzinfo=google.cloud._helpers.UTC) ticks = (somedatetime - epoch).total_seconds() self.assertEqual( types.TimeFromTicks(ticks, google.cloud._helpers.UTC), - datetime.time(12, 47, 26, tzinfo=google.cloud._helpers.UTC)) + datetime.time(12, 47, 26, tzinfo=google.cloud._helpers.UTC), + ) diff --git a/bigquery/tests/unit/test_external_config.py b/bigquery/tests/unit/test_external_config.py index 480645122554..ddf95e317969 100644 --- a/bigquery/tests/unit/test_external_config.py +++ b/bigquery/tests/unit/test_external_config.py @@ -22,15 +22,15 @@ class TestExternalConfig(unittest.TestCase): - SOURCE_URIS = ['gs://foo', 'gs://bar'] + SOURCE_URIS = ["gs://foo", "gs://bar"] BASE_RESOURCE = { - 'sourceFormat': '', - 'sourceUris': SOURCE_URIS, - 'maxBadRecords': 17, - 'autodetect': True, - 'ignoreUnknownValues': False, - 'compression': 'compression', + "sourceFormat": "", + "sourceUris": SOURCE_URIS, + "maxBadRecords": 17, + "autodetect": True, + "ignoreUnknownValues": False, + "compression": "compression", } def test_from_api_repr_base(self): @@ -43,23 +43,24 @@ def test_from_api_repr_base(self): got_resource = ec.to_api_repr() self.assertEqual(got_resource, self.BASE_RESOURCE) - resource = _copy_and_update(self.BASE_RESOURCE, { - 'schema': { - 'fields': [ - { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None, - }, - ], + resource = _copy_and_update( + self.BASE_RESOURCE, + { + "schema": { + "fields": [ + { + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + } + ] + } }, - }) + ) ec = external_config.ExternalConfig.from_api_repr(resource) self._verify_base(ec) - exp_schema = [ - schema.SchemaField('full_name', 'STRING', mode='REQUIRED') - ] + exp_schema = [schema.SchemaField("full_name", "STRING", mode="REQUIRED")] self.assertEqual(ec.schema, exp_schema) self.assertIsNone(ec.options) @@ -67,75 +68,76 @@ def test_from_api_repr_base(self): self.assertEqual(got_resource, resource) def test_to_api_repr_base(self): - ec = external_config.ExternalConfig('') + ec = external_config.ExternalConfig("") ec.source_uris = self.SOURCE_URIS ec.max_bad_records = 17 ec.autodetect = True ec.ignore_unknown_values = False - ec.compression = 'compression' - ec.schema = [ - schema.SchemaField('full_name', 'STRING', mode='REQUIRED') - ] + ec.compression = "compression" + ec.schema = [schema.SchemaField("full_name", "STRING", mode="REQUIRED")] exp_schema = { - 'fields': [ + "fields": [ { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None, - }, + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + } ] } got_resource = ec.to_api_repr() exp_resource = { - 'sourceFormat': '', - 'sourceUris': self.SOURCE_URIS, - 'maxBadRecords': 17, - 'autodetect': True, - 'ignoreUnknownValues': False, - 'compression': 'compression', - 'schema': exp_schema + "sourceFormat": "", + "sourceUris": self.SOURCE_URIS, + "maxBadRecords": 17, + "autodetect": True, 
+ "ignoreUnknownValues": False, + "compression": "compression", + "schema": exp_schema, } self.assertEqual(got_resource, exp_resource) def test_schema_None(self): - ec = external_config.ExternalConfig('') + ec = external_config.ExternalConfig("") ec.schema = None got = ec.to_api_repr() - want = {'sourceFormat': '', 'schema': None} + want = {"sourceFormat": "", "schema": None} self.assertEqual(got, want) def test_schema_empty(self): - ec = external_config.ExternalConfig('') + ec = external_config.ExternalConfig("") ec.schema = [] got = ec.to_api_repr() - want = {'sourceFormat': '', 'schema': {'fields': []}} + want = {"sourceFormat": "", "schema": {"fields": []}} self.assertEqual(got, want) def _verify_base(self, ec): self.assertEqual(ec.autodetect, True) - self.assertEqual(ec.compression, 'compression') + self.assertEqual(ec.compression, "compression") self.assertEqual(ec.ignore_unknown_values, False) self.assertEqual(ec.max_bad_records, 17) self.assertEqual(ec.source_uris, self.SOURCE_URIS) def test_to_api_repr_source_format(self): - ec = external_config.ExternalConfig('CSV') + ec = external_config.ExternalConfig("CSV") got = ec.to_api_repr() - want = {'sourceFormat': 'CSV'} + want = {"sourceFormat": "CSV"} self.assertEqual(got, want) def test_from_api_repr_sheets(self): - resource = _copy_and_update(self.BASE_RESOURCE, { - 'sourceFormat': 'GOOGLE_SHEETS', - 'googleSheetsOptions': {'skipLeadingRows': '123'}, - }) + resource = _copy_and_update( + self.BASE_RESOURCE, + { + "sourceFormat": "GOOGLE_SHEETS", + "googleSheetsOptions": {"skipLeadingRows": "123"}, + }, + ) ec = external_config.ExternalConfig.from_api_repr(resource) self._verify_base(ec) - self.assertEqual(ec.source_format, 'GOOGLE_SHEETS') + self.assertEqual(ec.source_format, "GOOGLE_SHEETS") self.assertIsInstance(ec.options, external_config.GoogleSheetsOptions) self.assertEqual(ec.options.skip_leading_rows, 123) @@ -143,21 +145,21 @@ def test_from_api_repr_sheets(self): self.assertEqual(got_resource, resource) - del resource['googleSheetsOptions']['skipLeadingRows'] + del resource["googleSheetsOptions"]["skipLeadingRows"] ec = external_config.ExternalConfig.from_api_repr(resource) self.assertIsNone(ec.options.skip_leading_rows) got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) def test_to_api_repr_sheets(self): - ec = external_config.ExternalConfig('GOOGLE_SHEETS') + ec = external_config.ExternalConfig("GOOGLE_SHEETS") options = external_config.GoogleSheetsOptions() options.skip_leading_rows = 123 ec._options = options exp_resource = { - 'sourceFormat': 'GOOGLE_SHEETS', - 'googleSheetsOptions': {'skipLeadingRows': '123'}, + "sourceFormat": "GOOGLE_SHEETS", + "googleSheetsOptions": {"skipLeadingRows": "123"}, } got_resource = ec.to_api_repr() @@ -165,60 +167,63 @@ def test_to_api_repr_sheets(self): self.assertEqual(got_resource, exp_resource) def test_from_api_repr_csv(self): - resource = _copy_and_update(self.BASE_RESOURCE, { - 'sourceFormat': 'CSV', - 'csvOptions': { - 'fieldDelimiter': 'fieldDelimiter', - 'skipLeadingRows': '123', - 'quote': 'quote', - 'allowQuotedNewlines': True, - 'allowJaggedRows': False, - 'encoding': 'encoding', + resource = _copy_and_update( + self.BASE_RESOURCE, + { + "sourceFormat": "CSV", + "csvOptions": { + "fieldDelimiter": "fieldDelimiter", + "skipLeadingRows": "123", + "quote": "quote", + "allowQuotedNewlines": True, + "allowJaggedRows": False, + "encoding": "encoding", + }, }, - }) + ) ec = external_config.ExternalConfig.from_api_repr(resource) self._verify_base(ec) - 
self.assertEqual(ec.source_format, 'CSV') + self.assertEqual(ec.source_format, "CSV") self.assertIsInstance(ec.options, external_config.CSVOptions) - self.assertEqual(ec.options.field_delimiter, 'fieldDelimiter') + self.assertEqual(ec.options.field_delimiter, "fieldDelimiter") self.assertEqual(ec.options.skip_leading_rows, 123) - self.assertEqual(ec.options.quote_character, 'quote') + self.assertEqual(ec.options.quote_character, "quote") self.assertEqual(ec.options.allow_quoted_newlines, True) self.assertEqual(ec.options.allow_jagged_rows, False) - self.assertEqual(ec.options.encoding, 'encoding') + self.assertEqual(ec.options.encoding, "encoding") got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) - del resource['csvOptions']['skipLeadingRows'] + del resource["csvOptions"]["skipLeadingRows"] ec = external_config.ExternalConfig.from_api_repr(resource) self.assertIsNone(ec.options.skip_leading_rows) got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) def test_to_api_repr_csv(self): - ec = external_config.ExternalConfig('CSV') + ec = external_config.ExternalConfig("CSV") options = external_config.CSVOptions() options.allow_quoted_newlines = True - options.encoding = 'encoding' - options.field_delimiter = 'fieldDelimiter' - options.quote_character = 'quote' + options.encoding = "encoding" + options.field_delimiter = "fieldDelimiter" + options.quote_character = "quote" options.skip_leading_rows = 123 options.allow_jagged_rows = False ec._options = options exp_resource = { - 'sourceFormat': 'CSV', - 'csvOptions': { - 'fieldDelimiter': 'fieldDelimiter', - 'skipLeadingRows': '123', - 'quote': 'quote', - 'allowQuotedNewlines': True, - 'allowJaggedRows': False, - 'encoding': 'encoding', + "sourceFormat": "CSV", + "csvOptions": { + "fieldDelimiter": "fieldDelimiter", + "skipLeadingRows": "123", + "quote": "quote", + "allowQuotedNewlines": True, + "allowJaggedRows": False, + "encoding": "encoding", }, } @@ -227,125 +232,126 @@ def test_to_api_repr_csv(self): self.assertEqual(got_resource, exp_resource) def test_from_api_repr_bigtable(self): - qualifier_encoded = base64.standard_b64encode(b'q').decode('ascii') - resource = _copy_and_update(self.BASE_RESOURCE, { - 'sourceFormat': 'BIGTABLE', - 'bigtableOptions': { - 'ignoreUnspecifiedColumnFamilies': True, - 'readRowkeyAsString': False, - 'columnFamilies': [ - { - 'familyId': 'familyId', - 'type': 'type', - 'encoding': 'encoding', - 'columns': [ - { - 'qualifierString': 'q', - 'fieldName': 'fieldName1', - 'type': 'type1', - 'encoding': 'encoding1', - 'onlyReadLatest': True, - }, - { - 'qualifierEncoded': qualifier_encoded, - 'fieldName': 'fieldName2', - 'type': 'type2', - 'encoding': 'encoding2', - }, - - ], - 'onlyReadLatest': False, - } - ], + qualifier_encoded = base64.standard_b64encode(b"q").decode("ascii") + resource = _copy_and_update( + self.BASE_RESOURCE, + { + "sourceFormat": "BIGTABLE", + "bigtableOptions": { + "ignoreUnspecifiedColumnFamilies": True, + "readRowkeyAsString": False, + "columnFamilies": [ + { + "familyId": "familyId", + "type": "type", + "encoding": "encoding", + "columns": [ + { + "qualifierString": "q", + "fieldName": "fieldName1", + "type": "type1", + "encoding": "encoding1", + "onlyReadLatest": True, + }, + { + "qualifierEncoded": qualifier_encoded, + "fieldName": "fieldName2", + "type": "type2", + "encoding": "encoding2", + }, + ], + "onlyReadLatest": False, + } + ], + }, }, - }) + ) ec = external_config.ExternalConfig.from_api_repr(resource) self._verify_base(ec) - 
self.assertEqual(ec.source_format, 'BIGTABLE') + self.assertEqual(ec.source_format, "BIGTABLE") self.assertIsInstance(ec.options, external_config.BigtableOptions) self.assertEqual(ec.options.ignore_unspecified_column_families, True) self.assertEqual(ec.options.read_rowkey_as_string, False) self.assertEqual(len(ec.options.column_families), 1) fam1 = ec.options.column_families[0] self.assertIsInstance(fam1, external_config.BigtableColumnFamily) - self.assertEqual(fam1.family_id, 'familyId') - self.assertEqual(fam1.type_, 'type') - self.assertEqual(fam1.encoding, 'encoding') + self.assertEqual(fam1.family_id, "familyId") + self.assertEqual(fam1.type_, "type") + self.assertEqual(fam1.encoding, "encoding") self.assertEqual(len(fam1.columns), 2) self.assertFalse(fam1.only_read_latest) col1 = fam1.columns[0] - self.assertEqual(col1.qualifier_string, 'q') - self.assertEqual(col1.field_name, 'fieldName1') - self.assertEqual(col1.type_, 'type1') - self.assertEqual(col1.encoding, 'encoding1') + self.assertEqual(col1.qualifier_string, "q") + self.assertEqual(col1.field_name, "fieldName1") + self.assertEqual(col1.type_, "type1") + self.assertEqual(col1.encoding, "encoding1") self.assertTrue(col1.only_read_latest) self.assertIsNone(col1.qualifier_encoded) col2 = ec.options.column_families[0].columns[1] - self.assertEqual(col2.qualifier_encoded, b'q') - self.assertEqual(col2.field_name, 'fieldName2') - self.assertEqual(col2.type_, 'type2') - self.assertEqual(col2.encoding, 'encoding2') + self.assertEqual(col2.qualifier_encoded, b"q") + self.assertEqual(col2.field_name, "fieldName2") + self.assertEqual(col2.type_, "type2") + self.assertEqual(col2.encoding, "encoding2") got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) def test_to_api_repr_bigtable(self): - ec = external_config.ExternalConfig('BIGTABLE') + ec = external_config.ExternalConfig("BIGTABLE") options = external_config.BigtableOptions() options.ignore_unspecified_column_families = True options.read_rowkey_as_string = False ec._options = options fam1 = external_config.BigtableColumnFamily() - fam1.family_id = 'familyId' - fam1.type_ = 'type' - fam1.encoding = 'encoding' + fam1.family_id = "familyId" + fam1.type_ = "type" + fam1.encoding = "encoding" fam1.only_read_latest = False col1 = external_config.BigtableColumn() - col1.qualifier_string = 'q' - col1.field_name = 'fieldName1' - col1.type_ = 'type1' - col1.encoding = 'encoding1' + col1.qualifier_string = "q" + col1.field_name = "fieldName1" + col1.type_ = "type1" + col1.encoding = "encoding1" col1.only_read_latest = True col2 = external_config.BigtableColumn() - col2.qualifier_encoded = b'q' - col2.field_name = 'fieldName2' - col2.type_ = 'type2' - col2.encoding = 'encoding2' + col2.qualifier_encoded = b"q" + col2.field_name = "fieldName2" + col2.type_ = "type2" + col2.encoding = "encoding2" fam1.columns = [col1, col2] options.column_families = [fam1] - qualifier_encoded = base64.standard_b64encode(b'q').decode('ascii') + qualifier_encoded = base64.standard_b64encode(b"q").decode("ascii") exp_resource = { - 'sourceFormat': 'BIGTABLE', - 'bigtableOptions': { - 'ignoreUnspecifiedColumnFamilies': True, - 'readRowkeyAsString': False, - 'columnFamilies': [ + "sourceFormat": "BIGTABLE", + "bigtableOptions": { + "ignoreUnspecifiedColumnFamilies": True, + "readRowkeyAsString": False, + "columnFamilies": [ { - 'familyId': 'familyId', - 'type': 'type', - 'encoding': 'encoding', - 'columns': [ + "familyId": "familyId", + "type": "type", + "encoding": "encoding", + "columns": [ { - 
'qualifierString': 'q', - 'fieldName': 'fieldName1', - 'type': 'type1', - 'encoding': 'encoding1', - 'onlyReadLatest': True, + "qualifierString": "q", + "fieldName": "fieldName1", + "type": "type1", + "encoding": "encoding1", + "onlyReadLatest": True, }, { - 'qualifierEncoded': qualifier_encoded, - 'fieldName': 'fieldName2', - 'type': 'type2', - 'encoding': 'encoding2', + "qualifierEncoded": qualifier_encoded, + "fieldName": "fieldName2", + "type": "type2", + "encoding": "encoding2", }, - ], - 'onlyReadLatest': False, + "onlyReadLatest": False, } ], }, diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index 998a397e0bed..8d5aef8f4603 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -17,6 +17,7 @@ import mock from six.moves import http_client + try: import pandas except (ImportError, AttributeError): # pragma: NO COVER @@ -29,14 +30,13 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_client(project='test-project', connection=None): +def _make_client(project="test-project", connection=None): from google.cloud.bigquery.client import Client if connection is None: connection = _make_connection() - client = Client( - project=project, credentials=_make_credentials(), _http=object()) + client = Client(project=project, credentials=_make_credentials(), _http=object()) client._connection = connection return client @@ -46,25 +46,21 @@ def _make_connection(*responses): from google.cloud.exceptions import NotFound mock_conn = mock.create_autospec(google.cloud.bigquery._http.Connection) - mock_conn.api_request.side_effect = list(responses) + [NotFound('miss')] + mock_conn.api_request.side_effect = list(responses) + [NotFound("miss")] return mock_conn class Test__error_result_to_exception(unittest.TestCase): - def _call_fut(self, *args, **kwargs): from google.cloud.bigquery import job return job._error_result_to_exception(*args, **kwargs) def test_simple(self): - error_result = { - 'reason': 'invalid', - 'message': 'bad request' - } + error_result = {"reason": "invalid", "message": "bad request"} exception = self._call_fut(error_result) self.assertEqual(exception.code, http_client.BAD_REQUEST) - self.assertTrue(exception.message.startswith('bad request')) + self.assertTrue(exception.message.startswith("bad request")) self.assertIn(error_result, exception.errors) def test_missing_reason(self): @@ -74,9 +70,9 @@ def test_missing_reason(self): class Test_JobReference(unittest.TestCase): - JOB_ID = 'job-id' - PROJECT = 'test-project-123' - LOCATION = 'us-central' + JOB_ID = "job-id" + PROJECT = "test-project-123" + LOCATION = "us-central" @staticmethod def _get_target_class(): @@ -97,17 +93,20 @@ def test_ctor(self): def test__to_api_repr(self): job_ref = self._make_one(self.JOB_ID, self.PROJECT, self.LOCATION) - self.assertEqual(job_ref._to_api_repr(), { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': self.LOCATION, - }) + self.assertEqual( + job_ref._to_api_repr(), + { + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": self.LOCATION, + }, + ) def test_from_api_repr(self): api_repr = { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': self.LOCATION, + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": self.LOCATION, } job_ref = self._get_target_class()._from_api_repr(api_repr) @@ -118,9 +117,9 @@ def test_from_api_repr(self): class Test_AsyncJob(unittest.TestCase): - JOB_ID = 'job-id' - PROJECT = 'test-project-123' - LOCATION = 
'us-central' + JOB_ID = "job-id" + PROJECT = "test-project-123" + LOCATION = "us-central" @staticmethod def _get_target_class(): @@ -133,7 +132,7 @@ def _make_one(self, job_id, client): def _make_derived_class(self): class Derived(self._get_target_class()): - _JOB_TYPE = 'derived' + _JOB_TYPE = "derived" return Derived @@ -158,22 +157,17 @@ def test_ctor_w_bare_job_id(self): self.assertIs(job._client, client) self.assertEqual( job._properties, - { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - } + {"jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}}, ) self.assertIsInstance(job._completion_lock, type(threading.Lock())) self.assertEqual( - job.path, - '/projects/{}/jobs/{}'.format(self.PROJECT, self.JOB_ID)) + job.path, "/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID) + ) def test_ctor_w_job_ref(self): import threading - other_project = 'other-project-234' + other_project = "other-project-234" client = _make_client(project=other_project) job_ref = self._job_reference(self.JOB_ID, self.PROJECT, self.LOCATION) job = self._make_one(job_ref, client) @@ -185,18 +179,18 @@ def test_ctor_w_job_ref(self): self.assertEqual( job._properties, { - 'jobReference': { - 'projectId': self.PROJECT, - 'location': self.LOCATION, - 'jobId': self.JOB_ID, - }, - } + "jobReference": { + "projectId": self.PROJECT, + "location": self.LOCATION, + "jobId": self.JOB_ID, + } + }, ) self.assertFalse(job._result_set) self.assertIsInstance(job._completion_lock, type(threading.Lock())) self.assertEqual( - job.path, - '/projects/{}/jobs/{}'.format(self.PROJECT, self.JOB_ID)) + job.path, "/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID) + ) def test__require_client_w_none(self): client = _make_client(project=self.PROJECT) @@ -215,7 +209,7 @@ def test_job_type(self): client = _make_client(project=self.PROJECT) derived = self._make_derived(self.JOB_ID, client) - self.assertEqual(derived.job_type, 'derived') + self.assertEqual(derived.job_type, "derived") def test_labels_miss(self): client = _make_client(project=self.PROJECT) @@ -226,40 +220,38 @@ def test_labels_update_in_place(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) labels = job.labels - labels['foo'] = 'bar' # update in place - self.assertEqual(job.labels, {'foo': 'bar'}) + labels["foo"] = "bar" # update in place + self.assertEqual(job.labels, {"foo": "bar"}) def test_labels_hit(self): - labels = { - 'foo': 'bar', - } + labels = {"foo": "bar"} client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['labels'] = labels + job._properties["labels"] = labels self.assertEqual(job.labels, labels) def test_etag(self): - etag = 'ETAG-123' + etag = "ETAG-123" client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) self.assertIsNone(job.etag) - job._properties['etag'] = etag + job._properties["etag"] = etag self.assertEqual(job.etag, etag) def test_self_link(self): - self_link = 'https://api.example.com/123' + self_link = "https://api.example.com/123" client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) self.assertIsNone(job.self_link) - job._properties['selfLink'] = self_link + job._properties["selfLink"] = self_link self.assertEqual(job.self_link, self_link) def test_user_email(self): - user_email = 'user@example.com' + user_email = "user@example.com" client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) 
self.assertIsNone(job.user_email) - job._properties['user_email'] = user_email + job._properties["user_email"] = user_email self.assertEqual(job.user_email, user_email) @staticmethod @@ -267,9 +259,10 @@ def _datetime_and_millis(): import datetime import pytz from google.cloud._helpers import _millis + now = datetime.datetime.utcnow().replace( - microsecond=123000, # stats timestamps have ms precision - tzinfo=pytz.UTC) + microsecond=123000, tzinfo=pytz.UTC # stats timestamps have ms precision + ) return now, _millis(now) def test_created(self): @@ -277,9 +270,9 @@ def test_created(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) self.assertIsNone(job.created) - stats = job._properties['statistics'] = {} + stats = job._properties["statistics"] = {} self.assertIsNone(job.created) - stats['creationTime'] = millis + stats["creationTime"] = millis self.assertEqual(job.created, now) def test_started(self): @@ -287,9 +280,9 @@ def test_started(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) self.assertIsNone(job.started) - stats = job._properties['statistics'] = {} + stats = job._properties["statistics"] = {} self.assertIsNone(job.started) - stats['startTime'] = millis + stats["startTime"] = millis self.assertEqual(job.started, now) def test_ended(self): @@ -297,63 +290,65 @@ def test_ended(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) self.assertIsNone(job.ended) - stats = job._properties['statistics'] = {} + stats = job._properties["statistics"] = {} self.assertIsNone(job.ended) - stats['endTime'] = millis + stats["endTime"] = millis self.assertEqual(job.ended, now) def test__job_statistics(self): - statistics = {'foo': 'bar'} + statistics = {"foo": "bar"} client = _make_client(project=self.PROJECT) derived = self._make_derived(self.JOB_ID, client) self.assertEqual(derived._job_statistics(), {}) - stats = derived._properties['statistics'] = {} + stats = derived._properties["statistics"] = {} self.assertEqual(derived._job_statistics(), {}) - stats['derived'] = statistics + stats["derived"] = statistics self.assertEqual(derived._job_statistics(), statistics) def test_error_result(self): error_result = { - 'debugInfo': 'DEBUG INFO', - 'location': 'LOCATION', - 'message': 'MESSAGE', - 'reason': 'REASON' + "debugInfo": "DEBUG INFO", + "location": "LOCATION", + "message": "MESSAGE", + "reason": "REASON", } client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) self.assertIsNone(job.error_result) - status = job._properties['status'] = {} + status = job._properties["status"] = {} self.assertIsNone(job.error_result) - status['errorResult'] = error_result + status["errorResult"] = error_result self.assertEqual(job.error_result, error_result) def test_errors(self): - errors = [{ - 'debugInfo': 'DEBUG INFO', - 'location': 'LOCATION', - 'message': 'MESSAGE', - 'reason': 'REASON' - }] + errors = [ + { + "debugInfo": "DEBUG INFO", + "location": "LOCATION", + "message": "MESSAGE", + "reason": "REASON", + } + ] client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) self.assertIsNone(job.errors) - status = job._properties['status'] = {} + status = job._properties["status"] = {} self.assertIsNone(job.errors) - status['errors'] = errors + status["errors"] = errors self.assertEqual(job.errors, errors) def test_state(self): - state = 'STATE' + state = "STATE" client = _make_client(project=self.PROJECT) job = 
self._make_one(self.JOB_ID, client) self.assertIsNone(job.state) - status = job._properties['status'] = {} + status = job._properties["status"] = {} self.assertIsNone(job.state) - status['state'] = state + status["state"] = state self.assertEqual(job.state, state) def test__scrub_local_properties(self): - before = {'foo': 'bar'} + before = {"foo": "bar"} resource = before.copy() client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) @@ -361,7 +356,7 @@ def test__scrub_local_properties(self): self.assertEqual(resource, before) def test__copy_configuration_properties(self): - before = {'foo': 'bar'} + before = {"foo": "bar"} resource = before.copy() client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) @@ -376,18 +371,14 @@ def _set_properties_job(self): job._copy_configuration_properties = mock.Mock() job._set_future_result = mock.Mock() job._properties = { - 'jobReference': job._properties['jobReference'], - 'foo': 'bar', + "jobReference": job._properties["jobReference"], + "foo": "bar", } return job def test__set_properties_no_stats(self): - config = { - 'test': True, - } - resource = { - 'configuration': config, - } + config = {"test": True} + resource = {"configuration": config} job = self._set_properties_job() job._set_properties(resource) @@ -399,22 +390,15 @@ def test__set_properties_no_stats(self): def test__set_properties_w_creation_time(self): now, millis = self._datetime_and_millis() - config = { - 'test': True, - } - stats = { - 'creationTime': str(millis), - } - resource = { - 'configuration': config, - 'statistics': stats, - } + config = {"test": True} + stats = {"creationTime": str(millis)} + resource = {"configuration": config, "statistics": stats} job = self._set_properties_job() job._set_properties(resource) cleaned = copy.deepcopy(resource) - cleaned['statistics']['creationTime'] = float(millis) + cleaned["statistics"]["creationTime"] = float(millis) self.assertEqual(job._properties, cleaned) job._scrub_local_properties.assert_called_once_with(resource) @@ -422,22 +406,15 @@ def test__set_properties_w_creation_time(self): def test__set_properties_w_start_time(self): now, millis = self._datetime_and_millis() - config = { - 'test': True, - } - stats = { - 'startTime': str(millis), - } - resource = { - 'configuration': config, - 'statistics': stats, - } + config = {"test": True} + stats = {"startTime": str(millis)} + resource = {"configuration": config, "statistics": stats} job = self._set_properties_job() job._set_properties(resource) cleaned = copy.deepcopy(resource) - cleaned['statistics']['startTime'] = float(millis) + cleaned["statistics"]["startTime"] = float(millis) self.assertEqual(job._properties, cleaned) job._scrub_local_properties.assert_called_once_with(resource) @@ -445,22 +422,15 @@ def test__set_properties_w_start_time(self): def test__set_properties_w_end_time(self): now, millis = self._datetime_and_millis() - config = { - 'test': True, - } - stats = { - 'endTime': str(millis), - } - resource = { - 'configuration': config, - 'statistics': stats, - } + config = {"test": True} + stats = {"endTime": str(millis)} + resource = {"configuration": config, "statistics": stats} job = self._set_properties_job() job._set_properties(resource) cleaned = copy.deepcopy(resource) - cleaned['statistics']['endTime'] = float(millis) + cleaned["statistics"]["endTime"] = float(millis) self.assertEqual(job._properties, cleaned) job._scrub_local_properties.assert_called_once_with(resource) @@ -474,47 +444,38 @@ def 
test__get_resource_config_missing_job_ref(self): klass._get_resource_config(resource) def test__get_resource_config_missing_job_id(self): - resource = { - 'jobReference': {}, - } + resource = {"jobReference": {}} klass = self._make_derived_class() with self.assertRaises(KeyError): klass._get_resource_config(resource) def test__get_resource_config_missing_configuration(self): - resource = { - 'jobReference': {'jobId': self.JOB_ID}, - } + resource = {"jobReference": {"jobId": self.JOB_ID}} klass = self._make_derived_class() with self.assertRaises(KeyError): klass._get_resource_config(resource) def test__get_resource_config_missing_config_type(self): - resource = { - 'jobReference': {'jobId': self.JOB_ID}, - 'configuration': {}, - } + resource = {"jobReference": {"jobId": self.JOB_ID}, "configuration": {}} klass = self._make_derived_class() with self.assertRaises(KeyError): klass._get_resource_config(resource) def test__get_resource_config_ok(self): - derived_config = {'foo': 'bar'} + derived_config = {"foo": "bar"} resource = { - 'jobReference': {'jobId': self.JOB_ID}, - 'configuration': { - 'derived': derived_config, - }, + "jobReference": {"jobId": self.JOB_ID}, + "configuration": {"derived": derived_config}, } klass = self._make_derived_class() job_id, config = klass._get_resource_config(resource) self.assertEqual(job_id, self.JOB_ID) - self.assertEqual(config, {'derived': derived_config}) + self.assertEqual(config, {"derived": derived_config}) def test__build_resource(self): client = _make_client(project=self.PROJECT) @@ -530,7 +491,7 @@ def test_to_api_repr(self): def test__begin_already(self): job = self._set_properties_job() - job._properties['status'] = {'state': 'WHATEVER'} + job._properties["status"] = {"state": "WHATEVER"} with self.assertRaises(ValueError): job._begin() @@ -539,14 +500,12 @@ def test__begin_defaults(self): from google.cloud.bigquery.retry import DEFAULT_RETRY resource = { - 'jobReference': { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': None, + "jobReference": { + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": None, }, - 'configuration': { - 'test': True, - } + "configuration": {"test": True}, } job = self._set_properties_job() builder = job.to_api_repr = mock.Mock() @@ -558,8 +517,8 @@ def test__begin_defaults(self): call_api.assert_called_once_with( DEFAULT_RETRY, - method='POST', - path='/projects/{}/jobs'.format(self.PROJECT), + method="POST", + path="/projects/{}/jobs".format(self.PROJECT), data=resource, ) self.assertEqual(job._properties, resource) @@ -567,16 +526,14 @@ def test__begin_defaults(self): def test__begin_explicit(self): from google.cloud.bigquery.retry import DEFAULT_RETRY - other_project = 'other-project-234' + other_project = "other-project-234" resource = { - 'jobReference': { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': None, + "jobReference": { + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": None, }, - 'configuration': { - 'test': True, - } + "configuration": {"test": True}, } job = self._set_properties_job() builder = job.to_api_repr = mock.Mock() @@ -590,8 +547,8 @@ def test__begin_explicit(self): call_api.assert_called_once_with( retry, - method='POST', - path='/projects/{}/jobs'.format(self.PROJECT), + method="POST", + path="/projects/{}/jobs".format(self.PROJECT), data=resource, ) self.assertEqual(job._properties, resource) @@ -601,35 +558,30 @@ def test_exists_defaults_miss(self): from google.cloud.bigquery.retry import DEFAULT_RETRY job = 
self._set_properties_job() - job._properties['jobReference']['location'] = self.LOCATION + job._properties["jobReference"]["location"] = self.LOCATION call_api = job._client._call_api = mock.Mock() - call_api.side_effect = NotFound('testing') + call_api.side_effect = NotFound("testing") self.assertFalse(job.exists()) call_api.assert_called_once_with( DEFAULT_RETRY, - method='GET', - path='/projects/{}/jobs/{}'.format(self.PROJECT, self.JOB_ID), - query_params={ - 'fields': 'id', - 'location': self.LOCATION, - } + method="GET", + path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), + query_params={"fields": "id", "location": self.LOCATION}, ) def test_exists_explicit_hit(self): from google.cloud.bigquery.retry import DEFAULT_RETRY - other_project = 'other-project-234' + other_project = "other-project-234" resource = { - 'jobReference': { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': None, + "jobReference": { + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": None, }, - 'configuration': { - 'test': True, - } + "configuration": {"test": True}, } job = self._set_properties_job() client = _make_client(project=other_project) @@ -641,26 +593,24 @@ def test_exists_explicit_hit(self): call_api.assert_called_once_with( retry, - method='GET', - path='/projects/{}/jobs/{}'.format(self.PROJECT, self.JOB_ID), - query_params={'fields': 'id'} + method="GET", + path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), + query_params={"fields": "id"}, ) def test_reload_defaults(self): from google.cloud.bigquery.retry import DEFAULT_RETRY resource = { - 'jobReference': { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': None, + "jobReference": { + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": None, }, - 'configuration': { - 'test': True, - } + "configuration": {"test": True}, } job = self._set_properties_job() - job._properties['jobReference']['location'] = self.LOCATION + job._properties["jobReference"]["location"] = self.LOCATION call_api = job._client._call_api = mock.Mock() call_api.return_value = resource @@ -668,25 +618,23 @@ def test_reload_defaults(self): call_api.assert_called_once_with( DEFAULT_RETRY, - method='GET', - path='/projects/{}/jobs/{}'.format(self.PROJECT, self.JOB_ID), - query_params={'location': self.LOCATION}, + method="GET", + path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), + query_params={"location": self.LOCATION}, ) self.assertEqual(job._properties, resource) def test_reload_explicit(self): from google.cloud.bigquery.retry import DEFAULT_RETRY - other_project = 'other-project-234' + other_project = "other-project-234" resource = { - 'jobReference': { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': None, + "jobReference": { + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": None, }, - 'configuration': { - 'test': True, - } + "configuration": {"test": True}, } job = self._set_properties_job() client = _make_client(project=other_project) @@ -698,51 +646,46 @@ def test_reload_explicit(self): call_api.assert_called_once_with( retry, - method='GET', - path='/projects/{}/jobs/{}'.format(self.PROJECT, self.JOB_ID), + method="GET", + path="/projects/{}/jobs/{}".format(self.PROJECT, self.JOB_ID), query_params={}, ) self.assertEqual(job._properties, resource) def test_cancel_defaults(self): resource = { - 'jobReference': { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': None, + "jobReference": { + "jobId": self.JOB_ID, + "projectId": 
self.PROJECT, + "location": None, }, - 'configuration': { - 'test': True, - } + "configuration": {"test": True}, } - response = {'job': resource} + response = {"job": resource} job = self._set_properties_job() - job._properties['jobReference']['location'] = self.LOCATION + job._properties["jobReference"]["location"] = self.LOCATION connection = job._client._connection = _make_connection(response) self.assertTrue(job.cancel()) connection.api_request.assert_called_once_with( - method='POST', - path='/projects/{}/jobs/{}/cancel'.format( - self.PROJECT, self.JOB_ID), - query_params={'location': self.LOCATION}, + method="POST", + path="/projects/{}/jobs/{}/cancel".format(self.PROJECT, self.JOB_ID), + query_params={"location": self.LOCATION}, ) self.assertEqual(job._properties, resource) def test_cancel_explicit(self): - other_project = 'other-project-234' + other_project = "other-project-234" resource = { - 'jobReference': { - 'jobId': self.JOB_ID, - 'projectId': self.PROJECT, - 'location': None, + "jobReference": { + "jobId": self.JOB_ID, + "projectId": self.PROJECT, + "location": None, }, - 'configuration': { - 'test': True, - } + "configuration": {"test": True}, } - response = {'job': resource} + response = {"job": resource} job = self._set_properties_job() client = _make_client(project=other_project) connection = client._connection = _make_connection(response) @@ -750,9 +693,8 @@ def test_cancel_explicit(self): self.assertTrue(job.cancel(client=client)) connection.api_request.assert_called_once_with( - method='POST', - path='/projects/{}/jobs/{}/cancel'.format( - self.PROJECT, self.JOB_ID), + method="POST", + path="/projects/{}/jobs/{}/cancel".format(self.PROJECT, self.JOB_ID), query_params={}, ) self.assertEqual(job._properties, resource) @@ -771,7 +713,7 @@ def test__set_future_result_wo_done(self): def test__set_future_result_w_result_set(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['status'] = {'state': 'DONE'} + job._properties["status"] = {"state": "DONE"} job._result_set = True set_exception = job.set_exception = mock.Mock() set_result = job.set_result = mock.Mock() @@ -786,12 +728,9 @@ def test__set_future_result_w_done_wo_result_set_w_error(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['status'] = { - 'state': 'DONE', - 'errorResult': { - 'reason': 'notFound', - 'message': 'testing' - } + job._properties["status"] = { + "state": "DONE", + "errorResult": {"reason": "notFound", "message": "testing"}, } set_exception = job.set_exception = mock.Mock() set_result = job.set_result = mock.Mock() @@ -802,14 +741,14 @@ def test__set_future_result_w_done_wo_result_set_w_error(self): args, kw = set_exception.call_args exception, = args self.assertIsInstance(exception, NotFound) - self.assertEqual(exception.message, 'testing') + self.assertEqual(exception.message, "testing") self.assertEqual(kw, {}) set_result.assert_not_called() def test__set_future_result_w_done_wo_result_set_wo_error(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['status'] = {'state': 'DONE'} + job._properties["status"] = {"state": "DONE"} set_exception = job.set_exception = mock.Mock() set_result = job.set_result = mock.Mock() @@ -844,11 +783,11 @@ def test_done_explicit_wo_state(self): def test_done_already(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['status'] = 
{'state': 'DONE'} + job._properties["status"] = {"state": "DONE"} self.assertTrue(job.done()) - @mock.patch('google.api_core.future.polling.PollingFuture.result') + @mock.patch("google.api_core.future.polling.PollingFuture.result") def test_result_default_wo_state(self, result): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) @@ -859,11 +798,11 @@ def test_result_default_wo_state(self, result): begin.assert_called_once() result.assert_called_once_with(timeout=None) - @mock.patch('google.api_core.future.polling.PollingFuture.result') + @mock.patch("google.api_core.future.polling.PollingFuture.result") def test_result_explicit_w_state(self, result): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['status'] = {'state': 'DONE'} + job._properties["status"] = {"state": "DONE"} begin = job._begin = mock.Mock() timeout = 1 @@ -881,28 +820,20 @@ def test_cancelled_wo_error_result(self): def test_cancelled_w_error_result_not_stopped(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['status'] = { - 'errorResult': { - 'reason': 'other', - } - } + job._properties["status"] = {"errorResult": {"reason": "other"}} self.assertFalse(job.cancelled()) def test_cancelled_w_error_result_w_stopped(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) - job._properties['status'] = { - 'errorResult': { - 'reason': 'stopped', - } - } + job._properties["status"] = {"errorResult": {"reason": "stopped"}} self.assertTrue(job.cancelled()) class Test_JobConfig(unittest.TestCase): - JOB_TYPE = 'testing' + JOB_TYPE = "testing" @staticmethod def _get_target_class(): @@ -938,50 +869,48 @@ def test_fill_from_default_conflict(self): from google.cloud.bigquery import QueryJobConfig basic_job_config = QueryJobConfig() - conflicting_job_config = self._make_one('conflicting_job_type') + conflicting_job_config = self._make_one("conflicting_job_type") self.assertNotEqual( - basic_job_config._job_type, conflicting_job_config._job_type) + basic_job_config._job_type, conflicting_job_config._job_type + ) with self.assertRaises(TypeError): - basic_job_config._fill_from_default( - conflicting_job_config) + basic_job_config._fill_from_default(conflicting_job_config) - @mock.patch('google.cloud.bigquery._helpers._get_sub_prop') + @mock.patch("google.cloud.bigquery._helpers._get_sub_prop") def test__get_sub_prop_wo_default(self, _get_sub_prop): job_config = self._make_one() - key = 'key' - self.assertIs( - job_config._get_sub_prop(key), _get_sub_prop.return_value) + key = "key" + self.assertIs(job_config._get_sub_prop(key), _get_sub_prop.return_value) _get_sub_prop.assert_called_once_with( - job_config._properties, [self.JOB_TYPE, key], default=None) + job_config._properties, [self.JOB_TYPE, key], default=None + ) - @mock.patch('google.cloud.bigquery._helpers._get_sub_prop') + @mock.patch("google.cloud.bigquery._helpers._get_sub_prop") def test__get_sub_prop_w_default(self, _get_sub_prop): job_config = self._make_one() - key = 'key' - default = 'default' + key = "key" + default = "default" self.assertIs( - job_config._get_sub_prop(key, default=default), - _get_sub_prop.return_value) + job_config._get_sub_prop(key, default=default), _get_sub_prop.return_value + ) _get_sub_prop.assert_called_once_with( - job_config._properties, [self.JOB_TYPE, key], default=default) + job_config._properties, [self.JOB_TYPE, key], default=default + ) - 
@mock.patch('google.cloud.bigquery._helpers._set_sub_prop') + @mock.patch("google.cloud.bigquery._helpers._set_sub_prop") def test__set_sub_prop(self, _set_sub_prop): job_config = self._make_one() - key = 'key' - value = 'value' + key = "key" + value = "value" job_config._set_sub_prop(key, value) _set_sub_prop.assert_called_once_with( - job_config._properties, [self.JOB_TYPE, key], value) + job_config._properties, [self.JOB_TYPE, key], value + ) def test_to_api_repr(self): job_config = self._make_one() - expected = job_config._properties = { - self.JOB_TYPE: { - 'foo': 'bar', - } - } + expected = job_config._properties = {self.JOB_TYPE: {"foo": "bar"}} found = job_config.to_api_repr() self.assertEqual(found, expected) self.assertIsNot(found, expected) # copied @@ -996,15 +925,13 @@ def test_labels_miss(self): def test_labels_update_in_place(self): job_config = self._make_one() labels = job_config.labels - labels['foo'] = 'bar' # update in place - self.assertEqual(job_config.labels, {'foo': 'bar'}) + labels["foo"] = "bar" # update in place + self.assertEqual(job_config.labels, {"foo": "bar"}) def test_labels_hit(self): - labels = { - 'foo': 'bar', - } + labels = {"foo": "bar"} job_config = self._make_one() - job_config._properties['labels'] = labels + job_config._properties["labels"] = labels self.assertEqual(job_config.labels, labels) def test_labels_setter_invalid(self): @@ -1014,26 +941,24 @@ def test_labels_setter_invalid(self): job_config.labels = labels def test_labels_setter(self): - labels = { - 'foo': 'bar', - } + labels = {"foo": "bar"} job_config = self._make_one() job_config.labels = labels - self.assertEqual(job_config._properties['labels'], labels) + self.assertEqual(job_config._properties["labels"], labels) class _Base(object): from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.table import TableReference - PROJECT = 'project' - SOURCE1 = 'http://example.com/source1.csv' - DS_ID = 'dataset_id' + PROJECT = "project" + SOURCE1 = "http://example.com/source1.csv" + DS_ID = "dataset_id" DS_REF = DatasetReference(PROJECT, DS_ID) - TABLE_ID = 'table_id' + TABLE_ID = "table_id" TABLE_REF = TableReference(DS_REF, TABLE_ID) - JOB_ID = 'JOB_ID' - KMS_KEY_NAME = 'projects/1/locations/global/keyRings/1/cryptoKeys/1' + JOB_ID = "JOB_ID" + KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) @@ -1043,12 +968,11 @@ def _setUpConstants(self): from google.cloud._helpers import UTC self.WHEN_TS = 1437767599.006 - self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace( - tzinfo=UTC) - self.ETAG = 'ETAG' - self.FULL_JOB_ID = '%s:%s' % (self.PROJECT, self.JOB_ID) - self.RESOURCE_URL = 'http://example.com/path/to/resource' - self.USER_EMAIL = 'phred@example.com' + self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(tzinfo=UTC) + self.ETAG = "ETAG" + self.FULL_JOB_ID = "%s:%s" % (self.PROJECT, self.JOB_ID) + self.RESOURCE_URL = "http://example.com/path/to/resource" + self.USER_EMAIL = "phred@example.com" def _table_ref(self, table_id): from google.cloud.bigquery.table import TableReference @@ -1058,36 +982,26 @@ def _table_ref(self, table_id): def _make_resource(self, started=False, ended=False): self._setUpConstants() resource = { - 'configuration': { - self.JOB_TYPE: { - }, - }, - 'statistics': { - 'creationTime': self.WHEN_TS * 1000, - self.JOB_TYPE: { - } - }, - 'etag': self.ETAG, - 'id': self.FULL_JOB_ID, - 'jobReference': { - 
'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'selfLink': self.RESOURCE_URL, - 'user_email': self.USER_EMAIL, + "configuration": {self.JOB_TYPE: {}}, + "statistics": {"creationTime": self.WHEN_TS * 1000, self.JOB_TYPE: {}}, + "etag": self.ETAG, + "id": self.FULL_JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "selfLink": self.RESOURCE_URL, + "user_email": self.USER_EMAIL, } if started or ended: - resource['statistics']['startTime'] = self.WHEN_TS * 1000 + resource["statistics"]["startTime"] = self.WHEN_TS * 1000 if ended: - resource['statistics']['endTime'] = (self.WHEN_TS + 1000) * 1000 + resource["statistics"]["endTime"] = (self.WHEN_TS + 1000) * 1000 - if self.JOB_TYPE == 'query': - resource['configuration']['query']['destinationTable'] = { - 'projectId': self.PROJECT, - 'datasetId': '_temp_dataset', - 'tableId': '_temp_table', + if self.JOB_TYPE == "query": + resource["configuration"]["query"]["destinationTable"] = { + "projectId": self.PROJECT, + "datasetId": "_temp_dataset", + "tableId": "_temp_table", } return resource @@ -1111,50 +1025,52 @@ def _verifyInitialReadonlyProperties(self, job): def _verifyReadonlyResourceProperties(self, job, resource): from datetime import timedelta - statistics = resource.get('statistics', {}) + statistics = resource.get("statistics", {}) - if 'creationTime' in statistics: + if "creationTime" in statistics: self.assertEqual(job.created, self.WHEN) else: self.assertIsNone(job.created) - if 'startTime' in statistics: + if "startTime" in statistics: self.assertEqual(job.started, self.WHEN) else: self.assertIsNone(job.started) - if 'endTime' in statistics: + if "endTime" in statistics: self.assertEqual(job.ended, self.WHEN + timedelta(seconds=1000)) else: self.assertIsNone(job.ended) - if 'etag' in resource: + if "etag" in resource: self.assertEqual(job.etag, self.ETAG) else: self.assertIsNone(job.etag) - if 'selfLink' in resource: + if "selfLink" in resource: self.assertEqual(job.self_link, self.RESOURCE_URL) else: self.assertIsNone(job.self_link) - if 'user_email' in resource: + if "user_email" in resource: self.assertEqual(job.user_email, self.USER_EMAIL) else: self.assertIsNone(job.user_email) class TestLoadJobConfig(unittest.TestCase, _Base): - JOB_TYPE = 'load' + JOB_TYPE = "load" @staticmethod def _get_target_class(): from google.cloud.bigquery.job import LoadJobConfig + return LoadJobConfig def test_ctor_w_properties(self): config = self._get_target_class()( - allow_jagged_rows=True, allow_quoted_newlines=True) + allow_jagged_rows=True, allow_quoted_newlines=True + ) self.assertTrue(config.allow_jagged_rows) self.assertTrue(config.allow_quoted_newlines) @@ -1165,13 +1081,13 @@ def test_allow_jagged_rows_missing(self): def test_allow_jagged_rows_hit(self): config = self._get_target_class()() - config._properties['load']['allowJaggedRows'] = True + config._properties["load"]["allowJaggedRows"] = True self.assertTrue(config.allow_jagged_rows) def test_allow_jagged_rows_setter(self): config = self._get_target_class()() config.allow_jagged_rows = True - self.assertTrue(config._properties['load']['allowJaggedRows']) + self.assertTrue(config._properties["load"]["allowJaggedRows"]) def test_allow_quoted_newlines_missing(self): config = self._get_target_class()() @@ -1179,13 +1095,13 @@ def test_allow_quoted_newlines_missing(self): def test_allow_quoted_newlines_hit(self): config = self._get_target_class()() - config._properties['load']['allowQuotedNewlines'] = True + 
config._properties["load"]["allowQuotedNewlines"] = True self.assertTrue(config.allow_quoted_newlines) def test_allow_quoted_newlines_setter(self): config = self._get_target_class()() config.allow_quoted_newlines = True - self.assertTrue(config._properties['load']['allowQuotedNewlines']) + self.assertTrue(config._properties["load"]["allowQuotedNewlines"]) def test_autodetect_missing(self): config = self._get_target_class()() @@ -1193,13 +1109,13 @@ def test_autodetect_missing(self): def test_autodetect_hit(self): config = self._get_target_class()() - config._properties['load']['autodetect'] = True + config._properties["load"]["autodetect"] = True self.assertTrue(config.autodetect) def test_autodetect_setter(self): config = self._get_target_class()() config.autodetect = True - self.assertTrue(config._properties['load']['autodetect']) + self.assertTrue(config._properties["load"]["autodetect"]) def test_clustering_fields_miss(self): config = self._get_target_class()() @@ -1207,28 +1123,23 @@ def test_clustering_fields_miss(self): def test_clustering_fields_hit(self): config = self._get_target_class()() - fields = ['email', 'postal_code'] - config._properties['load']['clustering'] = { - 'fields': fields, - } + fields = ["email", "postal_code"] + config._properties["load"]["clustering"] = {"fields": fields} self.assertEqual(config.clustering_fields, fields) def test_clustering_fields_setter(self): - fields = ['email', 'postal_code'] + fields = ["email", "postal_code"] config = self._get_target_class()() config.clustering_fields = fields - self.assertEqual( - config._properties['load']['clustering'], {'fields': fields}) + self.assertEqual(config._properties["load"]["clustering"], {"fields": fields}) def test_clustering_fields_setter_w_none(self): config = self._get_target_class()() - fields = ['email', 'postal_code'] - config._properties['load']['clustering'] = { - 'fields': fields, - } + fields = ["email", "postal_code"] + config._properties["load"]["clustering"] = {"fields": fields} config.clustering_fields = None self.assertIsNone(config.clustering_fields) - self.assertNotIn('clustering', config._properties['load']) + self.assertNotIn("clustering", config._properties["load"]) def test_create_disposition_missing(self): config = self._get_target_class()() @@ -1239,7 +1150,7 @@ def test_create_disposition_hit(self): disposition = CreateDisposition.CREATE_IF_NEEDED config = self._get_target_class()() - config._properties['load']['createDisposition'] = disposition + config._properties["load"]["createDisposition"] = disposition self.assertEqual(config.create_disposition, disposition) def test_create_disposition_setter(self): @@ -1248,8 +1159,7 @@ def test_create_disposition_setter(self): disposition = CreateDisposition.CREATE_IF_NEEDED config = self._get_target_class()() config.create_disposition = disposition - self.assertEqual( - config._properties['load']['createDisposition'], disposition) + self.assertEqual(config._properties["load"]["createDisposition"], disposition) def test_destination_encryption_configuration_missing(self): config = self._get_target_class()() @@ -1258,146 +1168,135 @@ def test_destination_encryption_configuration_missing(self): def test_destination_encryption_configuration_hit(self): from google.cloud.bigquery.table import EncryptionConfiguration - kms_key_name = 'kms-key-name' + kms_key_name = "kms-key-name" encryption_configuration = EncryptionConfiguration(kms_key_name) config = self._get_target_class()() - config._properties['load']['destinationEncryptionConfiguration'] 
= { - 'kmsKeyName': kms_key_name, + config._properties["load"]["destinationEncryptionConfiguration"] = { + "kmsKeyName": kms_key_name } self.assertEqual( - config.destination_encryption_configuration, - encryption_configuration) + config.destination_encryption_configuration, encryption_configuration + ) def test_destination_encryption_configuration_setter(self): from google.cloud.bigquery.table import EncryptionConfiguration - kms_key_name = 'kms-key-name' + kms_key_name = "kms-key-name" encryption_configuration = EncryptionConfiguration(kms_key_name) config = self._get_target_class()() config.destination_encryption_configuration = encryption_configuration - expected = { - 'kmsKeyName': kms_key_name, - } + expected = {"kmsKeyName": kms_key_name} self.assertEqual( - config._properties['load']['destinationEncryptionConfiguration'], - expected) + config._properties["load"]["destinationEncryptionConfiguration"], expected + ) def test_destination_encryption_configuration_setter_w_none(self): - kms_key_name = 'kms-key-name' + kms_key_name = "kms-key-name" config = self._get_target_class()() - config._properties['load']['destinationEncryptionConfiguration'] = { - 'kmsKeyName': kms_key_name, + config._properties["load"]["destinationEncryptionConfiguration"] = { + "kmsKeyName": kms_key_name } config.destination_encryption_configuration = None self.assertIsNone(config.destination_encryption_configuration) self.assertNotIn( - 'destinationEncryptionConfiguration', config._properties['load']) + "destinationEncryptionConfiguration", config._properties["load"] + ) def test_destination_table_description_missing(self): config = self._get_target_class()() self.assertIsNone(config.destination_table_description) def test_destination_table_description_hit(self): - description = 'Description' + description = "Description" config = self._get_target_class()() - config._properties['load']['destinationTableProperties'] = { - 'description': description, + config._properties["load"]["destinationTableProperties"] = { + "description": description } - self.assertEqual( - config.destination_table_description, description) + self.assertEqual(config.destination_table_description, description) def test_destination_table_description_setter(self): - description = 'Description' + description = "Description" config = self._get_target_class()() config.destination_table_description = description - expected = { - 'description': description, - } + expected = {"description": description} self.assertEqual( - config._properties['load']['destinationTableProperties'], expected) + config._properties["load"]["destinationTableProperties"], expected + ) def test_destination_table_description_setter_w_fn_already(self): - description = 'Description' - friendly_name = 'Friendly Name' + description = "Description" + friendly_name = "Friendly Name" config = self._get_target_class()() - config._properties['load']['destinationTableProperties'] = { - 'friendlyName': friendly_name, + config._properties["load"]["destinationTableProperties"] = { + "friendlyName": friendly_name } config.destination_table_description = description - expected = { - 'friendlyName': friendly_name, - 'description': description, - } + expected = {"friendlyName": friendly_name, "description": description} self.assertEqual( - config._properties['load']['destinationTableProperties'], expected) + config._properties["load"]["destinationTableProperties"], expected + ) def test_destination_table_description_w_none(self): - description = 'Description' - friendly_name = 'Friendly 
Name' + description = "Description" + friendly_name = "Friendly Name" config = self._get_target_class()() - config._properties['load']['destinationTableProperties'] = { - 'description': description, - 'friendlyName': friendly_name, + config._properties["load"]["destinationTableProperties"] = { + "description": description, + "friendlyName": friendly_name, } config.destination_table_description = None - expected = { - 'friendlyName': friendly_name, - } + expected = {"friendlyName": friendly_name} self.assertEqual( - config._properties['load']['destinationTableProperties'], expected) + config._properties["load"]["destinationTableProperties"], expected + ) def test_destination_table_friendly_name_missing(self): config = self._get_target_class()() self.assertIsNone(config.destination_table_friendly_name) def test_destination_table_friendly_name_hit(self): - friendly_name = 'Friendly Name' + friendly_name = "Friendly Name" config = self._get_target_class()() - config._properties['load']['destinationTableProperties'] = { - 'friendlyName': friendly_name, + config._properties["load"]["destinationTableProperties"] = { + "friendlyName": friendly_name } - self.assertEqual( - config.destination_table_friendly_name, friendly_name) + self.assertEqual(config.destination_table_friendly_name, friendly_name) def test_destination_table_friendly_name_setter(self): - friendly_name = 'Friendly Name' + friendly_name = "Friendly Name" config = self._get_target_class()() config.destination_table_friendly_name = friendly_name - expected = { - 'friendlyName': friendly_name, - } + expected = {"friendlyName": friendly_name} self.assertEqual( - config._properties['load']['destinationTableProperties'], expected) + config._properties["load"]["destinationTableProperties"], expected + ) def test_destination_table_friendly_name_setter_w_descr_already(self): - friendly_name = 'Friendly Name' - description = 'Description' + friendly_name = "Friendly Name" + description = "Description" config = self._get_target_class()() - config._properties['load']['destinationTableProperties'] = { - 'description': description, + config._properties["load"]["destinationTableProperties"] = { + "description": description } config.destination_table_friendly_name = friendly_name - expected = { - 'friendlyName': friendly_name, - 'description': description, - } + expected = {"friendlyName": friendly_name, "description": description} self.assertEqual( - config._properties['load']['destinationTableProperties'], expected) + config._properties["load"]["destinationTableProperties"], expected + ) def test_destination_table_friendly_name_w_none(self): - friendly_name = 'Friendly Name' - description = 'Description' + friendly_name = "Friendly Name" + description = "Description" config = self._get_target_class()() - config._properties['load']['destinationTableProperties'] = { - 'description': description, - 'friendlyName': friendly_name, + config._properties["load"]["destinationTableProperties"] = { + "description": description, + "friendlyName": friendly_name, } config.destination_table_friendly_name = None - expected = { - 'description': description, - } + expected = {"description": description} self.assertEqual( - config._properties['load']['destinationTableProperties'], expected) + config._properties["load"]["destinationTableProperties"], expected + ) def test_encoding_missing(self): config = self._get_target_class()() @@ -1408,7 +1307,7 @@ def test_encoding_hit(self): encoding = Encoding.UTF_8 config = self._get_target_class()() - 
config._properties['load']['encoding'] = encoding + config._properties["load"]["encoding"] = encoding self.assertEqual(config.encoding, encoding) def test_encoding_setter(self): @@ -1417,25 +1316,23 @@ def test_encoding_setter(self): encoding = Encoding.UTF_8 config = self._get_target_class()() config.encoding = encoding - self.assertEqual( - config._properties['load']['encoding'], encoding) + self.assertEqual(config._properties["load"]["encoding"], encoding) def test_field_delimiter_missing(self): config = self._get_target_class()() self.assertIsNone(config.field_delimiter) def test_field_delimiter_hit(self): - field_delimiter = '|' + field_delimiter = "|" config = self._get_target_class()() - config._properties['load']['fieldDelimiter'] = field_delimiter + config._properties["load"]["fieldDelimiter"] = field_delimiter self.assertEqual(config.field_delimiter, field_delimiter) def test_field_delimiter_setter(self): - field_delimiter = '|' + field_delimiter = "|" config = self._get_target_class()() config.field_delimiter = field_delimiter - self.assertEqual( - config._properties['load']['fieldDelimiter'], field_delimiter) + self.assertEqual(config._properties["load"]["fieldDelimiter"], field_delimiter) def test_ignore_unknown_values_missing(self): config = self._get_target_class()() @@ -1443,13 +1340,13 @@ def test_ignore_unknown_values_missing(self): def test_ignore_unknown_values_hit(self): config = self._get_target_class()() - config._properties['load']['ignoreUnknownValues'] = True + config._properties["load"]["ignoreUnknownValues"] = True self.assertTrue(config.ignore_unknown_values) def test_ignore_unknown_values_setter(self): config = self._get_target_class()() config.ignore_unknown_values = True - self.assertTrue(config._properties['load']['ignoreUnknownValues']) + self.assertTrue(config._properties["load"]["ignoreUnknownValues"]) def test_max_bad_records_missing(self): config = self._get_target_class()() @@ -1458,32 +1355,30 @@ def test_max_bad_records_missing(self): def test_max_bad_records_hit(self): max_bad_records = 13 config = self._get_target_class()() - config._properties['load']['maxBadRecords'] = max_bad_records + config._properties["load"]["maxBadRecords"] = max_bad_records self.assertEqual(config.max_bad_records, max_bad_records) def test_max_bad_records_setter(self): max_bad_records = 13 config = self._get_target_class()() config.max_bad_records = max_bad_records - self.assertEqual( - config._properties['load']['maxBadRecords'], max_bad_records) + self.assertEqual(config._properties["load"]["maxBadRecords"], max_bad_records) def test_null_marker_missing(self): config = self._get_target_class()() self.assertIsNone(config.null_marker) def test_null_marker_hit(self): - null_marker = 'XXX' + null_marker = "XXX" config = self._get_target_class()() - config._properties['load']['nullMarker'] = null_marker + config._properties["load"]["nullMarker"] = null_marker self.assertEqual(config.null_marker, null_marker) def test_null_marker_setter(self): - null_marker = 'XXX' + null_marker = "XXX" config = self._get_target_class()() config.null_marker = null_marker - self.assertEqual( - config._properties['load']['nullMarker'], null_marker) + self.assertEqual(config._properties["load"]["nullMarker"], null_marker) def test_quote_character_missing(self): config = self._get_target_class()() @@ -1492,15 +1387,14 @@ def test_quote_character_missing(self): def test_quote_character_hit(self): quote_character = "'" config = self._get_target_class()() - config._properties['load']['quote'] = 
quote_character + config._properties["load"]["quote"] = quote_character self.assertEqual(config.quote_character, quote_character) def test_quote_character_setter(self): quote_character = "'" config = self._get_target_class()() config.quote_character = quote_character - self.assertEqual( - config._properties['load']['quote'], quote_character) + self.assertEqual(config._properties["load"]["quote"], quote_character) def test_schema_missing(self): config = self._get_target_class()() @@ -1511,17 +1405,14 @@ def test_schema_hit(self): config = self._get_target_class()() all_props_repr = { - 'mode': 'REQUIRED', - 'name': 'foo', - 'type': 'INTEGER', - 'description': 'Foo', + "mode": "REQUIRED", + "name": "foo", + "type": "INTEGER", + "description": "Foo", } - minimal_repr = { - 'name': 'bar', - 'type': 'STRING', - } - config._properties['load']['schema'] = { - 'fields': [all_props_repr, minimal_repr], + minimal_repr = {"name": "bar", "type": "STRING"} + config._properties["load"]["schema"] = { + "fields": [all_props_repr, minimal_repr] } all_props, minimal = config.schema self.assertEqual(all_props, SchemaField.from_api_repr(all_props_repr)) @@ -1531,24 +1422,24 @@ def test_schema_setter(self): from google.cloud.bigquery.schema import SchemaField config = self._get_target_class()() - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") config.schema = [full_name, age] full_name_repr = { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None, + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, } age_repr = { - 'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None, + "name": "age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, } self.assertEqual( - config._properties['load']['schema'], - {'fields': [full_name_repr, age_repr]}) + config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]} + ) def test_schema_update_options_missing(self): config = self._get_target_class()() @@ -1562,7 +1453,7 @@ def test_schema_update_options_hit(self): SchemaUpdateOption.ALLOW_FIELD_RELAXATION, ] config = self._get_target_class()() - config._properties['load']['schemaUpdateOptions'] = options + config._properties["load"]["schemaUpdateOptions"] = options self.assertEqual(config.schema_update_options, options) def test_schema_update_options_setter(self): @@ -1574,8 +1465,7 @@ def test_schema_update_options_setter(self): ] config = self._get_target_class()() config.schema_update_options = options - self.assertEqual( - config._properties['load']['schemaUpdateOptions'], options) + self.assertEqual(config._properties["load"]["schemaUpdateOptions"], options) def test_skip_leading_rows_missing(self): config = self._get_target_class()() @@ -1584,13 +1474,13 @@ def test_skip_leading_rows_missing(self): def test_skip_leading_rows_hit_w_str(self): skip_leading_rows = 1 config = self._get_target_class()() - config._properties['load']['skipLeadingRows'] = str(skip_leading_rows) + config._properties["load"]["skipLeadingRows"] = str(skip_leading_rows) self.assertEqual(config.skip_leading_rows, skip_leading_rows) def test_skip_leading_rows_hit_w_integer(self): skip_leading_rows = 1 config = self._get_target_class()() - config._properties['load']['skipLeadingRows'] = skip_leading_rows + 
config._properties["load"]["skipLeadingRows"] = skip_leading_rows self.assertEqual(config.skip_leading_rows, skip_leading_rows) def test_skip_leading_rows_setter(self): @@ -1598,8 +1488,8 @@ def test_skip_leading_rows_setter(self): config = self._get_target_class()() config.skip_leading_rows = skip_leading_rows self.assertEqual( - config._properties['load']['skipLeadingRows'], - str(skip_leading_rows)) + config._properties["load"]["skipLeadingRows"], str(skip_leading_rows) + ) def test_source_format_missing(self): config = self._get_target_class()() @@ -1610,7 +1500,7 @@ def test_source_format_hit(self): source_format = SourceFormat.CSV config = self._get_target_class()() - config._properties['load']['sourceFormat'] = source_format + config._properties["load"]["sourceFormat"] = source_format self.assertEqual(config.source_format, source_format) def test_source_format_setter(self): @@ -1619,8 +1509,7 @@ def test_source_format_setter(self): source_format = SourceFormat.CSV config = self._get_target_class()() config.source_format = source_format - self.assertEqual( - config._properties['load']['sourceFormat'], source_format) + self.assertEqual(config._properties["load"]["sourceFormat"], source_format) def test_time_partitioning_miss(self): config = self._get_target_class()() @@ -1630,14 +1519,14 @@ def test_time_partitioning_hit(self): from google.cloud.bigquery.table import TimePartitioning from google.cloud.bigquery.table import TimePartitioningType - field = 'creation_date' + field = "creation_date" year_ms = 86400 * 1000 * 365 config = self._get_target_class()() - config._properties['load']['timePartitioning'] = { - 'type': TimePartitioningType.DAY, - 'field': field, - 'expirationMs': str(year_ms), - 'requirePartitionFilter': False, + config._properties["load"]["timePartitioning"] = { + "type": TimePartitioningType.DAY, + "field": field, + "expirationMs": str(year_ms), + "requirePartitionFilter": False, } expected = TimePartitioning( type_=TimePartitioningType.DAY, @@ -1651,7 +1540,7 @@ def test_time_partitioning_setter(self): from google.cloud.bigquery.table import TimePartitioning from google.cloud.bigquery.table import TimePartitioningType - field = 'creation_date' + field = "creation_date" year_ms = 86400 * 1000 * 365 time_partitioning = TimePartitioning( type_=TimePartitioningType.DAY, @@ -1662,29 +1551,28 @@ def test_time_partitioning_setter(self): config = self._get_target_class()() config.time_partitioning = time_partitioning expected = { - 'type': TimePartitioningType.DAY, - 'field': field, - 'expirationMs': str(year_ms), - 'requirePartitionFilter': False, + "type": TimePartitioningType.DAY, + "field": field, + "expirationMs": str(year_ms), + "requirePartitionFilter": False, } - self.assertEqual( - config._properties['load']['timePartitioning'], expected) + self.assertEqual(config._properties["load"]["timePartitioning"], expected) def test_time_partitioning_setter_w_none(self): from google.cloud.bigquery.table import TimePartitioningType - field = 'creation_date' + field = "creation_date" year_ms = 86400 * 1000 * 365 config = self._get_target_class()() - config._properties['load']['timePartitioning'] = { - 'type': TimePartitioningType.DAY, - 'field': field, - 'expirationMs': str(year_ms), - 'requirePartitionFilter': False, + config._properties["load"]["timePartitioning"] = { + "type": TimePartitioningType.DAY, + "field": field, + "expirationMs": str(year_ms), + "requirePartitionFilter": False, } config.time_partitioning = None self.assertIsNone(config.time_partitioning) - 
self.assertNotIn('timePartitioning', config._properties['load']) + self.assertNotIn("timePartitioning", config._properties["load"]) def test_write_disposition_missing(self): config = self._get_target_class()() @@ -1695,7 +1583,7 @@ def test_write_disposition_hit(self): write_disposition = WriteDisposition.WRITE_TRUNCATE config = self._get_target_class()() - config._properties['load']['writeDisposition'] = write_disposition + config._properties["load"]["writeDisposition"] = write_disposition self.assertEqual(config.write_disposition, write_disposition) def test_write_disposition_setter(self): @@ -1705,11 +1593,12 @@ def test_write_disposition_setter(self): config = self._get_target_class()() config.write_disposition = write_disposition self.assertEqual( - config._properties['load']['writeDisposition'], write_disposition) + config._properties["load"]["writeDisposition"], write_disposition + ) class TestLoadJob(unittest.TestCase, _Base): - JOB_TYPE = 'load' + JOB_TYPE = "load" @staticmethod def _get_target_class(): @@ -1725,134 +1614,117 @@ def _setUpConstants(self): self.OUTPUT_ROWS = 345 def _make_resource(self, started=False, ended=False): - resource = super(TestLoadJob, self)._make_resource( - started, ended) - config = resource['configuration']['load'] - config['sourceUris'] = [self.SOURCE1] - config['destinationTable'] = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + resource = super(TestLoadJob, self)._make_resource(started, ended) + config = resource["configuration"]["load"] + config["sourceUris"] = [self.SOURCE1] + config["destinationTable"] = { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, } if ended: - resource['status'] = {'state': 'DONE'} - resource['statistics']['load']['inputFiles'] = self.INPUT_FILES - resource['statistics']['load']['inputFileBytes'] = self.INPUT_BYTES - resource['statistics']['load']['outputBytes'] = self.OUTPUT_BYTES - resource['statistics']['load']['outputRows'] = self.OUTPUT_ROWS + resource["status"] = {"state": "DONE"} + resource["statistics"]["load"]["inputFiles"] = self.INPUT_FILES + resource["statistics"]["load"]["inputFileBytes"] = self.INPUT_BYTES + resource["statistics"]["load"]["outputBytes"] = self.OUTPUT_BYTES + resource["statistics"]["load"]["outputRows"] = self.OUTPUT_ROWS return resource def _verifyBooleanConfigProperties(self, job, config): - if 'allowJaggedRows' in config: - self.assertEqual(job.allow_jagged_rows, - config['allowJaggedRows']) + if "allowJaggedRows" in config: + self.assertEqual(job.allow_jagged_rows, config["allowJaggedRows"]) else: self.assertIsNone(job.allow_jagged_rows) - if 'allowQuotedNewlines' in config: - self.assertEqual(job.allow_quoted_newlines, - config['allowQuotedNewlines']) + if "allowQuotedNewlines" in config: + self.assertEqual(job.allow_quoted_newlines, config["allowQuotedNewlines"]) else: self.assertIsNone(job.allow_quoted_newlines) - if 'autodetect' in config: - self.assertEqual( - job.autodetect, config['autodetect']) + if "autodetect" in config: + self.assertEqual(job.autodetect, config["autodetect"]) else: self.assertIsNone(job.autodetect) - if 'ignoreUnknownValues' in config: - self.assertEqual(job.ignore_unknown_values, - config['ignoreUnknownValues']) + if "ignoreUnknownValues" in config: + self.assertEqual(job.ignore_unknown_values, config["ignoreUnknownValues"]) else: self.assertIsNone(job.ignore_unknown_values) def _verifyEnumConfigProperties(self, job, config): - if 'createDisposition' in config: - 
self.assertEqual(job.create_disposition, - config['createDisposition']) + if "createDisposition" in config: + self.assertEqual(job.create_disposition, config["createDisposition"]) else: self.assertIsNone(job.create_disposition) - if 'encoding' in config: - self.assertEqual(job.encoding, - config['encoding']) + if "encoding" in config: + self.assertEqual(job.encoding, config["encoding"]) else: self.assertIsNone(job.encoding) - if 'sourceFormat' in config: - self.assertEqual(job.source_format, - config['sourceFormat']) + if "sourceFormat" in config: + self.assertEqual(job.source_format, config["sourceFormat"]) else: self.assertIsNone(job.source_format) - if 'writeDisposition' in config: - self.assertEqual(job.write_disposition, - config['writeDisposition']) + if "writeDisposition" in config: + self.assertEqual(job.write_disposition, config["writeDisposition"]) else: self.assertIsNone(job.write_disposition) - if 'schemaUpdateOptions' in config: - self.assertEqual( - job.schema_update_options, config['schemaUpdateOptions']) + if "schemaUpdateOptions" in config: + self.assertEqual(job.schema_update_options, config["schemaUpdateOptions"]) else: self.assertIsNone(job.schema_update_options) def _verifyResourceProperties(self, job, resource): self._verifyReadonlyResourceProperties(job, resource) - config = resource.get('configuration', {}).get('load') + config = resource.get("configuration", {}).get("load") self._verifyBooleanConfigProperties(job, config) self._verifyEnumConfigProperties(job, config) - self.assertEqual(job.source_uris, config['sourceUris']) + self.assertEqual(job.source_uris, config["sourceUris"]) - table_ref = config['destinationTable'] - self.assertEqual(job.destination.project, table_ref['projectId']) - self.assertEqual(job.destination.dataset_id, table_ref['datasetId']) - self.assertEqual(job.destination.table_id, table_ref['tableId']) + table_ref = config["destinationTable"] + self.assertEqual(job.destination.project, table_ref["projectId"]) + self.assertEqual(job.destination.dataset_id, table_ref["datasetId"]) + self.assertEqual(job.destination.table_id, table_ref["tableId"]) - if 'fieldDelimiter' in config: - self.assertEqual(job.field_delimiter, - config['fieldDelimiter']) + if "fieldDelimiter" in config: + self.assertEqual(job.field_delimiter, config["fieldDelimiter"]) else: self.assertIsNone(job.field_delimiter) - if 'maxBadRecords' in config: - self.assertEqual(job.max_bad_records, - config['maxBadRecords']) + if "maxBadRecords" in config: + self.assertEqual(job.max_bad_records, config["maxBadRecords"]) else: self.assertIsNone(job.max_bad_records) - if 'nullMarker' in config: - self.assertEqual(job.null_marker, - config['nullMarker']) + if "nullMarker" in config: + self.assertEqual(job.null_marker, config["nullMarker"]) else: self.assertIsNone(job.null_marker) - if 'quote' in config: - self.assertEqual(job.quote_character, - config['quote']) + if "quote" in config: + self.assertEqual(job.quote_character, config["quote"]) else: self.assertIsNone(job.quote_character) - if 'skipLeadingRows' in config: - self.assertEqual(str(job.skip_leading_rows), - config['skipLeadingRows']) + if "skipLeadingRows" in config: + self.assertEqual(str(job.skip_leading_rows), config["skipLeadingRows"]) else: self.assertIsNone(job.skip_leading_rows) - if 'destinationEncryptionConfiguration' in config: + if "destinationEncryptionConfiguration" in config: self.assertIsNotNone(job.destination_encryption_configuration) self.assertEqual( job.destination_encryption_configuration.kms_key_name, - 
config['destinationEncryptionConfiguration']['kmsKeyName']) + config["destinationEncryptionConfiguration"]["kmsKeyName"], + ) else: self.assertIsNone(job.destination_encryption_configuration) def test_ctor(self): client = _make_client(project=self.PROJECT) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, - client) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) self.assertIs(job.destination, self.TABLE_REF) self.assertEqual(list(job.source_uris), [self.SOURCE1]) self.assertIs(job._client, client) self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual( - job.path, - '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID)) + self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) self._verifyInitialReadonlyProperties(job) @@ -1887,23 +1759,23 @@ def test_ctor_w_config(self): from google.cloud.bigquery.job import LoadJobConfig client = _make_client(project=self.PROJECT) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") config = LoadJobConfig() config.schema = [full_name, age] - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, - client, config) + job = self._make_one( + self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client, config + ) self.assertEqual(job.schema, [full_name, age]) def test_ctor_w_job_reference(self): from google.cloud.bigquery import job client = _make_client(project=self.PROJECT) - job_ref = job._JobReference(self.JOB_ID, 'alternative-project', 'US') - load_job = self._make_one( - job_ref, [self.SOURCE1], self.TABLE_REF, client) - self.assertEqual(load_job.project, 'alternative-project') - self.assertEqual(load_job.location, 'US') + job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") + load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) + self.assertEqual(load_job.project, "alternative-project") + self.assertEqual(load_job.location, "US") def test_done(self): client = _make_client(project=self.PROJECT) @@ -1923,19 +1795,18 @@ def test_result(self): def test_result_invokes_begin(self): begun_resource = self._make_resource() done_resource = copy.deepcopy(begun_resource) - done_resource['status'] = {'state': 'DONE'} + done_resource["status"] = {"state": "DONE"} connection = _make_connection(begun_resource, done_resource) client = _make_client(self.PROJECT) client._connection = connection - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, - client) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) job.result() self.assertEqual(len(connection.api_request.call_args_list), 2) begin_request, reload_request = connection.api_request.call_args_list - self.assertEqual(begin_request[1]['method'], 'POST') - self.assertEqual(reload_request[1]['method'], 'GET') + self.assertEqual(begin_request[1]["method"], "POST") + self.assertEqual(reload_request[1]["method"], "GET") def test_schema_setter_non_list(self): from google.cloud.bigquery.job import LoadJobConfig @@ -1949,7 +1820,7 @@ def test_schema_setter_invalid_field(self): from google.cloud.bigquery.schema import SchemaField config = LoadJobConfig() - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") with self.assertRaises(ValueError): config.schema = [full_name, object()] @@ -1958,8 +1829,8 @@ def 
test_schema_setter(self): from google.cloud.bigquery.schema import SchemaField config = LoadJobConfig() - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") config.schema = [full_name, age] self.assertEqual(config.schema, [full_name, age]) @@ -1971,29 +1842,29 @@ def test_props_set_by_server(self): CREATED = datetime.datetime(2015, 8, 11, 12, 13, 22, tzinfo=UTC) STARTED = datetime.datetime(2015, 8, 11, 13, 47, 15, tzinfo=UTC) ENDED = datetime.datetime(2015, 8, 11, 14, 47, 15, tzinfo=UTC) - FULL_JOB_ID = '%s:%s' % (self.PROJECT, self.JOB_ID) - URL = 'http://example.com/projects/%s/jobs/%s' % ( - self.PROJECT, self.JOB_ID) - EMAIL = 'phred@example.com' - ERROR_RESULT = {'debugInfo': 'DEBUG', - 'location': 'LOCATION', - 'message': 'MESSAGE', - 'reason': 'REASON'} + FULL_JOB_ID = "%s:%s" % (self.PROJECT, self.JOB_ID) + URL = "http://example.com/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) + EMAIL = "phred@example.com" + ERROR_RESULT = { + "debugInfo": "DEBUG", + "location": "LOCATION", + "message": "MESSAGE", + "reason": "REASON", + } client = _make_client(project=self.PROJECT) - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) - job._properties['etag'] = 'ETAG' - job._properties['id'] = FULL_JOB_ID - job._properties['selfLink'] = URL - job._properties['user_email'] = EMAIL - - statistics = job._properties['statistics'] = {} - statistics['creationTime'] = _millis(CREATED) - statistics['startTime'] = _millis(STARTED) - statistics['endTime'] = _millis(ENDED) - - self.assertEqual(job.etag, 'ETAG') + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) + job._properties["etag"] = "ETAG" + job._properties["id"] = FULL_JOB_ID + job._properties["selfLink"] = URL + job._properties["user_email"] = EMAIL + + statistics = job._properties["statistics"] = {} + statistics["creationTime"] = _millis(CREATED) + statistics["startTime"] = _millis(STARTED) + statistics["endTime"] = _millis(ENDED) + + self.assertEqual(job.etag, "ETAG") self.assertEqual(job.self_link, URL) self.assertEqual(job.user_email, EMAIL) @@ -2004,30 +1875,30 @@ def test_props_set_by_server(self): # running jobs have no load stats not yet set. 
self.assertIsNone(job.output_bytes) - load_stats = statistics['load'] = {} - load_stats['inputFileBytes'] = 12345 - load_stats['inputFiles'] = 1 - load_stats['outputBytes'] = 23456 - load_stats['outputRows'] = 345 + load_stats = statistics["load"] = {} + load_stats["inputFileBytes"] = 12345 + load_stats["inputFiles"] = 1 + load_stats["outputBytes"] = 23456 + load_stats["outputRows"] = 345 self.assertEqual(job.input_file_bytes, 12345) self.assertEqual(job.input_files, 1) self.assertEqual(job.output_bytes, 23456) self.assertEqual(job.output_rows, 345) - status = job._properties['status'] = {} + status = job._properties["status"] = {} self.assertIsNone(job.error_result) self.assertIsNone(job.errors) self.assertIsNone(job.state) - status['errorResult'] = ERROR_RESULT - status['errors'] = [ERROR_RESULT] - status['state'] = 'STATE' + status["errorResult"] = ERROR_RESULT + status["errors"] = [ERROR_RESULT] + status["state"] = "STATE" self.assertEqual(job.error_result, ERROR_RESULT) self.assertEqual(job.errors, [ERROR_RESULT]) - self.assertEqual(job.state, 'STATE') + self.assertEqual(job.state, "STATE") def test_from_api_repr_missing_identity(self): self._setUpConstants() @@ -2041,11 +1912,8 @@ def test_from_api_repr_missing_config(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': '%s:%s' % (self.PROJECT, self.JOB_ID), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - } + "id": "%s:%s" % (self.PROJECT, self.JOB_ID), + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, } klass = self._get_target_class() with self.assertRaises(KeyError): @@ -2055,18 +1923,15 @@ def test_from_api_repr_bare(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.FULL_JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'load': { - 'sourceUris': [self.SOURCE1], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "id": self.FULL_JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "load": { + "sourceUris": [self.SOURCE1], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, } }, @@ -2080,22 +1945,19 @@ def test_from_api_with_encryption(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.FULL_JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'load': { - 'sourceUris': [self.SOURCE1], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "id": self.FULL_JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "load": { + "sourceUris": [self.SOURCE1], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, + }, + "destinationEncryptionConfiguration": { + "kmsKeyName": self.KMS_KEY_NAME }, - 'destinationEncryptionConfiguration': { - 'kmsKeyName': self.KMS_KEY_NAME - } } }, } @@ -2109,8 +1971,8 @@ def test_from_api_repr_w_properties(self): client = _make_client(project=self.PROJECT) RESOURCE = self._make_resource() - load_config = RESOURCE['configuration']['load'] - load_config['createDisposition'] = CreateDisposition.CREATE_IF_NEEDED + load_config = RESOURCE["configuration"]["load"] + load_config["createDisposition"] = 
CreateDisposition.CREATE_IF_NEEDED klass = self._get_target_class() job = klass.from_api_repr(RESOURCE, client=client) self.assertIs(job._client, client) @@ -2119,9 +1981,8 @@ def test_from_api_repr_w_properties(self): def test_begin_w_already_running(self): conn = _make_connection() client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, - client) - job._properties['status'] = {'state': 'RUNNING'} + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) + job._properties["status"] = {"state": "RUNNING"} with self.assertRaises(ValueError): job._begin() @@ -2129,78 +1990,70 @@ def test_begin_w_already_running(self): def test_begin_w_bound_client(self): RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, - client) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) job._begin() conn.api_request.assert_called_once_with( - method='POST', - path='/projects/{}/jobs'.format(self.PROJECT), + method="POST", + path="/projects/{}/jobs".format(self.PROJECT), data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'load': { - 'sourceUris': [self.SOURCE1], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "load": { + "sourceUris": [self.SOURCE1], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - }, + } }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_autodetect(self): from google.cloud.bigquery.job import LoadJobConfig - path = '/projects/{}/jobs'.format(self.PROJECT) + path = "/projects/{}/jobs".format(self.PROJECT) resource = self._make_resource() - resource['configuration']['load']['autodetect'] = True + resource["configuration"]["load"]["autodetect"] = True # Ensure None for missing server-set props - del resource['statistics']['creationTime'] - del resource['etag'] - del resource['selfLink'] - del resource['user_email'] + del resource["statistics"]["creationTime"] + del resource["etag"] + del resource["selfLink"] + del resource["user_email"] conn = _make_connection(resource) client = _make_client(project=self.PROJECT, connection=conn) config = LoadJobConfig() config.autodetect = True - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, - client, config) + job = self._make_one( + self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client, config + ) job._begin() sent = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'load': { - 'sourceUris': [self.SOURCE1], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "load": { + "sourceUris": [self.SOURCE1], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 
'autodetect': True - }, + "autodetect": True, + } }, } - conn.api_request.assert_called_once_with( - method='POST', - path=path, - data=sent) + conn.api_request.assert_called_once_with(method="POST", path=path, data=sent) self._verifyResourceProperties(job, resource) def test_begin_w_alternate_client(self): @@ -2210,283 +2063,262 @@ def test_begin_w_alternate_client(self): from google.cloud.bigquery.job import WriteDisposition from google.cloud.bigquery.schema import SchemaField - PATH = '/projects/%s/jobs' % (self.PROJECT,) + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource(ended=True) LOAD_CONFIGURATION = { - 'sourceUris': [self.SOURCE1], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_ID, + "sourceUris": [self.SOURCE1], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_ID, }, - 'allowJaggedRows': True, - 'allowQuotedNewlines': True, - 'createDisposition': CreateDisposition.CREATE_NEVER, - 'encoding': 'ISO-8559-1', - 'fieldDelimiter': '|', - 'ignoreUnknownValues': True, - 'maxBadRecords': 100, - 'nullMarker': r'\N', - 'quote': "'", - 'skipLeadingRows': '1', - 'sourceFormat': 'CSV', - 'writeDisposition': WriteDisposition.WRITE_TRUNCATE, - 'schema': {'fields': [ - { - 'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None, - }, - { - 'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None, - }, - ]}, - 'schemaUpdateOptions': [ - SchemaUpdateOption.ALLOW_FIELD_ADDITION, - ], + "allowJaggedRows": True, + "allowQuotedNewlines": True, + "createDisposition": CreateDisposition.CREATE_NEVER, + "encoding": "ISO-8559-1", + "fieldDelimiter": "|", + "ignoreUnknownValues": True, + "maxBadRecords": 100, + "nullMarker": r"\N", + "quote": "'", + "skipLeadingRows": "1", + "sourceFormat": "CSV", + "writeDisposition": WriteDisposition.WRITE_TRUNCATE, + "schema": { + "fields": [ + { + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + }, + { + "name": "age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, + }, + ] + }, + "schemaUpdateOptions": [SchemaUpdateOption.ALLOW_FIELD_ADDITION], } - RESOURCE['configuration']['load'] = LOAD_CONFIGURATION + RESOURCE["configuration"]["load"] = LOAD_CONFIGURATION conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection(RESOURCE) client2 = _make_client(project=self.PROJECT, connection=conn2) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") config = LoadJobConfig() config.schema = [full_name, age] - job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, - client1, config) + job = self._make_one( + self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1, config + ) config.allow_jagged_rows = True config.allow_quoted_newlines = True config.create_disposition = CreateDisposition.CREATE_NEVER - config.encoding = 'ISO-8559-1' - config.field_delimiter = '|' + config.encoding = "ISO-8559-1" + config.field_delimiter = "|" config.ignore_unknown_values = True config.max_bad_records = 100 - config.null_marker = r'\N' + config.null_marker = r"\N" config.quote_character = "'" config.skip_leading_rows = 1 - config.source_format = 'CSV' + config.source_format = "CSV" config.write_disposition = 
WriteDisposition.WRITE_TRUNCATE - config.schema_update_options = [ - SchemaUpdateOption.ALLOW_FIELD_ADDITION, - ] + config.schema_update_options = [SchemaUpdateOption.ALLOW_FIELD_ADDITION] job._begin(client=client2) conn1.api_request.assert_not_called() self.assertEqual(len(conn2.api_request.call_args_list), 1) req = conn2.api_request.call_args_list[0] - self.assertEqual(req[1]['method'], 'POST') - self.assertEqual(req[1]['path'], PATH) + self.assertEqual(req[1]["method"], "POST") + self.assertEqual(req[1]["path"], PATH) SENT = { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'load': LOAD_CONFIGURATION, - }, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": {"load": LOAD_CONFIGURATION}, } self.maxDiff = None - self.assertEqual(req[1]['data'], SENT) + self.assertEqual(req[1]["data"], SENT) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_job_reference(self): from google.cloud.bigquery import job resource = self._make_resource() - resource['jobReference']['projectId'] = 'alternative-project' - resource['jobReference']['location'] = 'US' - job_ref = job._JobReference(self.JOB_ID, 'alternative-project', 'US') + resource["jobReference"]["projectId"] = "alternative-project" + resource["jobReference"]["location"] = "US" + job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") conn = _make_connection(resource) client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one( - job_ref, [self.SOURCE1], self.TABLE_REF, client) + load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) load_job._begin() conn.api_request.assert_called_once() _, request = conn.api_request.call_args - self.assertEqual(request['method'], 'POST') - self.assertEqual( - request['path'], '/projects/alternative-project/jobs') + self.assertEqual(request["method"], "POST") + self.assertEqual(request["path"], "/projects/alternative-project/jobs") self.assertEqual( - request['data']['jobReference']['projectId'], - 'alternative-project') - self.assertEqual(request['data']['jobReference']['location'], 'US') - self.assertEqual(request['data']['jobReference']['jobId'], self.JOB_ID) + request["data"]["jobReference"]["projectId"], "alternative-project" + ) + self.assertEqual(request["data"]["jobReference"]["location"], "US") + self.assertEqual(request["data"]["jobReference"]["jobId"], self.JOB_ID) def test_exists_miss_w_bound_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn = _make_connection() client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) self.assertFalse(job.exists()) conn.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + method="GET", path=PATH, query_params={"fields": "id"} + ) def test_exists_hit_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection({}) client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) + job = self._make_one(self.JOB_ID, [self.SOURCE1], 
self.TABLE_REF, client1) self.assertTrue(job.exists(client=client2)) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + method="GET", path=PATH, query_params={"fields": "id"} + ) def test_exists_miss_w_job_reference(self): from google.cloud.bigquery import job - job_ref = job._JobReference('my-job-id', 'other-project', 'US') + job_ref = job._JobReference("my-job-id", "other-project", "US") conn = _make_connection() client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one( - job_ref, [self.SOURCE1], self.TABLE_REF, client) + load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) self.assertFalse(load_job.exists()) conn.api_request.assert_called_once_with( - method='GET', - path='/projects/other-project/jobs/my-job-id', - query_params={'fields': 'id', 'location': 'US'}) + method="GET", + path="/projects/other-project/jobs/my-job-id", + query_params={"fields": "id", "location": "US"}, + ) def test_reload_w_bound_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource() conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) job.reload() conn.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) def test_reload_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource() conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection(RESOURCE) client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) job.reload(client=client2) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) def test_reload_w_job_reference(self): from google.cloud.bigquery import job resource = self._make_resource(ended=True) - resource['jobReference']['projectId'] = 'alternative-project' - resource['jobReference']['location'] = 'US' - job_ref = job._JobReference(self.JOB_ID, 'alternative-project', 'US') + resource["jobReference"]["projectId"] = "alternative-project" + resource["jobReference"]["location"] = "US" + job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") conn = _make_connection(resource) client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one( - job_ref, [self.SOURCE1], self.TABLE_REF, client) + load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) load_job.reload() conn.api_request.assert_called_once_with( - method='GET', - path='/projects/alternative-project/jobs/{}'.format( - self.JOB_ID), - query_params={'location': 'US'}) + method="GET", + path="/projects/alternative-project/jobs/{}".format(self.JOB_ID), + query_params={"location": "US"}, + ) def 
test_cancel_w_bound_client(self): - PATH = '/projects/%s/jobs/%s/cancel' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s/cancel" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource(ended=True) - RESPONSE = {'job': RESOURCE} + RESPONSE = {"job": RESOURCE} conn = _make_connection(RESPONSE) client = _make_client(project=self.PROJECT, connection=conn) - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client) job.cancel() conn.api_request.assert_called_once_with( - method='POST', - path=PATH, - query_params={}) + method="POST", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) def test_cancel_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s/cancel' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s/cancel" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource(ended=True) - RESPONSE = {'job': RESOURCE} + RESPONSE = {"job": RESOURCE} conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection(RESPONSE) client2 = _make_client(project=self.PROJECT, connection=conn2) - job = self._make_one( - self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) + job = self._make_one(self.JOB_ID, [self.SOURCE1], self.TABLE_REF, client1) job.cancel(client=client2) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='POST', - path=PATH, - query_params={}) + method="POST", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) def test_cancel_w_job_reference(self): from google.cloud.bigquery import job resource = self._make_resource(ended=True) - resource['jobReference']['projectId'] = 'alternative-project' - resource['jobReference']['location'] = 'US' - job_ref = job._JobReference(self.JOB_ID, 'alternative-project', 'US') - conn = _make_connection({'job': resource}) + resource["jobReference"]["projectId"] = "alternative-project" + resource["jobReference"]["location"] = "US" + job_ref = job._JobReference(self.JOB_ID, "alternative-project", "US") + conn = _make_connection({"job": resource}) client = _make_client(project=self.PROJECT, connection=conn) - load_job = self._make_one( - job_ref, [self.SOURCE1], self.TABLE_REF, client) + load_job = self._make_one(job_ref, [self.SOURCE1], self.TABLE_REF, client) load_job.cancel() conn.api_request.assert_called_once_with( - method='POST', - path='/projects/alternative-project/jobs/{}/cancel'.format( - self.JOB_ID), - query_params={'location': 'US'}) + method="POST", + path="/projects/alternative-project/jobs/{}/cancel".format(self.JOB_ID), + query_params={"location": "US"}, + ) class TestCopyJobConfig(unittest.TestCase, _Base): - JOB_TYPE = 'copy' + JOB_TYPE = "copy" @staticmethod def _get_target_class(): from google.cloud.bigquery.job import CopyJobConfig + return CopyJobConfig def test_ctor_w_properties(self): @@ -2496,8 +2328,7 @@ def test_ctor_w_properties(self): create_disposition = CreateDisposition.CREATE_NEVER write_disposition = WriteDisposition.WRITE_TRUNCATE config = self._get_target_class()( - create_disposition=create_disposition, - write_disposition=write_disposition + create_disposition=create_disposition, write_disposition=write_disposition ) self.assertEqual(config.create_disposition, create_disposition) @@ -2508,35 +2339,33 @@ def test_to_api_repr_with_encryption(self): config = self._make_one() config.destination_encryption_configuration = EncryptionConfiguration( - 
kms_key_name=self.KMS_KEY_NAME) + kms_key_name=self.KMS_KEY_NAME + ) resource = config.to_api_repr() self.assertEqual( resource, { - 'copy': { - 'destinationEncryptionConfiguration': { - 'kmsKeyName': self.KMS_KEY_NAME, - }, - }, - }) + "copy": { + "destinationEncryptionConfiguration": { + "kmsKeyName": self.KMS_KEY_NAME + } + } + }, + ) def test_to_api_repr_with_encryption_none(self): config = self._make_one() config.destination_encryption_configuration = None resource = config.to_api_repr() self.assertEqual( - resource, - { - 'copy': { - 'destinationEncryptionConfiguration': None, - }, - }) + resource, {"copy": {"destinationEncryptionConfiguration": None}} + ) class TestCopyJob(unittest.TestCase, _Base): - JOB_TYPE = 'copy' - SOURCE_TABLE = 'source_table' - DESTINATION_TABLE = 'destination_table' + JOB_TYPE = "copy" + SOURCE_TABLE = "source_table" + DESTINATION_TABLE = "destination_table" @staticmethod def _get_target_class(): @@ -2545,18 +2374,19 @@ def _get_target_class(): return CopyJob def _make_resource(self, started=False, ended=False): - resource = super(TestCopyJob, self)._make_resource( - started, ended) - config = resource['configuration']['copy'] - config['sourceTables'] = [{ - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, - }] - config['destinationTable'] = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, + resource = super(TestCopyJob, self)._make_resource(started, ended) + config = resource["configuration"]["copy"] + config["sourceTables"] = [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, + } + ] + config["destinationTable"] = { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, } return resource @@ -2564,39 +2394,38 @@ def _make_resource(self, started=False, ended=False): def _verifyResourceProperties(self, job, resource): self._verifyReadonlyResourceProperties(job, resource) - config = resource.get('configuration', {}).get('copy') + config = resource.get("configuration", {}).get("copy") - table_ref = config['destinationTable'] - self.assertEqual(job.destination.project, table_ref['projectId']) - self.assertEqual(job.destination.dataset_id, table_ref['datasetId']) - self.assertEqual(job.destination.table_id, table_ref['tableId']) + table_ref = config["destinationTable"] + self.assertEqual(job.destination.project, table_ref["projectId"]) + self.assertEqual(job.destination.dataset_id, table_ref["datasetId"]) + self.assertEqual(job.destination.table_id, table_ref["tableId"]) - sources = config.get('sourceTables') + sources = config.get("sourceTables") if sources is None: - sources = [config['sourceTable']] + sources = [config["sourceTable"]] self.assertEqual(len(sources), len(job.sources)) for table_ref, table in zip(sources, job.sources): - self.assertEqual(table.project, table_ref['projectId']) - self.assertEqual(table.dataset_id, table_ref['datasetId']) - self.assertEqual(table.table_id, table_ref['tableId']) + self.assertEqual(table.project, table_ref["projectId"]) + self.assertEqual(table.dataset_id, table_ref["datasetId"]) + self.assertEqual(table.table_id, table_ref["tableId"]) - if 'createDisposition' in config: - self.assertEqual(job.create_disposition, - config['createDisposition']) + if "createDisposition" in config: + self.assertEqual(job.create_disposition, config["createDisposition"]) else: self.assertIsNone(job.create_disposition) - if 'writeDisposition' in config: - 
self.assertEqual(job.write_disposition, - config['writeDisposition']) + if "writeDisposition" in config: + self.assertEqual(job.write_disposition, config["writeDisposition"]) else: self.assertIsNone(job.write_disposition) - if 'destinationEncryptionConfiguration' in config: + if "destinationEncryptionConfiguration" in config: self.assertIsNotNone(job.destination_encryption_configuration) self.assertEqual( job.destination_encryption_configuration.kms_key_name, - config['destinationEncryptionConfiguration']['kmsKeyName']) + config["destinationEncryptionConfiguration"]["kmsKeyName"], + ) else: self.assertIsNone(job.destination_encryption_configuration) @@ -2609,9 +2438,7 @@ def test_ctor(self): self.assertEqual(job.sources, [source]) self.assertIs(job._client, client) self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual( - job.path, - '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID)) + self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) self._verifyInitialReadonlyProperties(job) @@ -2632,11 +2459,8 @@ def test_from_api_repr_missing_config(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': '%s:%s' % (self.PROJECT, self.DS_ID), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - } + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, } klass = self._get_target_class() with self.assertRaises(KeyError): @@ -2646,22 +2470,21 @@ def test_from_api_repr_bare(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'copy': { - 'sourceTables': [{ - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, - }], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, + "id": self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "copy": { + "sourceTables": [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, + } + ], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, }, } }, @@ -2675,26 +2498,25 @@ def test_from_api_with_encryption(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'copy': { - 'sourceTables': [{ - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, - }], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, + "id": self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "copy": { + "sourceTables": [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, + } + ], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, + }, + "destinationEncryptionConfiguration": { + "kmsKeyName": self.KMS_KEY_NAME }, - 'destinationEncryptionConfiguration': { - 'kmsKeyName': self.KMS_KEY_NAME - } } }, } @@ -2707,22 +2529,19 @@ def test_from_api_repr_w_sourcetable(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 
'id': self.JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'copy': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, + "id": self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "copy": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, }, - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, }, } }, @@ -2736,18 +2555,15 @@ def test_from_api_repr_wo_sources(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'copy': { - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, - }, + "id": self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "copy": { + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, + } } }, } @@ -2757,23 +2573,24 @@ def test_from_api_repr_wo_sources(self): def test_from_api_repr_w_properties(self): from google.cloud.bigquery.job import CreateDisposition + client = _make_client(project=self.PROJECT) RESOURCE = self._make_resource() - copy_config = RESOURCE['configuration']['copy'] - copy_config['createDisposition'] = CreateDisposition.CREATE_IF_NEEDED + copy_config = RESOURCE["configuration"]["copy"] + copy_config["createDisposition"] = CreateDisposition.CREATE_IF_NEEDED klass = self._get_target_class() job = klass.from_api_repr(RESOURCE, client=client) self.assertIs(job._client, client) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_bound_client(self): - PATH = '/projects/%s/jobs' % (self.PROJECT,) + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) source = self._table_ref(self.SOURCE_TABLE) @@ -2783,28 +2600,28 @@ def test_begin_w_bound_client(self): job._begin() conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'copy': { - 'sourceTables': [{ - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE - }], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "copy": { + "sourceTables": [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, + } + ], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, }, - }, + } }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def 
test_begin_w_alternate_client(self): @@ -2812,23 +2629,26 @@ def test_begin_w_alternate_client(self): from google.cloud.bigquery.job import CreateDisposition from google.cloud.bigquery.job import WriteDisposition - PATH = '/projects/%s/jobs' % (self.PROJECT,) + + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource(ended=True) COPY_CONFIGURATION = { - 'sourceTables': [{ - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, - }], - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, + "sourceTables": [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, + } + ], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, }, - 'createDisposition': CreateDisposition.CREATE_NEVER, - 'writeDisposition': WriteDisposition.WRITE_TRUNCATE, + "createDisposition": CreateDisposition.CREATE_NEVER, + "writeDisposition": WriteDisposition.WRITE_TRUNCATE, } - RESOURCE['configuration']['copy'] = COPY_CONFIGURATION + RESOURCE["configuration"]["copy"] = COPY_CONFIGURATION conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection(RESOURCE) @@ -2838,27 +2658,22 @@ def test_begin_w_alternate_client(self): config = CopyJobConfig() config.create_disposition = CreateDisposition.CREATE_NEVER config.write_disposition = WriteDisposition.WRITE_TRUNCATE - job = self._make_one(self.JOB_ID, [source], destination, client1, - config) + job = self._make_one(self.JOB_ID, [source], destination, client1, config) job._begin(client=client2) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'copy': COPY_CONFIGURATION, - }, - }) + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": {"copy": COPY_CONFIGURATION}, + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_exists_miss_w_bound_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn = _make_connection() client = _make_client(project=self.PROJECT, connection=conn) @@ -2869,12 +2684,11 @@ def test_exists_miss_w_bound_client(self): self.assertFalse(job.exists()) conn.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + method="GET", path=PATH, query_params={"fields": "id"} + ) def test_exists_hit_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection({}) @@ -2887,12 +2701,11 @@ def test_exists_hit_w_alternate_client(self): conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + method="GET", path=PATH, query_params={"fields": "id"} + ) def test_reload_w_bound_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource() conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) @@ -2903,13 +2716,12 @@ def 
test_reload_w_bound_client(self): job.reload() conn.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) def test_reload_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource() conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) @@ -2923,72 +2735,73 @@ def test_reload_w_alternate_client(self): conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) class TestExtractJobConfig(unittest.TestCase, _Base): - JOB_TYPE = 'extract' + JOB_TYPE = "extract" @staticmethod def _get_target_class(): from google.cloud.bigquery.job import ExtractJobConfig + return ExtractJobConfig def test_ctor_w_properties(self): - config = self._get_target_class()( - field_delimiter='\t', print_header=True) + config = self._get_target_class()(field_delimiter="\t", print_header=True) - self.assertEqual(config.field_delimiter, '\t') + self.assertEqual(config.field_delimiter, "\t") self.assertTrue(config.print_header) def test_to_api_repr(self): from google.cloud.bigquery import job + config = self._make_one() config.compression = job.Compression.SNAPPY config.destination_format = job.DestinationFormat.AVRO - config.field_delimiter = 'ignored for avro' + config.field_delimiter = "ignored for avro" config.print_header = False - config._properties['extract']['someNewField'] = 'some-value' + config._properties["extract"]["someNewField"] = "some-value" resource = config.to_api_repr() self.assertEqual( resource, { - 'extract': { - 'compression': 'SNAPPY', - 'destinationFormat': 'AVRO', - 'fieldDelimiter': 'ignored for avro', - 'printHeader': False, - 'someNewField': 'some-value', - }, - }) + "extract": { + "compression": "SNAPPY", + "destinationFormat": "AVRO", + "fieldDelimiter": "ignored for avro", + "printHeader": False, + "someNewField": "some-value", + } + }, + ) def test_from_api_repr(self): cls = self._get_target_class() config = cls.from_api_repr( { - 'extract': { - 'compression': 'NONE', - 'destinationFormat': 'CSV', - 'fieldDelimiter': '\t', - 'printHeader': True, - 'someNewField': 'some-value', - }, - }) - self.assertEqual(config.compression, 'NONE') - self.assertEqual(config.destination_format, 'CSV') - self.assertEqual(config.field_delimiter, '\t') + "extract": { + "compression": "NONE", + "destinationFormat": "CSV", + "fieldDelimiter": "\t", + "printHeader": True, + "someNewField": "some-value", + } + } + ) + self.assertEqual(config.compression, "NONE") + self.assertEqual(config.destination_format, "CSV") + self.assertEqual(config.field_delimiter, "\t") self.assertEqual(config.print_header, True) - self.assertEqual( - config._properties['extract']['someNewField'], 'some-value') + self.assertEqual(config._properties["extract"]["someNewField"], "some-value") class TestExtractJob(unittest.TestCase, _Base): - JOB_TYPE = 'extract' - SOURCE_TABLE = 'source_table' - DESTINATION_URI = 'gs://bucket_name/object_name' + JOB_TYPE = "extract" + SOURCE_TABLE = "source_table" + DESTINATION_URI = "gs://bucket_name/object_name" @staticmethod def _get_target_class(): @@ -2997,50 +2810,45 @@ def _get_target_class(): return ExtractJob def _make_resource(self, started=False, ended=False): 
- resource = super(TestExtractJob, self)._make_resource( - started, ended) - config = resource['configuration']['extract'] - config['sourceTable'] = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, + resource = super(TestExtractJob, self)._make_resource(started, ended) + config = resource["configuration"]["extract"] + config["sourceTable"] = { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, } - config['destinationUris'] = [self.DESTINATION_URI] + config["destinationUris"] = [self.DESTINATION_URI] return resource def _verifyResourceProperties(self, job, resource): self._verifyReadonlyResourceProperties(job, resource) - config = resource.get('configuration', {}).get('extract') + config = resource.get("configuration", {}).get("extract") - self.assertEqual(job.destination_uris, config['destinationUris']) + self.assertEqual(job.destination_uris, config["destinationUris"]) - table_ref = config['sourceTable'] - self.assertEqual(job.source.project, table_ref['projectId']) - self.assertEqual(job.source.dataset_id, table_ref['datasetId']) - self.assertEqual(job.source.table_id, table_ref['tableId']) + table_ref = config["sourceTable"] + self.assertEqual(job.source.project, table_ref["projectId"]) + self.assertEqual(job.source.dataset_id, table_ref["datasetId"]) + self.assertEqual(job.source.table_id, table_ref["tableId"]) - if 'compression' in config: - self.assertEqual( - job.compression, config['compression']) + if "compression" in config: + self.assertEqual(job.compression, config["compression"]) else: self.assertIsNone(job.compression) - if 'destinationFormat' in config: - self.assertEqual( - job.destination_format, config['destinationFormat']) + if "destinationFormat" in config: + self.assertEqual(job.destination_format, config["destinationFormat"]) else: self.assertIsNone(job.destination_format) - if 'fieldDelimiter' in config: - self.assertEqual( - job.field_delimiter, config['fieldDelimiter']) + if "fieldDelimiter" in config: + self.assertEqual(job.field_delimiter, config["fieldDelimiter"]) else: self.assertIsNone(job.field_delimiter) - if 'printHeader' in config: - self.assertEqual( - job.print_header, config['printHeader']) + if "printHeader" in config: + self.assertEqual(job.print_header, config["printHeader"]) else: self.assertIsNone(job.print_header) @@ -3049,17 +2857,14 @@ def test_ctor(self): client = _make_client(project=self.PROJECT) source = Table(self.TABLE_REF) - job = self._make_one( - self.JOB_ID, source, [self.DESTINATION_URI], client) + job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client) self.assertEqual(job.source.project, self.PROJECT) self.assertEqual(job.source.dataset_id, self.DS_ID) self.assertEqual(job.source.table_id, self.TABLE_ID) self.assertEqual(job.destination_uris, [self.DESTINATION_URI]) self.assertIs(job._client, client) self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual( - job.path, - '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID)) + self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) self._verifyInitialReadonlyProperties(job) @@ -3073,16 +2878,17 @@ def test_destination_uri_file_counts(self): file_counts = 23 client = _make_client(project=self.PROJECT) job = self._make_one( - self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client) + self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client + ) self.assertIsNone(job.destination_uri_file_counts) - statistics = job._properties['statistics'] = {} + 
statistics = job._properties["statistics"] = {} self.assertIsNone(job.destination_uri_file_counts) - extract_stats = statistics['extract'] = {} + extract_stats = statistics["extract"] = {} self.assertIsNone(job.destination_uri_file_counts) - extract_stats['destinationUriFileCounts'] = [str(file_counts)] + extract_stats["destinationUriFileCounts"] = [str(file_counts)] self.assertEqual(job.destination_uri_file_counts, [file_counts]) def test_from_api_repr_missing_identity(self): @@ -3097,11 +2903,8 @@ def test_from_api_repr_missing_config(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': '%s:%s' % (self.PROJECT, self.DS_ID), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - } + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, } klass = self._get_target_class() with self.assertRaises(KeyError): @@ -3111,19 +2914,16 @@ def test_from_api_repr_bare(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, + "id": self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, }, - 'destinationUris': [self.DESTINATION_URI], + "destinationUris": [self.DESTINATION_URI], } }, } @@ -3134,10 +2934,11 @@ def test_from_api_repr_bare(self): def test_from_api_repr_w_properties(self): from google.cloud.bigquery.job import Compression + client = _make_client(project=self.PROJECT) RESOURCE = self._make_resource() - extract_config = RESOURCE['configuration']['extract'] - extract_config['compression'] = Compression.GZIP + extract_config = RESOURCE["configuration"]["extract"] + extract_config["compression"] = Compression.GZIP klass = self._get_target_class() job = klass.from_api_repr(RESOURCE, client=client) self.assertIs(job._client, client) @@ -3146,41 +2947,38 @@ def test_from_api_repr_w_properties(self): def test_begin_w_bound_client(self): from google.cloud.bigquery.dataset import DatasetReference - PATH = '/projects/%s/jobs' % (self.PROJECT,) + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) source_dataset = DatasetReference(self.PROJECT, self.DS_ID) source = source_dataset.table(self.SOURCE_TABLE) - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], - client) + job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client) job._begin() conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'extract': { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + 
"configuration": { + "extract": { + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, }, - 'destinationUris': [self.DESTINATION_URI], - }, + "destinationUris": [self.DESTINATION_URI], + } }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_alternate_client(self): @@ -3189,21 +2987,21 @@ def test_begin_w_alternate_client(self): from google.cloud.bigquery.job import DestinationFormat from google.cloud.bigquery.job import ExtractJobConfig - PATH = '/projects/%s/jobs' % (self.PROJECT,) + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource(ended=True) EXTRACT_CONFIGURATION = { - 'sourceTable': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.SOURCE_TABLE, + "sourceTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.SOURCE_TABLE, }, - 'destinationUris': [self.DESTINATION_URI], - 'compression': Compression.GZIP, - 'destinationFormat': DestinationFormat.NEWLINE_DELIMITED_JSON, - 'fieldDelimiter': '|', - 'printHeader': False, + "destinationUris": [self.DESTINATION_URI], + "compression": Compression.GZIP, + "destinationFormat": DestinationFormat.NEWLINE_DELIMITED_JSON, + "fieldDelimiter": "|", + "printHeader": False, } - RESOURCE['configuration']['extract'] = EXTRACT_CONFIGURATION + RESOURCE["configuration"]["extract"] = EXTRACT_CONFIGURATION conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection(RESOURCE) @@ -3213,81 +3011,78 @@ def test_begin_w_alternate_client(self): config = ExtractJobConfig() config.compression = Compression.GZIP config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON - config.field_delimiter = '|' + config.field_delimiter = "|" config.print_header = False - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], - client1, config) + job = self._make_one( + self.JOB_ID, source, [self.DESTINATION_URI], client1, config + ) job._begin(client=client2) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'extract': EXTRACT_CONFIGURATION, - }, - }) + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": {"extract": EXTRACT_CONFIGURATION}, + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_exists_miss_w_bound_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn = _make_connection() client = _make_client(project=self.PROJECT, connection=conn) job = self._make_one( - self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client) + self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client + ) self.assertFalse(job.exists()) conn.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + method="GET", path=PATH, query_params={"fields": "id"} + ) def test_exists_hit_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection({}) client2 = _make_client(project=self.PROJECT, connection=conn2) job = self._make_one( - self.JOB_ID, self.TABLE_REF, [self.DESTINATION_URI], client1) + self.JOB_ID, 
self.TABLE_REF, [self.DESTINATION_URI], client1 + ) self.assertTrue(job.exists(client=client2)) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + method="GET", path=PATH, query_params={"fields": "id"} + ) def test_reload_w_bound_client(self): from google.cloud.bigquery.dataset import DatasetReference - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource() conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) source_dataset = DatasetReference(self.PROJECT, self.DS_ID) source = source_dataset.table(self.SOURCE_TABLE) - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], - client) + job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client) job.reload() conn.api_request.assert_called_once_with( - method='GET', path=PATH, query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) def test_reload_w_alternate_client(self): from google.cloud.bigquery.dataset import DatasetReference - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) RESOURCE = self._make_resource() conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) @@ -3295,14 +3090,14 @@ def test_reload_w_alternate_client(self): client2 = _make_client(project=self.PROJECT, connection=conn2) source_dataset = DatasetReference(self.PROJECT, self.DS_ID) source = source_dataset.table(self.SOURCE_TABLE) - job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], - client1) + job = self._make_one(self.JOB_ID, source, [self.DESTINATION_URI], client1) job.reload(client=client2) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', path=PATH, query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) @@ -3318,7 +3113,7 @@ def _make_one(self, *args, **kw): def test_ctor(self): config = self._make_one() - self.assertEqual(config._properties, {'query': {}}) + self.assertEqual(config._properties, {"query": {}}) def test_ctor_w_none(self): config = self._make_one() @@ -3328,8 +3123,7 @@ def test_ctor_w_none(self): self.assertIsNone(config.destination) def test_ctor_w_properties(self): - config = self._get_target_class()( - use_query_cache=False, use_legacy_sql=True) + config = self._get_target_class()(use_query_cache=False, use_legacy_sql=True) self.assertFalse(config.use_query_cache) self.assertTrue(config.use_legacy_sql) @@ -3338,22 +3132,22 @@ def test_time_partitioning(self): from google.cloud.bigquery import table time_partitioning = table.TimePartitioning( - type_=table.TimePartitioningType.DAY, field='name') + type_=table.TimePartitioningType.DAY, field="name" + ) config = self._make_one() config.time_partitioning = time_partitioning # TimePartitioning should be configurable after assigning time_partitioning.expiration_ms = 10000 - self.assertEqual( - config.time_partitioning.type_, table.TimePartitioningType.DAY) - self.assertEqual(config.time_partitioning.field, 'name') + self.assertEqual(config.time_partitioning.type_, table.TimePartitioningType.DAY) + self.assertEqual(config.time_partitioning.field, "name") self.assertEqual(config.time_partitioning.expiration_ms, 10000) config.time_partitioning = None 
self.assertIsNone(config.time_partitioning) def test_clustering_fields(self): - fields = ['email', 'postal_code'] + fields = ["email", "postal_code"] config = self._get_target_class()() config.clustering_fields = fields self.assertEqual(config.clustering_fields, fields) @@ -3374,16 +3168,16 @@ def test_from_api_repr_normal(self): from google.cloud.bigquery.dataset import DatasetReference resource = { - 'query': { - 'useLegacySql': True, - 'query': 'no property for me', - 'defaultDataset': { - 'projectId': 'someproject', - 'datasetId': 'somedataset', + "query": { + "useLegacySql": True, + "query": "no property for me", + "defaultDataset": { + "projectId": "someproject", + "datasetId": "somedataset", }, - 'someNewProperty': 'I should be saved, too.', + "someNewProperty": "I should be saved, too.", }, - 'dryRun': True, + "dryRun": True, } klass = self._get_target_class() @@ -3391,85 +3185,81 @@ def test_from_api_repr_normal(self): self.assertTrue(config.use_legacy_sql) self.assertEqual( - config.default_dataset, - DatasetReference('someproject', 'somedataset')) + config.default_dataset, DatasetReference("someproject", "somedataset") + ) self.assertTrue(config.dry_run) # Make sure unknown properties propagate. + self.assertEqual(config._properties["query"]["query"], "no property for me") self.assertEqual( - config._properties['query']['query'], 'no property for me') - self.assertEqual( - config._properties['query']['someNewProperty'], - 'I should be saved, too.') + config._properties["query"]["someNewProperty"], "I should be saved, too." + ) def test_to_api_repr_normal(self): from google.cloud.bigquery.dataset import DatasetReference config = self._make_one() config.use_legacy_sql = True - config.default_dataset = DatasetReference( - 'someproject', 'somedataset') + config.default_dataset = DatasetReference("someproject", "somedataset") config.dry_run = False - config._properties['someNewProperty'] = 'Woohoo, alpha stuff.' + config._properties["someNewProperty"] = "Woohoo, alpha stuff." resource = config.to_api_repr() - self.assertFalse(resource['dryRun']) - self.assertTrue(resource['query']['useLegacySql']) + self.assertFalse(resource["dryRun"]) + self.assertTrue(resource["query"]["useLegacySql"]) self.assertEqual( - resource['query']['defaultDataset']['projectId'], 'someproject') + resource["query"]["defaultDataset"]["projectId"], "someproject" + ) self.assertEqual( - resource['query']['defaultDataset']['datasetId'], 'somedataset') + resource["query"]["defaultDataset"]["datasetId"], "somedataset" + ) # Make sure unknown properties propagate. 
- self.assertEqual( - resource['someNewProperty'], 'Woohoo, alpha stuff.') + self.assertEqual(resource["someNewProperty"], "Woohoo, alpha stuff.") def test_to_api_repr_with_encryption(self): from google.cloud.bigquery.table import EncryptionConfiguration config = self._make_one() config.destination_encryption_configuration = EncryptionConfiguration( - kms_key_name=self.KMS_KEY_NAME) + kms_key_name=self.KMS_KEY_NAME + ) resource = config.to_api_repr() self.assertEqual( - resource, { - 'query': { - 'destinationEncryptionConfiguration': { - 'kmsKeyName': self.KMS_KEY_NAME, - }, - }, - }) + resource, + { + "query": { + "destinationEncryptionConfiguration": { + "kmsKeyName": self.KMS_KEY_NAME + } + } + }, + ) def test_to_api_repr_with_encryption_none(self): config = self._make_one() config.destination_encryption_configuration = None resource = config.to_api_repr() self.assertEqual( - resource, - { - 'query': { - 'destinationEncryptionConfiguration': None, - }, - }) + resource, {"query": {"destinationEncryptionConfiguration": None}} + ) def test_from_api_repr_with_encryption(self): resource = { - 'query': { - 'destinationEncryptionConfiguration': { - 'kmsKeyName': self.KMS_KEY_NAME, - }, - }, + "query": { + "destinationEncryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME} + } } klass = self._get_target_class() config = klass.from_api_repr(resource) self.assertEqual( - config.destination_encryption_configuration.kms_key_name, - self.KMS_KEY_NAME) + config.destination_encryption_configuration.kms_key_name, self.KMS_KEY_NAME + ) class TestQueryJob(unittest.TestCase, _Base): - JOB_TYPE = 'query' - QUERY = 'select count(*) from persons' - DESTINATION_TABLE = 'destination_table' + JOB_TYPE = "query" + QUERY = "select count(*) from persons" + DESTINATION_TABLE = "destination_table" @staticmethod def _get_target_class(): @@ -3478,71 +3268,66 @@ def _get_target_class(): return QueryJob def _make_resource(self, started=False, ended=False): - resource = super(TestQueryJob, self)._make_resource( - started, ended) - config = resource['configuration']['query'] - config['query'] = self.QUERY + resource = super(TestQueryJob, self)._make_resource(started, ended) + config = resource["configuration"]["query"] + config["query"] = self.QUERY if ended: - resource['status'] = {'state': 'DONE'} + resource["status"] = {"state": "DONE"} return resource def _verifyBooleanResourceProperties(self, job, config): - if 'allowLargeResults' in config: - self.assertEqual(job.allow_large_results, - config['allowLargeResults']) + if "allowLargeResults" in config: + self.assertEqual(job.allow_large_results, config["allowLargeResults"]) else: self.assertIsNone(job.allow_large_results) - if 'flattenResults' in config: - self.assertEqual(job.flatten_results, - config['flattenResults']) + if "flattenResults" in config: + self.assertEqual(job.flatten_results, config["flattenResults"]) else: self.assertIsNone(job.flatten_results) - if 'useQueryCache' in config: - self.assertEqual(job.use_query_cache, - config['useQueryCache']) + if "useQueryCache" in config: + self.assertEqual(job.use_query_cache, config["useQueryCache"]) else: self.assertIsNone(job.use_query_cache) - if 'useLegacySql' in config: - self.assertEqual(job.use_legacy_sql, - config['useLegacySql']) + if "useLegacySql" in config: + self.assertEqual(job.use_legacy_sql, config["useLegacySql"]) else: self.assertIsNone(job.use_legacy_sql) def _verifyIntegerResourceProperties(self, job, config): - if 'maximumBillingTier' in config: - self.assertEqual( - 
job.maximum_billing_tier, config['maximumBillingTier']) + if "maximumBillingTier" in config: + self.assertEqual(job.maximum_billing_tier, config["maximumBillingTier"]) else: self.assertIsNone(job.maximum_billing_tier) - if 'maximumBytesBilled' in config: + if "maximumBytesBilled" in config: self.assertEqual( - str(job.maximum_bytes_billed), config['maximumBytesBilled']) + str(job.maximum_bytes_billed), config["maximumBytesBilled"] + ) self.assertIsInstance(job.maximum_bytes_billed, int) else: self.assertIsNone(job.maximum_bytes_billed) def _verify_udf_resources(self, job, config): - udf_resources = config.get('userDefinedFunctionResources', ()) + udf_resources = config.get("userDefinedFunctionResources", ()) self.assertEqual(len(job.udf_resources), len(udf_resources)) for found, expected in zip(job.udf_resources, udf_resources): - if 'resourceUri' in expected: - self.assertEqual(found.udf_type, 'resourceUri') - self.assertEqual(found.value, expected['resourceUri']) + if "resourceUri" in expected: + self.assertEqual(found.udf_type, "resourceUri") + self.assertEqual(found.value, expected["resourceUri"]) else: - self.assertEqual(found.udf_type, 'inlineCode') - self.assertEqual(found.value, expected['inlineCode']) + self.assertEqual(found.udf_type, "inlineCode") + self.assertEqual(found.value, expected["inlineCode"]) def _verifyQueryParameters(self, job, config): - query_parameters = config.get('queryParameters', ()) + query_parameters = config.get("queryParameters", ()) self.assertEqual(len(job.query_parameters), len(query_parameters)) for found, expected in zip(job.query_parameters, query_parameters): self.assertEqual(found.to_api_repr(), expected) def _verify_table_definitions(self, job, config): - table_defs = config.get('tableDefinitions') + table_defs = config.get("tableDefinitions") if job.table_definitions is None: self.assertIsNone(table_defs) else: @@ -3553,71 +3338,65 @@ def _verify_table_definitions(self, job, config): self.assertEqual(found_ec.to_api_repr(), expected_ec) def _verify_configuration_properties(self, job, configuration): - if 'dryRun' in configuration: - self.assertEqual(job.dry_run, - configuration['dryRun']) + if "dryRun" in configuration: + self.assertEqual(job.dry_run, configuration["dryRun"]) else: self.assertIsNone(job.dry_run) def _verifyResourceProperties(self, job, resource): self._verifyReadonlyResourceProperties(job, resource) - configuration = resource.get('configuration', {}) + configuration = resource.get("configuration", {}) self._verify_configuration_properties(job, configuration) - query_config = resource.get('configuration', {}).get('query') + query_config = resource.get("configuration", {}).get("query") self._verifyBooleanResourceProperties(job, query_config) self._verifyIntegerResourceProperties(job, query_config) self._verify_udf_resources(job, query_config) self._verifyQueryParameters(job, query_config) self._verify_table_definitions(job, query_config) - self.assertEqual(job.query, query_config['query']) - if 'createDisposition' in query_config: - self.assertEqual(job.create_disposition, - query_config['createDisposition']) + self.assertEqual(job.query, query_config["query"]) + if "createDisposition" in query_config: + self.assertEqual(job.create_disposition, query_config["createDisposition"]) else: self.assertIsNone(job.create_disposition) - if 'defaultDataset' in query_config: + if "defaultDataset" in query_config: ds_ref = job.default_dataset - ds_ref = { - 'projectId': ds_ref.project, - 'datasetId': ds_ref.dataset_id, - } - 
self.assertEqual(ds_ref, query_config['defaultDataset']) + ds_ref = {"projectId": ds_ref.project, "datasetId": ds_ref.dataset_id} + self.assertEqual(ds_ref, query_config["defaultDataset"]) else: self.assertIsNone(job.default_dataset) - if 'destinationTable' in query_config: + if "destinationTable" in query_config: table = job.destination tb_ref = { - 'projectId': table.project, - 'datasetId': table.dataset_id, - 'tableId': table.table_id + "projectId": table.project, + "datasetId": table.dataset_id, + "tableId": table.table_id, } - self.assertEqual(tb_ref, query_config['destinationTable']) + self.assertEqual(tb_ref, query_config["destinationTable"]) else: self.assertIsNone(job.destination) - if 'priority' in query_config: - self.assertEqual(job.priority, - query_config['priority']) + if "priority" in query_config: + self.assertEqual(job.priority, query_config["priority"]) else: self.assertIsNone(job.priority) - if 'writeDisposition' in query_config: - self.assertEqual(job.write_disposition, - query_config['writeDisposition']) + if "writeDisposition" in query_config: + self.assertEqual(job.write_disposition, query_config["writeDisposition"]) else: self.assertIsNone(job.write_disposition) - if 'destinationEncryptionConfiguration' in query_config: + if "destinationEncryptionConfiguration" in query_config: self.assertIsNotNone(job.destination_encryption_configuration) self.assertEqual( job.destination_encryption_configuration.kms_key_name, - query_config['destinationEncryptionConfiguration'][ - 'kmsKeyName']) + query_config["destinationEncryptionConfiguration"]["kmsKeyName"], + ) else: self.assertIsNone(job.destination_encryption_configuration) - if 'schemaUpdateOptions' in query_config: + if "schemaUpdateOptions" in query_config: self.assertEqual( - job.schema_update_options, query_config['schemaUpdateOptions']) + job.schema_update_options, query_config["schemaUpdateOptions"] + ) else: self.assertIsNone(job.schema_update_options) @@ -3627,9 +3406,7 @@ def test_ctor_defaults(self): self.assertEqual(job.query, self.QUERY) self.assertIs(job._client, client) self.assertEqual(job.job_type, self.JOB_TYPE) - self.assertEqual( - job.path, - '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID)) + self.assertEqual(job.path, "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID)) self._verifyInitialReadonlyProperties(job) @@ -3657,24 +3434,22 @@ def test_ctor_w_udf_resources(self): from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.query import UDFResource - RESOURCE_URI = 'gs://some-bucket/js/lib.js' + RESOURCE_URI = "gs://some-bucket/js/lib.js" udf_resources = [UDFResource("resourceUri", RESOURCE_URI)] client = _make_client(project=self.PROJECT) config = QueryJobConfig() config.udf_resources = udf_resources - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=config) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) self.assertEqual(job.udf_resources, udf_resources) def test_ctor_w_query_parameters(self): from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.query import ScalarQueryParameter - query_parameters = [ScalarQueryParameter("foo", 'INT64', 123)] + query_parameters = [ScalarQueryParameter("foo", "INT64", 123)] client = _make_client(project=self.PROJECT) config = QueryJobConfig(query_parameters=query_parameters) - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=config) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) self.assertEqual(job.query_parameters, 
query_parameters) def test_from_api_repr_missing_identity(self): @@ -3689,11 +3464,8 @@ def test_from_api_repr_missing_config(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': '%s:%s' % (self.PROJECT, self.DS_ID), - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - } + "id": "%s:%s" % (self.PROJECT, self.DS_ID), + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, } klass = self._get_target_class() with self.assertRaises(KeyError): @@ -3703,14 +3475,9 @@ def test_from_api_repr_bare(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'query': {'query': self.QUERY}, - }, + "id": self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": {"query": {"query": self.QUERY}}, } klass = self._get_target_class() job = klass.from_api_repr(RESOURCE, client=client) @@ -3721,18 +3488,15 @@ def test_from_api_repr_with_encryption(self): self._setUpConstants() client = _make_client(project=self.PROJECT) RESOURCE = { - 'id': self.JOB_ID, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'query': { - 'query': self.QUERY, - 'destinationEncryptionConfiguration': { - 'kmsKeyName': self.KMS_KEY_NAME - } - }, + "id": self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "query": { + "query": self.QUERY, + "destinationEncryptionConfiguration": { + "kmsKeyName": self.KMS_KEY_NAME + }, + } }, } klass = self._get_target_class() @@ -3747,17 +3511,15 @@ def test_from_api_repr_w_properties(self): client = _make_client(project=self.PROJECT) RESOURCE = self._make_resource() - query_config = RESOURCE['configuration']['query'] - query_config['createDisposition'] = CreateDisposition.CREATE_IF_NEEDED - query_config['writeDisposition'] = WriteDisposition.WRITE_TRUNCATE - query_config['destinationTable'] = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.DESTINATION_TABLE, + query_config = RESOURCE["configuration"]["query"] + query_config["createDisposition"] = CreateDisposition.CREATE_IF_NEEDED + query_config["writeDisposition"] = WriteDisposition.WRITE_TRUNCATE + query_config["destinationTable"] = { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.DESTINATION_TABLE, } - query_config['schemaUpdateOptions'] = [ - SchemaUpdateOption.ALLOW_FIELD_ADDITION, - ] + query_config["schemaUpdateOptions"] = [SchemaUpdateOption.ALLOW_FIELD_ADDITION] klass = self._get_target_class() job = klass.from_api_repr(RESOURCE, client=client) self.assertIs(job._client, client) @@ -3766,11 +3528,9 @@ def test_from_api_repr_w_properties(self): def test_cancelled(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) - job._properties['status'] = { - 'state': 'DONE', - 'errorResult': { - 'reason': 'stopped' - } + job._properties["status"] = { + "state": "DONE", + "errorResult": {"reason": "stopped"}, } self.assertTrue(job.cancelled()) @@ -3786,113 +3546,102 @@ def test_query_plan(self): from google.cloud.bigquery.job import QueryPlanEntry from google.cloud.bigquery.job import QueryPlanEntryStep - plan_entries = [{ - 'name': 'NAME', - 'id': '1234', - 'inputStages': ['88', '101'], - 'startMs': '1522540800000', - 'endMs': '1522540804000', - 'parallelInputs': '1000', - 
'completedParallelInputs': '5', - 'waitMsAvg': '33', - 'waitMsMax': '400', - 'waitRatioAvg': 2.71828, - 'waitRatioMax': 3.14159, - 'readMsAvg': '45', - 'readMsMax': '90', - 'readRatioAvg': 1.41421, - 'readRatioMax': 1.73205, - 'computeMsAvg': '55', - 'computeMsMax': '99', - 'computeRatioAvg': 0.69315, - 'computeRatioMax': 1.09861, - 'writeMsAvg': '203', - 'writeMsMax': '340', - 'writeRatioAvg': 3.32193, - 'writeRatioMax': 2.30258, - 'recordsRead': '100', - 'recordsWritten': '1', - 'status': 'STATUS', - 'shuffleOutputBytes': '1024', - 'shuffleOutputBytesSpilled': '1', - 'steps': [{ - 'kind': 'KIND', - 'substeps': ['SUBSTEP1', 'SUBSTEP2'], - }], - }] + plan_entries = [ + { + "name": "NAME", + "id": "1234", + "inputStages": ["88", "101"], + "startMs": "1522540800000", + "endMs": "1522540804000", + "parallelInputs": "1000", + "completedParallelInputs": "5", + "waitMsAvg": "33", + "waitMsMax": "400", + "waitRatioAvg": 2.71828, + "waitRatioMax": 3.14159, + "readMsAvg": "45", + "readMsMax": "90", + "readRatioAvg": 1.41421, + "readRatioMax": 1.73205, + "computeMsAvg": "55", + "computeMsMax": "99", + "computeRatioAvg": 0.69315, + "computeRatioMax": 1.09861, + "writeMsAvg": "203", + "writeMsMax": "340", + "writeRatioAvg": 3.32193, + "writeRatioMax": 2.30258, + "recordsRead": "100", + "recordsWritten": "1", + "status": "STATUS", + "shuffleOutputBytes": "1024", + "shuffleOutputBytesSpilled": "1", + "steps": [{"kind": "KIND", "substeps": ["SUBSTEP1", "SUBSTEP2"]}], + } + ] client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertEqual(job.query_plan, []) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertEqual(job.query_plan, []) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertEqual(job.query_plan, []) - query_stats['queryPlan'] = plan_entries + query_stats["queryPlan"] = plan_entries self.assertEqual(len(job.query_plan), len(plan_entries)) for found, expected in zip(job.query_plan, plan_entries): self.assertIsInstance(found, QueryPlanEntry) - self.assertEqual(found.name, expected['name']) - self.assertEqual(found.entry_id, expected['id']) - self.assertEqual( - len(found.input_stages), - len(expected['inputStages'])) + self.assertEqual(found.name, expected["name"]) + self.assertEqual(found.entry_id, expected["id"]) + self.assertEqual(len(found.input_stages), len(expected["inputStages"])) for f_id in found.input_stages: - self.assertIn(f_id, [int(e) for e in expected['inputStages']]) - self.assertEqual( - found.start.strftime(_RFC3339_MICROS), - '2018-04-01T00:00:00.000000Z') + self.assertIn(f_id, [int(e) for e in expected["inputStages"]]) self.assertEqual( - found.end.strftime(_RFC3339_MICROS), - '2018-04-01T00:00:04.000000Z') + found.start.strftime(_RFC3339_MICROS), "2018-04-01T00:00:00.000000Z" + ) self.assertEqual( - found.parallel_inputs, - int(expected['parallelInputs'])) + found.end.strftime(_RFC3339_MICROS), "2018-04-01T00:00:04.000000Z" + ) + self.assertEqual(found.parallel_inputs, int(expected["parallelInputs"])) self.assertEqual( found.completed_parallel_inputs, - int(expected['completedParallelInputs'])) - self.assertEqual(found.wait_ms_avg, int(expected['waitMsAvg'])) - self.assertEqual(found.wait_ms_max, int(expected['waitMsMax'])) - self.assertEqual(found.wait_ratio_avg, expected['waitRatioAvg']) - self.assertEqual(found.wait_ratio_max, expected['waitRatioMax']) - self.assertEqual(found.read_ms_avg, int(expected['readMsAvg'])) - 
self.assertEqual(found.read_ms_max, int(expected['readMsMax'])) - self.assertEqual(found.read_ratio_avg, expected['readRatioAvg']) - self.assertEqual(found.read_ratio_max, expected['readRatioMax']) - self.assertEqual( - found.compute_ms_avg, - int(expected['computeMsAvg'])) - self.assertEqual( - found.compute_ms_max, - int(expected['computeMsMax'])) - self.assertEqual( - found.compute_ratio_avg, expected['computeRatioAvg']) + int(expected["completedParallelInputs"]), + ) + self.assertEqual(found.wait_ms_avg, int(expected["waitMsAvg"])) + self.assertEqual(found.wait_ms_max, int(expected["waitMsMax"])) + self.assertEqual(found.wait_ratio_avg, expected["waitRatioAvg"]) + self.assertEqual(found.wait_ratio_max, expected["waitRatioMax"]) + self.assertEqual(found.read_ms_avg, int(expected["readMsAvg"])) + self.assertEqual(found.read_ms_max, int(expected["readMsMax"])) + self.assertEqual(found.read_ratio_avg, expected["readRatioAvg"]) + self.assertEqual(found.read_ratio_max, expected["readRatioMax"]) + self.assertEqual(found.compute_ms_avg, int(expected["computeMsAvg"])) + self.assertEqual(found.compute_ms_max, int(expected["computeMsMax"])) + self.assertEqual(found.compute_ratio_avg, expected["computeRatioAvg"]) + self.assertEqual(found.compute_ratio_max, expected["computeRatioMax"]) + self.assertEqual(found.write_ms_avg, int(expected["writeMsAvg"])) + self.assertEqual(found.write_ms_max, int(expected["writeMsMax"])) + self.assertEqual(found.write_ratio_avg, expected["writeRatioAvg"]) + self.assertEqual(found.write_ratio_max, expected["writeRatioMax"]) + self.assertEqual(found.records_read, int(expected["recordsRead"])) + self.assertEqual(found.records_written, int(expected["recordsWritten"])) + self.assertEqual(found.status, expected["status"]) self.assertEqual( - found.compute_ratio_max, expected['computeRatioMax']) - self.assertEqual(found.write_ms_avg, int(expected['writeMsAvg'])) - self.assertEqual(found.write_ms_max, int(expected['writeMsMax'])) - self.assertEqual(found.write_ratio_avg, expected['writeRatioAvg']) - self.assertEqual(found.write_ratio_max, expected['writeRatioMax']) - self.assertEqual( - found.records_read, int(expected['recordsRead'])) - self.assertEqual( - found.records_written, int(expected['recordsWritten'])) - self.assertEqual(found.status, expected['status']) - self.assertEqual( - found.shuffle_output_bytes, - int(expected['shuffleOutputBytes'])) + found.shuffle_output_bytes, int(expected["shuffleOutputBytes"]) + ) self.assertEqual( found.shuffle_output_bytes_spilled, - int(expected['shuffleOutputBytesSpilled'])) + int(expected["shuffleOutputBytesSpilled"]), + ) - self.assertEqual(len(found.steps), len(expected['steps'])) - for f_step, e_step in zip(found.steps, expected['steps']): + self.assertEqual(len(found.steps), len(expected["steps"])) + for f_step, e_step in zip(found.steps, expected["steps"]): self.assertIsInstance(f_step, QueryPlanEntryStep) - self.assertEqual(f_step.kind, e_step['kind']) - self.assertEqual(f_step.substeps, e_step['substeps']) + self.assertEqual(f_step.kind, e_step["kind"]) + self.assertEqual(f_step.substeps, e_step["substeps"]) def test_total_bytes_processed(self): total_bytes = 1234 @@ -3900,13 +3649,13 @@ def test_total_bytes_processed(self): job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.total_bytes_processed) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.total_bytes_processed) - query_stats = statistics['query'] = {} + query_stats = 
statistics["query"] = {} self.assertIsNone(job.total_bytes_processed) - query_stats['totalBytesProcessed'] = str(total_bytes) + query_stats["totalBytesProcessed"] = str(total_bytes) self.assertEqual(job.total_bytes_processed, total_bytes) def test_total_bytes_billed(self): @@ -3915,13 +3664,13 @@ def test_total_bytes_billed(self): job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.total_bytes_billed) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.total_bytes_billed) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.total_bytes_billed) - query_stats['totalBytesBilled'] = str(total_bytes) + query_stats["totalBytesBilled"] = str(total_bytes) self.assertEqual(job.total_bytes_billed, total_bytes) def test_billing_tier(self): @@ -3930,13 +3679,13 @@ def test_billing_tier(self): job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.billing_tier) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.billing_tier) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.billing_tier) - query_stats['billingTier'] = billing_tier + query_stats["billingTier"] = billing_tier self.assertEqual(job.billing_tier, billing_tier) def test_cache_hit(self): @@ -3944,52 +3693,52 @@ def test_cache_hit(self): job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.cache_hit) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.cache_hit) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.cache_hit) - query_stats['cacheHit'] = True + query_stats["cacheHit"] = True self.assertTrue(job.cache_hit) def test_ddl_operation_performed(self): - op = 'SKIP' + op = "SKIP" client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.ddl_operation_performed) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.ddl_operation_performed) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.ddl_operation_performed) - query_stats['ddlOperationPerformed'] = op + query_stats["ddlOperationPerformed"] = op self.assertEqual(job.ddl_operation_performed, op) def test_ddl_target_table(self): from google.cloud.bigquery.table import TableReference ref_table = { - 'projectId': self.PROJECT, - 'datasetId': 'ddl_ds', - 'tableId': 'targettable', + "projectId": self.PROJECT, + "datasetId": "ddl_ds", + "tableId": "targettable", } client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.ddl_target_table) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.ddl_target_table) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.ddl_target_table) - query_stats['ddlTargetTable'] = ref_table + query_stats["ddlTargetTable"] = ref_table self.assertIsInstance(job.ddl_target_table, TableReference) - self.assertEqual(job.ddl_target_table.table_id, 'targettable') - self.assertEqual(job.ddl_target_table.dataset_id, 'ddl_ds') + self.assertEqual(job.ddl_target_table.table_id, 
"targettable") + self.assertEqual(job.ddl_target_table.dataset_id, "ddl_ds") self.assertEqual(job.ddl_target_table.project, self.PROJECT) def test_num_dml_affected_rows(self): @@ -3998,13 +3747,13 @@ def test_num_dml_affected_rows(self): job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.num_dml_affected_rows) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.num_dml_affected_rows) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.num_dml_affected_rows) - query_stats['numDmlAffectedRows'] = str(num_rows) + query_stats["numDmlAffectedRows"] = str(num_rows) self.assertEqual(job.num_dml_affected_rows, num_rows) def test_slot_millis(self): @@ -4013,97 +3762,93 @@ def test_slot_millis(self): job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.slot_millis) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.slot_millis) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.slot_millis) - query_stats['totalSlotMs'] = millis + query_stats["totalSlotMs"] = millis self.assertEqual(job.slot_millis, millis) def test_statement_type(self): - statement_type = 'SELECT' + statement_type = "SELECT" client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.statement_type) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.statement_type) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.statement_type) - query_stats['statementType'] = statement_type + query_stats["statementType"] = statement_type self.assertEqual(job.statement_type, statement_type) def test_referenced_tables(self): from google.cloud.bigquery.table import TableReference - ref_tables_resource = [{ - 'projectId': self.PROJECT, - 'datasetId': 'dataset', - 'tableId': 'local1', - }, { - - 'projectId': self.PROJECT, - 'datasetId': 'dataset', - 'tableId': 'local2', - }, { - - 'projectId': 'other-project-123', - 'datasetId': 'other-dataset', - 'tableId': 'other-table', - }] + ref_tables_resource = [ + {"projectId": self.PROJECT, "datasetId": "dataset", "tableId": "local1"}, + {"projectId": self.PROJECT, "datasetId": "dataset", "tableId": "local2"}, + { + "projectId": "other-project-123", + "datasetId": "other-dataset", + "tableId": "other-table", + }, + ] client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertEqual(job.referenced_tables, []) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertEqual(job.referenced_tables, []) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertEqual(job.referenced_tables, []) - query_stats['referencedTables'] = ref_tables_resource + query_stats["referencedTables"] = ref_tables_resource local1, local2, remote = job.referenced_tables self.assertIsInstance(local1, TableReference) - self.assertEqual(local1.table_id, 'local1') - self.assertEqual(local1.dataset_id, 'dataset') + self.assertEqual(local1.table_id, "local1") + self.assertEqual(local1.dataset_id, "dataset") self.assertEqual(local1.project, self.PROJECT) self.assertIsInstance(local2, TableReference) - self.assertEqual(local2.table_id, 
'local2') - self.assertEqual(local2.dataset_id, 'dataset') + self.assertEqual(local2.table_id, "local2") + self.assertEqual(local2.dataset_id, "dataset") self.assertEqual(local2.project, self.PROJECT) self.assertIsInstance(remote, TableReference) - self.assertEqual(remote.table_id, 'other-table') - self.assertEqual(remote.dataset_id, 'other-dataset') - self.assertEqual(remote.project, 'other-project-123') + self.assertEqual(remote.table_id, "other-table") + self.assertEqual(remote.dataset_id, "other-dataset") + self.assertEqual(remote.project, "other-project-123") def test_timeline(self): - timeline_resource = [{ - 'elapsedMs': 1, - 'activeUnits': 22, - 'pendingUnits': 33, - 'completedUnits': 44, - 'totalSlotMs': 101, - }] + timeline_resource = [ + { + "elapsedMs": 1, + "activeUnits": 22, + "pendingUnits": 33, + "completedUnits": 44, + "totalSlotMs": 101, + } + ] client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertEqual(job.timeline, []) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertEqual(job.timeline, []) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertEqual(job.timeline, []) - query_stats['timeline'] = timeline_resource + query_stats["timeline"] = timeline_resource self.assertEqual(len(job.timeline), len(timeline_resource)) self.assertEqual(job.timeline[0].elapsed_ms, 1) @@ -4117,75 +3862,56 @@ def test_undeclared_query_parameters(self): from google.cloud.bigquery.query import ScalarQueryParameter from google.cloud.bigquery.query import StructQueryParameter - undeclared = [{ - 'name': 'my_scalar', - 'parameterType': { - 'type': 'STRING', - }, - 'parameterValue': { - 'value': 'value', + undeclared = [ + { + "name": "my_scalar", + "parameterType": {"type": "STRING"}, + "parameterValue": {"value": "value"}, }, - }, { - 'name': 'my_array', - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'INT64', + { + "name": "my_array", + "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + "parameterValue": { + "arrayValues": [{"value": "1066"}, {"value": "1745"}] }, }, - 'parameterValue': { - 'arrayValues': [ - {'value': '1066'}, - {'value': '1745'}, - ], - }, - }, { - 'name': 'my_struct', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [{ - 'name': 'count', - 'type': { - 'type': 'INT64', - } - }], - }, - 'parameterValue': { - 'structValues': { - 'count': { - 'value': '123', - }, - } + { + "name": "my_struct", + "parameterType": { + "type": "STRUCT", + "structTypes": [{"name": "count", "type": {"type": "INT64"}}], + }, + "parameterValue": {"structValues": {"count": {"value": "123"}}}, }, - }] + ] client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertEqual(job.undeclared_query_parameters, []) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertEqual(job.undeclared_query_parameters, []) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertEqual(job.undeclared_query_parameters, []) - query_stats['undeclaredQueryParameters'] = undeclared + query_stats["undeclaredQueryParameters"] = undeclared scalar, array, struct = job.undeclared_query_parameters self.assertIsInstance(scalar, ScalarQueryParameter) - self.assertEqual(scalar.name, 'my_scalar') - self.assertEqual(scalar.type_, 'STRING') - self.assertEqual(scalar.value, 'value') + 
self.assertEqual(scalar.name, "my_scalar") + self.assertEqual(scalar.type_, "STRING") + self.assertEqual(scalar.value, "value") self.assertIsInstance(array, ArrayQueryParameter) - self.assertEqual(array.name, 'my_array') - self.assertEqual(array.array_type, 'INT64') + self.assertEqual(array.name, "my_array") + self.assertEqual(array.array_type, "INT64") self.assertEqual(array.values, [1066, 1745]) self.assertIsInstance(struct, StructQueryParameter) - self.assertEqual(struct.name, 'my_struct') - self.assertEqual(struct.struct_types, {'count': 'INT64'}) - self.assertEqual(struct.struct_values, {'count': 123}) + self.assertEqual(struct.name, "my_struct") + self.assertEqual(struct.struct_types, {"count": "INT64"}) + self.assertEqual(struct.struct_values, {"count": 123}) def test_estimated_bytes_processed(self): est_bytes = 123456 @@ -4194,23 +3920,20 @@ def test_estimated_bytes_processed(self): job = self._make_one(self.JOB_ID, self.QUERY, client) self.assertIsNone(job.estimated_bytes_processed) - statistics = job._properties['statistics'] = {} + statistics = job._properties["statistics"] = {} self.assertIsNone(job.estimated_bytes_processed) - query_stats = statistics['query'] = {} + query_stats = statistics["query"] = {} self.assertIsNone(job.estimated_bytes_processed) - query_stats['estimatedBytesProcessed'] = str(est_bytes) + query_stats["estimatedBytesProcessed"] = str(est_bytes) self.assertEqual(job.estimated_bytes_processed, est_bytes) def test_result(self): query_resource = { - 'jobComplete': True, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'schema': {'fields': [{'name': 'col1', 'type': 'STRING'}]}, + "jobComplete": True, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, } connection = _make_connection(query_resource, query_resource) client = _make_client(self.PROJECT, connection=connection) @@ -4224,12 +3947,9 @@ def test_result(self): def test_result_w_empty_schema(self): # Destination table may have no schema for some DDL and DML queries. 
query_resource = { - 'jobComplete': True, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'schema': {'fields': []}, + "jobComplete": True, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "schema": {"fields": []}, } connection = _make_connection(query_resource, query_resource) client = _make_client(self.PROJECT, connection=connection) @@ -4243,20 +3963,21 @@ def test_result_w_empty_schema(self): def test_result_invokes_begins(self): begun_resource = self._make_resource() incomplete_resource = { - 'jobComplete': False, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'schema': {'fields': [{'name': 'col1', 'type': 'STRING'}]}, + "jobComplete": False, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, } query_resource = copy.deepcopy(incomplete_resource) - query_resource['jobComplete'] = True + query_resource["jobComplete"] = True done_resource = copy.deepcopy(begun_resource) - done_resource['status'] = {'state': 'DONE'} + done_resource["status"] = {"state": "DONE"} connection = _make_connection( - begun_resource, incomplete_resource, query_resource, done_resource, - query_resource) + begun_resource, + incomplete_resource, + query_resource, + done_resource, + query_resource, + ) client = _make_client(project=self.PROJECT, connection=connection) job = self._make_one(self.JOB_ID, self.QUERY, client) @@ -4266,24 +3987,20 @@ def test_result_invokes_begins(self): begin_request = connection.api_request.call_args_list[0] query_request = connection.api_request.call_args_list[2] reload_request = connection.api_request.call_args_list[3] - self.assertEqual(begin_request[1]['method'], 'POST') - self.assertEqual(query_request[1]['method'], 'GET') - self.assertEqual(reload_request[1]['method'], 'GET') + self.assertEqual(begin_request[1]["method"], "POST") + self.assertEqual(query_request[1]["method"], "GET") + self.assertEqual(reload_request[1]["method"], "GET") def test_result_w_timeout(self): begun_resource = self._make_resource() query_resource = { - 'jobComplete': True, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'schema': {'fields': [{'name': 'col1', 'type': 'STRING'}]}, + "jobComplete": True, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, } done_resource = copy.deepcopy(begun_resource) - done_resource['status'] = {'state': 'DONE'} - connection = _make_connection( - begun_resource, query_resource, done_resource) + done_resource["status"] = {"state": "DONE"} + connection = _make_connection(begun_resource, query_resource, done_resource) client = _make_client(project=self.PROJECT, connection=connection) job = self._make_one(self.JOB_ID, self.QUERY, client) @@ -4293,13 +4010,14 @@ def test_result_w_timeout(self): begin_request = connection.api_request.call_args_list[0] query_request = connection.api_request.call_args_list[1] reload_request = connection.api_request.call_args_list[2] - self.assertEqual(begin_request[1]['method'], 'POST') - self.assertEqual(query_request[1]['method'], 'GET') + self.assertEqual(begin_request[1]["method"], "POST") + self.assertEqual(query_request[1]["method"], "GET") self.assertEqual( - query_request[1]['path'], - '/projects/{}/queries/{}'.format(self.PROJECT, self.JOB_ID)) - self.assertEqual(query_request[1]['query_params']['timeoutMs'], 900) - self.assertEqual(reload_request[1]['method'], 'GET') 
+ query_request[1]["path"], + "/projects/{}/queries/{}".format(self.PROJECT, self.JOB_ID), + ) + self.assertEqual(query_request[1]["query_params"]["timeoutMs"], 900) + self.assertEqual(reload_request[1]["method"], "GET") def test_result_error(self): from google.cloud import exceptions @@ -4307,15 +4025,15 @@ def test_result_error(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, self.QUERY, client) error_result = { - 'debugInfo': 'DEBUG', - 'location': 'LOCATION', - 'message': 'MESSAGE', - 'reason': 'invalid' + "debugInfo": "DEBUG", + "location": "LOCATION", + "message": "MESSAGE", + "reason": "invalid", } - job._properties['status'] = { - 'errorResult': error_result, - 'errors': [error_result], - 'state': 'DONE' + job._properties["status"] = { + "errorResult": error_result, + "errors": [error_result], + "state": "DONE", } job._set_future_result() @@ -4329,45 +4047,42 @@ def test_begin_w_bound_client(self): from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.job import QueryJobConfig - PATH = '/projects/%s/jobs' % (self.PROJECT,) - DS_ID = 'DATASET' + PATH = "/projects/%s/jobs" % (self.PROJECT,) + DS_ID = "DATASET" RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) config = QueryJobConfig() config.default_dataset = DatasetReference(self.PROJECT, DS_ID) - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=config) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) job._begin() self.assertIsNone(job.default_dataset) self.assertEqual(job.udf_resources, []) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'query': { - 'query': self.QUERY, - 'useLegacySql': False, - 'defaultDataset': { - 'projectId': self.PROJECT, - 'datasetId': DS_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "query": { + "query": self.QUERY, + "useLegacySql": False, + "defaultDataset": { + "projectId": self.PROJECT, + "datasetId": DS_ID, }, - }, + } }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_alternate_client(self): @@ -4378,36 +4093,31 @@ def test_begin_w_alternate_client(self): from google.cloud.bigquery.job import SchemaUpdateOption from google.cloud.bigquery.job import WriteDisposition - PATH = '/projects/%s/jobs' % (self.PROJECT,) - TABLE = 'TABLE' - DS_ID = 'DATASET' + PATH = "/projects/%s/jobs" % (self.PROJECT,) + TABLE = "TABLE" + DS_ID = "DATASET" RESOURCE = self._make_resource(ended=True) QUERY_CONFIGURATION = { - 'query': self.QUERY, - 'allowLargeResults': True, - 'createDisposition': CreateDisposition.CREATE_NEVER, - 'defaultDataset': { - 'projectId': self.PROJECT, - 'datasetId': DS_ID, - }, - 'destinationTable': { - 'projectId': self.PROJECT, - 'datasetId': DS_ID, - 'tableId': TABLE, + "query": self.QUERY, + "allowLargeResults": True, + "createDisposition": CreateDisposition.CREATE_NEVER, + "defaultDataset": {"projectId": self.PROJECT, "datasetId": DS_ID}, + "destinationTable": { + "projectId": self.PROJECT, + 
"datasetId": DS_ID, + "tableId": TABLE, }, - 'flattenResults': True, - 'priority': QueryPriority.INTERACTIVE, - 'useQueryCache': True, - 'useLegacySql': True, - 'writeDisposition': WriteDisposition.WRITE_TRUNCATE, - 'maximumBillingTier': 4, - 'maximumBytesBilled': '123456', - 'schemaUpdateOptions': [ - SchemaUpdateOption.ALLOW_FIELD_RELAXATION, - ] + "flattenResults": True, + "priority": QueryPriority.INTERACTIVE, + "useQueryCache": True, + "useLegacySql": True, + "writeDisposition": WriteDisposition.WRITE_TRUNCATE, + "maximumBillingTier": 4, + "maximumBytesBilled": "123456", + "schemaUpdateOptions": [SchemaUpdateOption.ALLOW_FIELD_RELAXATION], } - RESOURCE['configuration']['query'] = QUERY_CONFIGURATION - RESOURCE['configuration']['dryRun'] = True + RESOURCE["configuration"]["query"] = QUERY_CONFIGURATION + RESOURCE["configuration"]["dryRun"] = True conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection(RESOURCE) @@ -4428,46 +4138,38 @@ def test_begin_w_alternate_client(self): config.use_query_cache = True config.write_disposition = WriteDisposition.WRITE_TRUNCATE config.maximum_bytes_billed = 123456 - config.schema_update_options = [ - SchemaUpdateOption.ALLOW_FIELD_RELAXATION, - ] - job = self._make_one( - self.JOB_ID, self.QUERY, client1, job_config=config) + config.schema_update_options = [SchemaUpdateOption.ALLOW_FIELD_RELAXATION] + job = self._make_one(self.JOB_ID, self.QUERY, client1, job_config=config) job._begin(client=client2) conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'dryRun': True, - 'query': QUERY_CONFIGURATION, - }, - }) + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": {"dryRun": True, "query": QUERY_CONFIGURATION}, + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_udf(self): from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.query import UDFResource - RESOURCE_URI = 'gs://some-bucket/js/lib.js' + RESOURCE_URI = "gs://some-bucket/js/lib.js" INLINE_UDF_CODE = 'var someCode = "here";' - PATH = '/projects/%s/jobs' % (self.PROJECT,) + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] - RESOURCE['configuration']['query']['userDefinedFunctionResources'] = [ - {'resourceUri': RESOURCE_URI}, - {'inlineCode': INLINE_UDF_CODE}, + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] + RESOURCE["configuration"]["query"]["userDefinedFunctionResources"] = [ + {"resourceUri": RESOURCE_URI}, + {"inlineCode": INLINE_UDF_CODE}, ] conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) @@ -4478,138 +4180,118 @@ def test_begin_w_udf(self): config = QueryJobConfig() config.udf_resources = udf_resources config.use_legacy_sql = True - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=config) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) job._begin() self.assertEqual(job.udf_resources, udf_resources) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 
'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'query': { - 'query': self.QUERY, - 'useLegacySql': True, - 'userDefinedFunctionResources': [ - {'resourceUri': RESOURCE_URI}, - {'inlineCode': INLINE_UDF_CODE}, - ] - }, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "query": { + "query": self.QUERY, + "useLegacySql": True, + "userDefinedFunctionResources": [ + {"resourceUri": RESOURCE_URI}, + {"inlineCode": INLINE_UDF_CODE}, + ], + } }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_named_query_parameter(self): from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.query import ScalarQueryParameter - query_parameters = [ScalarQueryParameter('foo', 'INT64', 123)] - PATH = '/projects/%s/jobs' % (self.PROJECT,) + query_parameters = [ScalarQueryParameter("foo", "INT64", 123)] + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] - config = RESOURCE['configuration']['query'] - config['parameterMode'] = 'NAMED' - config['queryParameters'] = [ + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] + config = RESOURCE["configuration"]["query"] + config["parameterMode"] = "NAMED" + config["queryParameters"] = [ { - 'name': 'foo', - 'parameterType': { - 'type': 'INT64', - }, - 'parameterValue': { - 'value': '123', - }, - }, + "name": "foo", + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": "123"}, + } ] conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) jconfig = QueryJobConfig() jconfig.query_parameters = query_parameters - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=jconfig) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=jconfig) job._begin() self.assertEqual(job.query_parameters, query_parameters) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'query': { - 'query': self.QUERY, - 'useLegacySql': False, - 'parameterMode': 'NAMED', - 'queryParameters': config['queryParameters'], - }, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "query": { + "query": self.QUERY, + "useLegacySql": False, + "parameterMode": "NAMED", + "queryParameters": config["queryParameters"], + } }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_positional_query_parameter(self): from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.query import ScalarQueryParameter - query_parameters = [ScalarQueryParameter.positional('INT64', 123)] - PATH = '/projects/%s/jobs' % (self.PROJECT,) + query_parameters = [ScalarQueryParameter.positional("INT64", 123)] + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] - config = RESOURCE['configuration']['query'] - config['parameterMode'] = 'POSITIONAL' - config['queryParameters'] = [ - { - 'parameterType': { - 'type': 'INT64', - }, - 'parameterValue': { - 'value': '123', - }, 
- }, + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] + config = RESOURCE["configuration"]["query"] + config["parameterMode"] = "POSITIONAL" + config["queryParameters"] = [ + {"parameterType": {"type": "INT64"}, "parameterValue": {"value": "123"}} ] conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) jconfig = QueryJobConfig() jconfig.query_parameters = query_parameters - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=jconfig) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=jconfig) job._begin() self.assertEqual(job.query_parameters, query_parameters) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'query': { - 'query': self.QUERY, - 'useLegacySql': False, - 'parameterMode': 'POSITIONAL', - 'queryParameters': config['queryParameters'], - }, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "query": { + "query": self.QUERY, + "useLegacySql": False, + "parameterMode": "POSITIONAL", + "queryParameters": config["queryParameters"], + } }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_begin_w_table_defs(self): @@ -4618,47 +4300,44 @@ def test_begin_w_table_defs(self): from google.cloud.bigquery.external_config import BigtableColumn from google.cloud.bigquery.external_config import BigtableColumnFamily - PATH = '/projects/%s/jobs' % (self.PROJECT,) + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] - bt_config = ExternalConfig('BIGTABLE') + bt_config = ExternalConfig("BIGTABLE") bt_config.ignore_unknown_values = True bt_config.options.read_rowkey_as_string = True cf = BigtableColumnFamily() - cf.family_id = 'cf' + cf.family_id = "cf" col = BigtableColumn() - col.field_name = 'fn' + col.field_name = "fn" cf.columns = [col] bt_config.options.column_families = [cf] BT_CONFIG_RESOURCE = { - 'sourceFormat': 'BIGTABLE', - 'ignoreUnknownValues': True, - 'bigtableOptions': { - 'readRowkeyAsString': True, - 'columnFamilies': [{ - 'familyId': 'cf', - 'columns': [{'fieldName': 'fn'}], - }], + "sourceFormat": "BIGTABLE", + "ignoreUnknownValues": True, + "bigtableOptions": { + "readRowkeyAsString": True, + "columnFamilies": [ + {"familyId": "cf", "columns": [{"fieldName": "fn"}]} + ], }, } CSV_CONFIG_RESOURCE = { - 'sourceFormat': 'CSV', - 'maxBadRecords': 8, - 'csvOptions': { - 'allowJaggedRows': True, - }, + "sourceFormat": "CSV", + "maxBadRecords": 8, + "csvOptions": {"allowJaggedRows": True}, } - csv_config = ExternalConfig('CSV') + csv_config = ExternalConfig("CSV") csv_config.max_bad_records = 8 csv_config.options.allow_jagged_rows = True - bt_table = 'bigtable-table' - csv_table = 'csv-table' - RESOURCE['configuration']['query']['tableDefinitions'] = { + bt_table = "bigtable-table" + csv_table = "csv-table" + RESOURCE["configuration"]["query"]["tableDefinitions"] = { bt_table: BT_CONFIG_RESOURCE, csv_table: CSV_CONFIG_RESOURCE, } @@ -4666,77 +4345,65 @@ def test_begin_w_table_defs(self): conn = _make_connection(RESOURCE) 
client = _make_client(project=self.PROJECT, connection=conn) config = QueryJobConfig() - config.table_definitions = { - bt_table: bt_config, - csv_table: csv_config, - } + config.table_definitions = {bt_table: bt_config, csv_table: csv_config} config.use_legacy_sql = True - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=config) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) job._begin() conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'configuration': { - 'query': { - 'query': self.QUERY, - 'useLegacySql': True, - 'tableDefinitions': { + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "query": { + "query": self.QUERY, + "useLegacySql": True, + "tableDefinitions": { bt_table: BT_CONFIG_RESOURCE, csv_table: CSV_CONFIG_RESOURCE, }, - }, + } }, - }) + }, + ) self._verifyResourceProperties(job, want_resource) def test_dry_run_query(self): from google.cloud.bigquery.job import QueryJobConfig - PATH = '/projects/%s/jobs' % (self.PROJECT,) + PATH = "/projects/%s/jobs" % (self.PROJECT,) RESOURCE = self._make_resource() # Ensure None for missing server-set props - del RESOURCE['statistics']['creationTime'] - del RESOURCE['etag'] - del RESOURCE['selfLink'] - del RESOURCE['user_email'] - RESOURCE['configuration']['dryRun'] = True + del RESOURCE["statistics"]["creationTime"] + del RESOURCE["etag"] + del RESOURCE["selfLink"] + del RESOURCE["user_email"] + RESOURCE["configuration"]["dryRun"] = True conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) config = QueryJobConfig() config.dry_run = True - job = self._make_one( - self.JOB_ID, self.QUERY, client, job_config=config) + job = self._make_one(self.JOB_ID, self.QUERY, client, job_config=config) job._begin() self.assertEqual(job.udf_resources, []) conn.api_request.assert_called_once_with( - method='POST', + method="POST", path=PATH, data={ - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "configuration": { + "query": {"query": self.QUERY, "useLegacySql": False}, + "dryRun": True, }, - 'configuration': { - 'query': { - 'query': self.QUERY, - 'useLegacySql': False, - }, - 'dryRun': True, - }, - }) + }, + ) self._verifyResourceProperties(job, RESOURCE) def test_exists_miss_w_bound_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn = _make_connection() client = _make_client(project=self.PROJECT, connection=conn) job = self._make_one(self.JOB_ID, self.QUERY, client) @@ -4744,12 +4411,11 @@ def test_exists_miss_w_bound_client(self): self.assertFalse(job.exists()) conn.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + method="GET", path=PATH, query_params={"fields": "id"} + ) def test_exists_hit_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _make_connection({}) @@ -4760,17 +4426,16 @@ def test_exists_hit_w_alternate_client(self): conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', - path=PATH, - query_params={'fields': 'id'}) + 
method="GET", path=PATH, query_params={"fields": "id"} + ) def test_reload_w_bound_client(self): from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.job import QueryJobConfig - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - DS_ID = 'DATASET' - DEST_TABLE = 'dest_table' + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) + DS_ID = "DATASET" + DEST_TABLE = "dest_table" RESOURCE = self._make_resource() conn = _make_connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) @@ -4785,19 +4450,20 @@ def test_reload_w_bound_client(self): self.assertNotEqual(job.destination, table_ref) conn.api_request.assert_called_once_with( - method='GET', path=PATH, query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) def test_reload_w_alternate_client(self): - PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - DS_ID = 'DATASET' - DEST_TABLE = 'dest_table' + PATH = "/projects/%s/jobs/%s" % (self.PROJECT, self.JOB_ID) + DS_ID = "DATASET" + DEST_TABLE = "dest_table" RESOURCE = self._make_resource() - q_config = RESOURCE['configuration']['query'] - q_config['destinationTable'] = { - 'projectId': self.PROJECT, - 'datasetId': DS_ID, - 'tableId': DEST_TABLE, + q_config = RESOURCE["configuration"]["query"] + q_config["destinationTable"] = { + "projectId": self.PROJECT, + "datasetId": DS_ID, + "tableId": DEST_TABLE, } conn1 = _make_connection() client1 = _make_client(project=self.PROJECT, connection=conn1) @@ -4809,36 +4475,35 @@ def test_reload_w_alternate_client(self): conn1.api_request.assert_not_called() conn2.api_request.assert_called_once_with( - method='GET', path=PATH, query_params={}) + method="GET", path=PATH, query_params={} + ) self._verifyResourceProperties(job, RESOURCE) - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): begun_resource = self._make_resource() query_resource = { - 'jobComplete': True, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, + "jobComplete": True, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "totalRows": "4", + "schema": { + "fields": [ + {"name": "name", "type": "STRING", "mode": "NULLABLE"}, + {"name": "age", "type": "INTEGER", "mode": "NULLABLE"}, + ] }, - 'totalRows': '4', - 'schema': { - 'fields': [ - {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'}, - {'name': 'age', 'type': 'INTEGER', 'mode': 'NULLABLE'}, - ], - }, - 'rows': [ - {'f': [{'v': 'Phred Phlyntstone'}, {'v': '32'}]}, - {'f': [{'v': 'Bharney Rhubble'}, {'v': '33'}]}, - {'f': [{'v': 'Wylma Phlyntstone'}, {'v': '29'}]}, - {'f': [{'v': 'Bhettye Rhubble'}, {'v': '27'}]}, + "rows": [ + {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, + {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, + {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, + {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, ], } done_resource = copy.deepcopy(begun_resource) - done_resource['status'] = {'state': 'DONE'} + done_resource["status"] = {"state": "DONE"} connection = _make_connection( - begun_resource, query_resource, done_resource, query_resource) + begun_resource, query_resource, done_resource, query_resource + ) client = _make_client(project=self.PROJECT, connection=connection) job = self._make_one(self.JOB_ID, self.QUERY, client) @@ -4846,25 +4511,21 @@ def test_to_dataframe(self): self.assertIsInstance(df, pandas.DataFrame) self.assertEqual(len(df), 4) # 
verify the number of rows - self.assertEqual(list(df), ['name', 'age']) # verify the column names + self.assertEqual(list(df), ["name", "age"]) # verify the column names def test_iter(self): import types begun_resource = self._make_resource() query_resource = { - 'jobComplete': True, - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - 'totalRows': '0', - 'schema': {'fields': [{'name': 'col1', 'type': 'STRING'}]}, + "jobComplete": True, + "jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}, + "totalRows": "0", + "schema": {"fields": [{"name": "col1", "type": "STRING"}]}, } done_resource = copy.deepcopy(begun_resource) - done_resource['status'] = {'state': 'DONE'} - connection = _make_connection( - begun_resource, query_resource, done_resource) + done_resource["status"] = {"state": "DONE"} + connection = _make_connection(begun_resource, query_resource, done_resource) client = _make_client(project=self.PROJECT, connection=connection) job = self._make_one(self.JOB_ID, self.QUERY, client) @@ -4872,8 +4533,8 @@ def test_iter(self): class TestQueryPlanEntryStep(unittest.TestCase, _Base): - KIND = 'KIND' - SUBSTEPS = ('SUB1', 'SUB2') + KIND = "KIND" + SUBSTEPS = ("SUB1", "SUB2") @staticmethod def _get_target_class(): @@ -4896,10 +4557,7 @@ def test_from_api_repr_empty(self): self.assertEqual(step.substeps, []) def test_from_api_repr_normal(self): - resource = { - 'kind': self.KIND, - 'substeps': self.SUBSTEPS, - } + resource = {"kind": self.KIND, "substeps": self.SUBSTEPS} klass = self._get_target_class() step = klass.from_api_repr(resource) self.assertEqual(step.kind, self.KIND) @@ -4911,7 +4569,7 @@ def test___eq___mismatched_type(self): def test___eq___mismatch_kind(self): step = self._make_one(self.KIND, self.SUBSTEPS) - other = self._make_one('OTHER', self.SUBSTEPS) + other = self._make_one("OTHER", self.SUBSTEPS) self.assertNotEqual(step, other) def test___eq___mismatch_substeps(self): @@ -4926,11 +4584,11 @@ def test___eq___hit(self): def test___eq___wrong_type(self): step = self._make_one(self.KIND, self.SUBSTEPS) - self.assertFalse(step == 'hello') + self.assertFalse(step == "hello") class TestQueryPlanEntry(unittest.TestCase, _Base): - NAME = 'NAME' + NAME = "NAME" ENTRY_ID = 1234 START_MS = 1522540800000 END_MS = 1522540804000 @@ -4955,12 +4613,12 @@ class TestQueryPlanEntry(unittest.TestCase, _Base): WRITE_RATIO_MAX = 2.30258 RECORDS_READ = 100 RECORDS_WRITTEN = 1 - STATUS = 'STATUS' + STATUS = "STATUS" SHUFFLE_OUTPUT_BYTES = 1024 SHUFFLE_OUTPUT_BYTES_SPILLED = 1 - START_RFC3339_MICROS = '2018-04-01T00:00:00.000000Z' - END_RFC3339_MICROS = '2018-04-01T00:00:04.000000Z' + START_RFC3339_MICROS = "2018-04-01T00:00:00.000000Z" + END_RFC3339_MICROS = "2018-04-01T00:00:04.000000Z" @staticmethod def _get_target_class(): @@ -5006,40 +4664,45 @@ def test_from_api_repr_empty(self): def test_from_api_repr_normal(self): from google.cloud.bigquery.job import QueryPlanEntryStep - steps = [QueryPlanEntryStep( - kind=TestQueryPlanEntryStep.KIND, - substeps=TestQueryPlanEntryStep.SUBSTEPS)] + steps = [ + QueryPlanEntryStep( + kind=TestQueryPlanEntryStep.KIND, + substeps=TestQueryPlanEntryStep.SUBSTEPS, + ) + ] resource = { - 'name': self.NAME, - 'id': self.ENTRY_ID, - 'inputStages': self.INPUT_STAGES, - 'startMs': self.START_MS, - 'endMs': self.END_MS, - 'waitMsAvg': self.WAIT_MS_AVG, - 'waitMsMax': self.WAIT_MS_MAX, - 'waitRatioAvg': self.WAIT_RATIO_AVG, - 'waitRatioMax': self.WAIT_RATIO_MAX, - 'readMsAvg': self.READ_MS_AVG, - 'readMsMax': self.READ_MS_MAX, - 
'readRatioAvg': self.READ_RATIO_AVG, - 'readRatioMax': self.READ_RATIO_MAX, - 'computeMsAvg': self.COMPUTE_MS_AVG, - 'computeMsMax': self.COMPUTE_MS_MAX, - 'computeRatioAvg': self.COMPUTE_RATIO_AVG, - 'computeRatioMax': self.COMPUTE_RATIO_MAX, - 'writeMsAvg': self.WRITE_MS_AVG, - 'writeMsMax': self.WRITE_MS_MAX, - 'writeRatioAvg': self.WRITE_RATIO_AVG, - 'writeRatioMax': self.WRITE_RATIO_MAX, - 'recordsRead': self.RECORDS_READ, - 'recordsWritten': self.RECORDS_WRITTEN, - 'status': self.STATUS, - 'shuffleOutputBytes': self.SHUFFLE_OUTPUT_BYTES, - 'shuffleOutputBytesSpilled': self.SHUFFLE_OUTPUT_BYTES_SPILLED, - 'steps': [{ - 'kind': TestQueryPlanEntryStep.KIND, - 'substeps': TestQueryPlanEntryStep.SUBSTEPS, - }] + "name": self.NAME, + "id": self.ENTRY_ID, + "inputStages": self.INPUT_STAGES, + "startMs": self.START_MS, + "endMs": self.END_MS, + "waitMsAvg": self.WAIT_MS_AVG, + "waitMsMax": self.WAIT_MS_MAX, + "waitRatioAvg": self.WAIT_RATIO_AVG, + "waitRatioMax": self.WAIT_RATIO_MAX, + "readMsAvg": self.READ_MS_AVG, + "readMsMax": self.READ_MS_MAX, + "readRatioAvg": self.READ_RATIO_AVG, + "readRatioMax": self.READ_RATIO_MAX, + "computeMsAvg": self.COMPUTE_MS_AVG, + "computeMsMax": self.COMPUTE_MS_MAX, + "computeRatioAvg": self.COMPUTE_RATIO_AVG, + "computeRatioMax": self.COMPUTE_RATIO_MAX, + "writeMsAvg": self.WRITE_MS_AVG, + "writeMsMax": self.WRITE_MS_MAX, + "writeRatioAvg": self.WRITE_RATIO_AVG, + "writeRatioMax": self.WRITE_RATIO_MAX, + "recordsRead": self.RECORDS_READ, + "recordsWritten": self.RECORDS_WRITTEN, + "status": self.STATUS, + "shuffleOutputBytes": self.SHUFFLE_OUTPUT_BYTES, + "shuffleOutputBytesSpilled": self.SHUFFLE_OUTPUT_BYTES_SPILLED, + "steps": [ + { + "kind": TestQueryPlanEntryStep.KIND, + "substeps": TestQueryPlanEntryStep.SUBSTEPS, + } + ], } klass = self._get_target_class() @@ -5065,14 +4728,12 @@ def test_start(self): klass = self._get_target_class() entry = klass.from_api_repr({}) - self.assertEqual( - entry.start, - None) + self.assertEqual(entry.start, None) - entry._properties['startMs'] = self.START_MS + entry._properties["startMs"] = self.START_MS self.assertEqual( - entry.start.strftime(_RFC3339_MICROS), - self.START_RFC3339_MICROS) + entry.start.strftime(_RFC3339_MICROS), self.START_RFC3339_MICROS + ) def test_end(self): from google.cloud._helpers import _RFC3339_MICROS @@ -5080,14 +4741,10 @@ def test_end(self): klass = self._get_target_class() entry = klass.from_api_repr({}) - self.assertEqual( - entry.end, - None) + self.assertEqual(entry.end, None) - entry._properties['endMs'] = self.END_MS - self.assertEqual( - entry.end.strftime(_RFC3339_MICROS), - self.END_RFC3339_MICROS) + entry._properties["endMs"] = self.END_MS + self.assertEqual(entry.end.strftime(_RFC3339_MICROS), self.END_RFC3339_MICROS) class TestTimelineEntry(unittest.TestCase, _Base): @@ -5100,6 +4757,7 @@ class TestTimelineEntry(unittest.TestCase, _Base): @staticmethod def _get_target_class(): from google.cloud.bigquery.job import TimelineEntry + return TimelineEntry def test_from_api_repr_empty(self): @@ -5113,11 +4771,11 @@ def test_from_api_repr_empty(self): def test_from_api_repr_normal(self): resource = { - 'elapsedMs': self.ELAPSED_MS, - 'activeUnits': self.ACTIVE_UNITS, - 'pendingUnits': self.PENDING_UNITS, - 'completedUnits': self.COMPLETED_UNITS, - 'totalSlotMs': self.SLOT_MILLIS, + "elapsedMs": self.ELAPSED_MS, + "activeUnits": self.ACTIVE_UNITS, + "pendingUnits": self.PENDING_UNITS, + "completedUnits": self.COMPLETED_UNITS, + "totalSlotMs": self.SLOT_MILLIS, } klass = 
self._get_target_class() diff --git a/bigquery/tests/unit/test_magics.py b/bigquery/tests/unit/test_magics.py index b0e08661ca00..5729146b053a 100644 --- a/bigquery/tests/unit/test_magics.py +++ b/bigquery/tests/unit/test_magics.py @@ -17,6 +17,7 @@ from concurrent import futures import pytest + try: import pandas except ImportError: # pragma: NO COVER @@ -34,10 +35,10 @@ from google.cloud.bigquery import magics -pytestmark = pytest.mark.skipif(IPython is None, reason='Requires `ipython`') +pytestmark = pytest.mark.skipif(IPython is None, reason="Requires `ipython`") -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def ipython(): config = tools.default_config() config.TerminalInteractiveShell.simple_prompt = True @@ -62,11 +63,13 @@ def test_context_credentials_auto_set_w_application_default_credentials(): assert magics.context._credentials is None assert magics.context._project is None - project = 'prahj-ekt' + project = "prahj-ekt" credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) default_patch = mock.patch( - 'google.auth.default', return_value=(credentials_mock, project)) + "google.auth.default", return_value=(credentials_mock, project) + ) with default_patch as default_mock: assert magics.context.credentials is credentials_mock assert magics.context.project == project @@ -75,12 +78,14 @@ def test_context_credentials_auto_set_w_application_default_credentials(): def test_context_credentials_and_project_can_be_set_explicitly(): - project1 = 'one-project-55564' - project2 = 'other-project-52569' + project1 = "one-project-55564" + project2 = "other-project-52569" credentials_mock = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) default_patch = mock.patch( - 'google.auth.default', return_value=(credentials_mock, project1)) + "google.auth.default", return_value=(credentials_mock, project1) + ) with default_patch as default_mock: magics.context.credentials = credentials_mock magics.context.project = project2 @@ -94,247 +99,264 @@ def test_context_credentials_and_project_can_be_set_explicitly(): def test__run_query(): magics.context._credentials = None - job_id = 'job_1234' - sql = 'SELECT 17' + job_id = "job_1234" + sql = "SELECT 17" responses = [ futures.TimeoutError, futures.TimeoutError, - [table.Row((17,), {'num': 0})] + [table.Row((17,), {"num": 0})], ] client_patch = mock.patch( - 'google.cloud.bigquery.magics.bigquery.Client', autospec=True) + "google.cloud.bigquery.magics.bigquery.Client", autospec=True + ) with client_patch as client_mock, io.capture_output() as captured: client_mock().query(sql).result.side_effect = responses client_mock().query(sql).job_id = job_id query_job = magics._run_query(client_mock(), sql) - lines = re.split('\n|\r', captured.stdout) + lines = re.split("\n|\r", captured.stdout) # Removes blanks & terminal code (result of display clearing) - updates = list(filter(lambda x: bool(x) and x != '\x1b[2K', lines)) + updates = list(filter(lambda x: bool(x) and x != "\x1b[2K", lines)) assert query_job.job_id == job_id expected_first_line = "Executing query with job ID: {}".format(job_id) assert updates[0] == expected_first_line execution_updates = updates[1:-1] assert len(execution_updates) == 3 # one update per API response - assert all(re.match("Query executing: .*s", line) - for line in execution_updates) + assert all(re.match("Query executing: .*s", line) 
for line in execution_updates) assert re.match("Query complete after .*s", updates[-1]) -@pytest.mark.usefixtures('ipython_interactive') +@pytest.mark.usefixtures("ipython_interactive") def test_extension_load(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") # verify that the magic is registered and has the correct source - magic = ip.magics_manager.magics['cell'].get('bigquery') - assert magic.__module__ == 'google.cloud.bigquery.magics' + magic = ip.magics_manager.magics["cell"].get("bigquery") + assert magic.__module__ == "google.cloud.bigquery.magics" -@pytest.mark.usefixtures('ipython_interactive') -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_without_optional_arguments(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) - sql = 'SELECT 17 AS num' - result = pandas.DataFrame([17], columns=['num']) + sql = "SELECT 17 AS num" + result = pandas.DataFrame([17], columns=["num"]) run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True) + google.cloud.bigquery.job.QueryJob, instance=True + ) query_job_mock.to_dataframe.return_value = result with run_query_patch as run_query_mock: run_query_mock.return_value = query_job_mock - result = ip.run_cell_magic('bigquery', '', sql) + result = ip.run_cell_magic("bigquery", "", sql) assert isinstance(result, pandas.DataFrame) - assert len(result) == len(result) # verify row count + assert len(result) == len(result) # verify row count assert list(result) == list(result) # verify column names -@pytest.mark.usefixtures('ipython_interactive') +@pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_with_legacy_sql(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) with run_query_patch as run_query_mock: - ip.run_cell_magic( - 'bigquery', '--use_legacy_sql', 'SELECT 17 AS num') + ip.run_cell_magic("bigquery", "--use_legacy_sql", "SELECT 17 AS num") job_config_used = run_query_mock.call_args_list[0][0][-1] assert job_config_used.use_legacy_sql is True -@pytest.mark.usefixtures('ipython_interactive') -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_with_result_saved_to_variable(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = 
mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) - sql = 'SELECT 17 AS num' - result = pandas.DataFrame([17], columns=['num']) - assert 'df' not in ip.user_ns + sql = "SELECT 17 AS num" + result = pandas.DataFrame([17], columns=["num"]) + assert "df" not in ip.user_ns run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True) + google.cloud.bigquery.job.QueryJob, instance=True + ) query_job_mock.to_dataframe.return_value = result with run_query_patch as run_query_mock: run_query_mock.return_value = query_job_mock - ip.run_cell_magic('bigquery', 'df', sql) + ip.run_cell_magic("bigquery", "df", sql) - assert 'df' in ip.user_ns # verify that variable exists - df = ip.user_ns['df'] - assert len(df) == len(result) # verify row count + assert "df" in ip.user_ns # verify that variable exists + df = ip.user_ns["df"] + assert len(df) == len(result) # verify row count assert list(df) == list(result) # verify column names -@pytest.mark.usefixtures('ipython_interactive') +@pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_does_not_clear_display_in_verbose_mode(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) clear_patch = mock.patch( - 'google.cloud.bigquery.magics.display.clear_output', autospec=True) + "google.cloud.bigquery.magics.display.clear_output", autospec=True + ) run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) with clear_patch as clear_mock, run_query_patch: - ip.run_cell_magic('bigquery', '--verbose', 'SELECT 17 as num') + ip.run_cell_magic("bigquery", "--verbose", "SELECT 17 as num") assert clear_mock.call_count == 0 -@pytest.mark.usefixtures('ipython_interactive') +@pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_clears_display_in_verbose_mode(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) clear_patch = mock.patch( - 'google.cloud.bigquery.magics.display.clear_output', autospec=True) + "google.cloud.bigquery.magics.display.clear_output", autospec=True + ) run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) with clear_patch as clear_mock, run_query_patch: - ip.run_cell_magic('bigquery', '', 'SELECT 17 as num') + ip.run_cell_magic("bigquery", "", "SELECT 17 as num") assert clear_mock.call_count == 1 -@pytest.mark.usefixtures('ipython_interactive') +@pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_with_project(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context._project = None credentials_mock = 
mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) default_patch = mock.patch( - 'google.auth.default', - return_value=(credentials_mock, 'general-project')) + "google.auth.default", return_value=(credentials_mock, "general-project") + ) run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) with run_query_patch as run_query_mock, default_patch: - ip.run_cell_magic( - 'bigquery', '--project=specific-project', 'SELECT 17 as num') + ip.run_cell_magic("bigquery", "--project=specific-project", "SELECT 17 as num") client_used = run_query_mock.call_args_list[0][0][0] - assert client_used.project == 'specific-project' + assert client_used.project == "specific-project" # context project should not change - assert magics.context.project == 'general-project' + assert magics.context.project == "general-project" -@pytest.mark.usefixtures('ipython_interactive') -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_with_string_params(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) - sql = 'SELECT @num AS num' - result = pandas.DataFrame([17], columns=['num']) - assert 'params_string_df' not in ip.user_ns + sql = "SELECT @num AS num" + result = pandas.DataFrame([17], columns=["num"]) + assert "params_string_df" not in ip.user_ns run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True) + google.cloud.bigquery.job.QueryJob, instance=True + ) query_job_mock.to_dataframe.return_value = result with run_query_patch as run_query_mock: run_query_mock.return_value = query_job_mock - ip.run_cell_magic( - 'bigquery', 'params_string_df --params {"num":17}', sql) - run_query_mock.assert_called_once_with( - mock.ANY, sql.format(num=17), mock.ANY) + ip.run_cell_magic("bigquery", 'params_string_df --params {"num":17}', sql) + run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY) - assert 'params_string_df' in ip.user_ns # verify that the variable exists - df = ip.user_ns['params_string_df'] - assert len(df) == len(result) # verify row count - assert list(df) == list(result) # verify column names + assert "params_string_df" in ip.user_ns # verify that the variable exists + df = ip.user_ns["params_string_df"] + assert len(df) == len(result) # verify row count + assert list(df) == list(result) # verify column names -@pytest.mark.usefixtures('ipython_interactive') -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_with_dict_params(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, 
instance=True) + google.auth.credentials.Credentials, instance=True + ) - sql = 'SELECT @num AS num' - result = pandas.DataFrame([17], columns=['num']) - assert 'params_dict_df' not in ip.user_ns + sql = "SELECT @num AS num" + result = pandas.DataFrame([17], columns=["num"]) + assert "params_dict_df" not in ip.user_ns run_query_patch = mock.patch( - 'google.cloud.bigquery.magics._run_query', autospec=True) + "google.cloud.bigquery.magics._run_query", autospec=True + ) query_job_mock = mock.create_autospec( - google.cloud.bigquery.job.QueryJob, instance=True) + google.cloud.bigquery.job.QueryJob, instance=True + ) query_job_mock.to_dataframe.return_value = result with run_query_patch as run_query_mock: run_query_mock.return_value = query_job_mock params = {"num": 17} # Insert dictionary into user namespace so that it can be expanded - ip.user_ns['params'] = params - ip.run_cell_magic('bigquery', 'params_dict_df --params $params', sql) - run_query_mock.assert_called_once_with( - mock.ANY, sql.format(num=17), mock.ANY) + ip.user_ns["params"] = params + ip.run_cell_magic("bigquery", "params_dict_df --params $params", sql) + run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY) - assert 'params_dict_df' in ip.user_ns # verify that the variable exists - df = ip.user_ns['params_dict_df'] - assert len(df) == len(result) # verify row count - assert list(df) == list(result) # verify column names + assert "params_dict_df" in ip.user_ns # verify that the variable exists + df = ip.user_ns["params_dict_df"] + assert len(df) == len(result) # verify row count + assert list(df) == list(result) # verify column names -@pytest.mark.usefixtures('ipython_interactive') -@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.usefixtures("ipython_interactive") +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_bigquery_magic_with_improperly_formatted_params(): ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') + ip.extension_manager.load_extension("google.cloud.bigquery") magics.context.credentials = mock.create_autospec( - google.auth.credentials.Credentials, instance=True) + google.auth.credentials.Credentials, instance=True + ) - sql = 'SELECT @num AS num' + sql = "SELECT @num AS num" with pytest.raises(SyntaxError): - ip.run_cell_magic( - 'bigquery', '--params {17}', sql) + ip.run_cell_magic("bigquery", "--params {17}", sql) diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py index c262132f8e0c..f50335082349 100644 --- a/bigquery/tests/unit/test_query.py +++ b/bigquery/tests/unit/test_query.py @@ -19,7 +19,6 @@ class Test_UDFResource(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.query import UDFResource @@ -30,23 +29,21 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - udf = self._make_one('resourceUri', 'gs://some_bucket/some_file') - self.assertEqual(udf.udf_type, 'resourceUri') - self.assertEqual(udf.value, 'gs://some_bucket/some_file') + udf = self._make_one("resourceUri", "gs://some_bucket/some_file") + self.assertEqual(udf.udf_type, "resourceUri") + self.assertEqual(udf.value, "gs://some_bucket/some_file") def test___eq__(self): - udf = self._make_one('resourceUri', 'gs://some_bucket/some_file') + udf = self._make_one("resourceUri", "gs://some_bucket/some_file") self.assertEqual(udf, udf) self.assertNotEqual(udf, object()) - wrong_val = self._make_one( - 'resourceUri', 
'gs://some_bucket/other_file') + wrong_val = self._make_one("resourceUri", "gs://some_bucket/other_file") self.assertNotEqual(udf, wrong_val) - wrong_type = self._make_one('inlineCode', udf.value) + wrong_type = self._make_one("inlineCode", udf.value) self.assertNotEqual(udf, wrong_type) class Test__AbstractQueryParameter(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.query import _AbstractQueryParameter @@ -68,7 +65,6 @@ def test_to_api_virtual(self): class Test_ScalarQueryParameter(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.query import ScalarQueryParameter @@ -79,141 +75,108 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - param = self._make_one(name='foo', type_='INT64', value=123) - self.assertEqual(param.name, 'foo') - self.assertEqual(param.type_, 'INT64') + param = self._make_one(name="foo", type_="INT64", value=123) + self.assertEqual(param.name, "foo") + self.assertEqual(param.type_, "INT64") self.assertEqual(param.value, 123) def test___eq__(self): - param = self._make_one(name='foo', type_='INT64', value=123) + param = self._make_one(name="foo", type_="INT64", value=123) self.assertEqual(param, param) self.assertNotEqual(param, object()) - alias = self._make_one(name='bar', type_='INT64', value=123) + alias = self._make_one(name="bar", type_="INT64", value=123) self.assertNotEqual(param, alias) - wrong_type = self._make_one(name='foo', type_='FLOAT64', value=123.0) + wrong_type = self._make_one(name="foo", type_="FLOAT64", value=123.0) self.assertNotEqual(param, wrong_type) - wrong_val = self._make_one(name='foo', type_='INT64', value=234) + wrong_val = self._make_one(name="foo", type_="INT64", value=234) self.assertNotEqual(param, wrong_val) def test_positional(self): klass = self._get_target_class() - param = klass.positional(type_='INT64', value=123) + param = klass.positional(type_="INT64", value=123) self.assertEqual(param.name, None) - self.assertEqual(param.type_, 'INT64') + self.assertEqual(param.type_, "INT64") self.assertEqual(param.value, 123) def test_from_api_repr_w_name(self): RESOURCE = { - 'name': 'foo', - 'parameterType': { - 'type': 'INT64', - }, - 'parameterValue': { - 'value': 123, - }, + "name": "foo", + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": 123}, } klass = self._get_target_class() param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, 'foo') - self.assertEqual(param.type_, 'INT64') + self.assertEqual(param.name, "foo") + self.assertEqual(param.type_, "INT64") self.assertEqual(param.value, 123) def test_from_api_repr_wo_name(self): RESOURCE = { - 'parameterType': { - 'type': 'INT64', - }, - 'parameterValue': { - 'value': '123', - }, + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": "123"}, } klass = self._get_target_class() param = klass.from_api_repr(RESOURCE) self.assertEqual(param.name, None) - self.assertEqual(param.type_, 'INT64') + self.assertEqual(param.type_, "INT64") self.assertEqual(param.value, 123) def test_to_api_repr_w_name(self): EXPECTED = { - 'name': 'foo', - 'parameterType': { - 'type': 'INT64', - }, - 'parameterValue': { - 'value': '123', - }, + "name": "foo", + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": "123"}, } - param = self._make_one(name='foo', type_='INT64', value=123) + param = self._make_one(name="foo", type_="INT64", value=123) self.assertEqual(param.to_api_repr(), EXPECTED) def 
test_to_api_repr_wo_name(self): EXPECTED = { - 'parameterType': { - 'type': 'INT64', - }, - 'parameterValue': { - 'value': '123', - }, + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": "123"}, } klass = self._get_target_class() - param = klass.positional(type_='INT64', value=123) + param = klass.positional(type_="INT64", value=123) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_float(self): EXPECTED = { - 'parameterType': { - 'type': 'FLOAT64', - }, - 'parameterValue': { - 'value': 12.345, - }, + "parameterType": {"type": "FLOAT64"}, + "parameterValue": {"value": 12.345}, } klass = self._get_target_class() - param = klass.positional(type_='FLOAT64', value=12.345) + param = klass.positional(type_="FLOAT64", value=12.345) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_numeric(self): EXPECTED = { - 'parameterType': { - 'type': 'NUMERIC', - }, - 'parameterValue': { - 'value': '123456789.123456789', - }, + "parameterType": {"type": "NUMERIC"}, + "parameterValue": {"value": "123456789.123456789"}, } klass = self._get_target_class() - param = klass.positional(type_='NUMERIC', - value='123456789.123456789') + param = klass.positional(type_="NUMERIC", value="123456789.123456789") self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_bool(self): EXPECTED = { - 'parameterType': { - 'type': 'BOOL', - }, - 'parameterValue': { - 'value': 'false', - }, + "parameterType": {"type": "BOOL"}, + "parameterValue": {"value": "false"}, } klass = self._get_target_class() - param = klass.positional(type_='BOOL', value=False) + param = klass.positional(type_="BOOL", value=False) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_timestamp_datetime(self): from google.cloud._helpers import UTC - STAMP = '2016-12-20 15:58:27.339328+00:00' + STAMP = "2016-12-20 15:58:27.339328+00:00" when = datetime.datetime(2016, 12, 20, 15, 58, 27, 339328, tzinfo=UTC) EXPECTED = { - 'parameterType': { - 'type': 'TIMESTAMP', - }, - 'parameterValue': { - 'value': STAMP, - }, + "parameterType": {"type": "TIMESTAMP"}, + "parameterValue": {"value": STAMP}, } klass = self._get_target_class() - param = klass.positional(type_='TIMESTAMP', value=when) + param = klass.positional(type_="TIMESTAMP", value=when) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_timestamp_micros(self): @@ -222,15 +185,11 @@ def test_to_api_repr_w_timestamp_micros(self): now = datetime.datetime.utcnow() seconds = _microseconds_from_datetime(now) / 1.0e6 EXPECTED = { - 'parameterType': { - 'type': 'TIMESTAMP', - }, - 'parameterValue': { - 'value': seconds, - }, + "parameterType": {"type": "TIMESTAMP"}, + "parameterValue": {"value": seconds}, } klass = self._get_target_class() - param = klass.positional(type_='TIMESTAMP', value=seconds) + param = klass.positional(type_="TIMESTAMP", value=seconds) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_datetime_datetime(self): @@ -238,15 +197,13 @@ def test_to_api_repr_w_datetime_datetime(self): now = datetime.datetime.utcnow() EXPECTED = { - 'parameterType': { - 'type': 'DATETIME', - }, - 'parameterValue': { - 'value': _datetime_to_rfc3339(now)[:-1], # strip trailing 'Z' + "parameterType": {"type": "DATETIME"}, + "parameterValue": { + "value": _datetime_to_rfc3339(now)[:-1] # strip trailing 'Z' }, } klass = self._get_target_class() - param = klass.positional(type_='DATETIME', value=now) + param = klass.positional(type_="DATETIME", value=now) self.assertEqual(param.to_api_repr(), 
EXPECTED) def test_to_api_repr_w_datetime_string(self): @@ -255,105 +212,89 @@ def test_to_api_repr_w_datetime_string(self): now = datetime.datetime.utcnow() now_str = _datetime_to_rfc3339(now) EXPECTED = { - 'parameterType': { - 'type': 'DATETIME', - }, - 'parameterValue': { - 'value': now_str, - }, + "parameterType": {"type": "DATETIME"}, + "parameterValue": {"value": now_str}, } klass = self._get_target_class() - param = klass.positional(type_='DATETIME', value=now_str) + param = klass.positional(type_="DATETIME", value=now_str) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_date_date(self): today = datetime.date.today() EXPECTED = { - 'parameterType': { - 'type': 'DATE', - }, - 'parameterValue': { - 'value': today.isoformat(), - }, + "parameterType": {"type": "DATE"}, + "parameterValue": {"value": today.isoformat()}, } klass = self._get_target_class() - param = klass.positional(type_='DATE', value=today) + param = klass.positional(type_="DATE", value=today) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_date_string(self): today = datetime.date.today() - today_str = today.isoformat(), + today_str = (today.isoformat(),) EXPECTED = { - 'parameterType': { - 'type': 'DATE', - }, - 'parameterValue': { - 'value': today_str, - }, + "parameterType": {"type": "DATE"}, + "parameterValue": {"value": today_str}, } klass = self._get_target_class() - param = klass.positional(type_='DATE', value=today_str) + param = klass.positional(type_="DATE", value=today_str) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_unknown_type(self): EXPECTED = { - 'parameterType': { - 'type': 'UNKNOWN', - }, - 'parameterValue': { - 'value': 'unknown', - }, + "parameterType": {"type": "UNKNOWN"}, + "parameterValue": {"value": "unknown"}, } klass = self._get_target_class() - param = klass.positional(type_='UNKNOWN', value='unknown') + param = klass.positional(type_="UNKNOWN", value="unknown") self.assertEqual(param.to_api_repr(), EXPECTED) def test___eq___wrong_type(self): - field = self._make_one('test', 'STRING', 'value') + field = self._make_one("test", "STRING", "value") other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def test___eq___name_mismatch(self): - field = self._make_one('test', 'STRING', 'value') - other = self._make_one('other', 'STRING', 'value') + field = self._make_one("test", "STRING", "value") + other = self._make_one("other", "STRING", "value") self.assertNotEqual(field, other) def test___eq___field_type_mismatch(self): - field = self._make_one('test', 'STRING', None) - other = self._make_one('test', 'INT64', None) + field = self._make_one("test", "STRING", None) + other = self._make_one("test", "INT64", None) self.assertNotEqual(field, other) def test___eq___value_mismatch(self): - field = self._make_one('test', 'STRING', 'hello') - other = self._make_one('test', 'STRING', 'world') + field = self._make_one("test", "STRING", "hello") + other = self._make_one("test", "STRING", "world") self.assertNotEqual(field, other) def test___eq___hit(self): - field = self._make_one('test', 'STRING', 'gotcha') - other = self._make_one('test', 'STRING', 'gotcha') + field = self._make_one("test", "STRING", "gotcha") + other = self._make_one("test", "STRING", "gotcha") self.assertEqual(field, other) def test___ne___wrong_type(self): - field = self._make_one('toast', 'INT64', 13) + field = self._make_one("toast", "INT64", 13) other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def 
test___ne___same_value(self): - field1 = self._make_one('test', 'INT64', 12) - field2 = self._make_one('test', 'INT64', 12) + field1 = self._make_one("test", "INT64", 12) + field2 = self._make_one("test", "INT64", 12) # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = (field1 != field2) + comparison_val = field1 != field2 self.assertFalse(comparison_val) def test___ne___different_values(self): - field1 = self._make_one('test', 'INT64', 11) - field2 = self._make_one('test', 'INT64', 12) + field1 = self._make_one("test", "INT64", 11) + field2 = self._make_one("test", "INT64", 12) self.assertNotEqual(field1, field2) def test___repr__(self): - field1 = self._make_one('field1', 'STRING', 'value') + field1 = self._make_one("field1", "STRING", "value") expected = "ScalarQueryParameter('field1', 'STRING', 'value')" self.assertEqual(repr(field1), expected) @@ -365,7 +306,6 @@ def _make_subparam(name, type_, value): class Test_ArrayQueryParameter(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.query import ArrayQueryParameter @@ -376,119 +316,81 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - param = self._make_one(name='foo', array_type='INT64', values=[1, 2]) - self.assertEqual(param.name, 'foo') - self.assertEqual(param.array_type, 'INT64') + param = self._make_one(name="foo", array_type="INT64", values=[1, 2]) + self.assertEqual(param.name, "foo") + self.assertEqual(param.array_type, "INT64") self.assertEqual(param.values, [1, 2]) def test___eq__(self): - param = self._make_one(name='foo', array_type='INT64', values=[123]) + param = self._make_one(name="foo", array_type="INT64", values=[123]) self.assertEqual(param, param) self.assertNotEqual(param, object()) - alias = self._make_one(name='bar', array_type='INT64', values=[123]) + alias = self._make_one(name="bar", array_type="INT64", values=[123]) self.assertNotEqual(param, alias) - wrong_type = self._make_one( - name='foo', array_type='FLOAT64', values=[123.0]) + wrong_type = self._make_one(name="foo", array_type="FLOAT64", values=[123.0]) self.assertNotEqual(param, wrong_type) - wrong_val = self._make_one( - name='foo', array_type='INT64', values=[234]) + wrong_val = self._make_one(name="foo", array_type="INT64", values=[234]) self.assertNotEqual(param, wrong_val) def test_positional(self): klass = self._get_target_class() - param = klass.positional(array_type='INT64', values=[1, 2]) + param = klass.positional(array_type="INT64", values=[1, 2]) self.assertEqual(param.name, None) - self.assertEqual(param.array_type, 'INT64') + self.assertEqual(param.array_type, "INT64") self.assertEqual(param.values, [1, 2]) def test_from_api_repr_w_name(self): RESOURCE = { - 'name': 'foo', - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'INT64', - }, - }, - 'parameterValue': { - 'arrayValues': [ - { - 'value': '1', - }, - { - 'value': '2' - }, - ], - }, + "name": "foo", + "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, } klass = self._get_target_class() param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, 'foo') - self.assertEqual(param.array_type, 'INT64') + self.assertEqual(param.name, "foo") + self.assertEqual(param.array_type, "INT64") self.assertEqual(param.values, [1, 2]) def test_from_api_repr_wo_name(self): RESOURCE = { - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'INT64', - }, - }, - 
'parameterValue': { - 'arrayValues': [ - { - 'value': '1', - }, - { - 'value': '2' - }, - ], - }, + "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, } klass = self._get_target_class() param = klass.from_api_repr(RESOURCE) self.assertEqual(param.name, None) - self.assertEqual(param.array_type, 'INT64') + self.assertEqual(param.array_type, "INT64") self.assertEqual(param.values, [1, 2]) def test_from_api_repr_w_struct_type(self): from google.cloud.bigquery.query import StructQueryParameter RESOURCE = { - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'STRUCT', - 'structTypes': [ - { - 'name': 'name', - 'type': {'type': 'STRING'}, - }, - { - 'name': 'age', - 'type': {'type': 'INT64'}, - }, + "parameterType": { + "type": "ARRAY", + "arrayType": { + "type": "STRUCT", + "structTypes": [ + {"name": "name", "type": {"type": "STRING"}}, + {"name": "age", "type": {"type": "INT64"}}, ], }, }, - 'parameterValue': { - 'arrayValues': [ + "parameterValue": { + "arrayValues": [ { - 'structValues': { - 'name': {'value': 'Phred Phlyntstone'}, - 'age': {'value': '32'}, - }, + "structValues": { + "name": {"value": "Phred Phlyntstone"}, + "age": {"value": "32"}, + } }, { - 'structValues': { - 'name': { - 'value': 'Bharney Rhubbyl', - }, - 'age': {'value': '31'}, - }, + "structValues": { + "name": {"value": "Bharney Rhubbyl"}, + "age": {"value": "31"}, + } }, - ], + ] }, } @@ -496,162 +398,121 @@ def test_from_api_repr_w_struct_type(self): param = klass.from_api_repr(RESOURCE) phred = StructQueryParameter.positional( - _make_subparam('name', 'STRING', 'Phred Phlyntstone'), - _make_subparam('age', 'INT64', 32)) + _make_subparam("name", "STRING", "Phred Phlyntstone"), + _make_subparam("age", "INT64", 32), + ) bharney = StructQueryParameter.positional( - _make_subparam('name', 'STRING', 'Bharney Rhubbyl'), - _make_subparam('age', 'INT64', 31)) - self.assertEqual(param.array_type, 'STRUCT') + _make_subparam("name", "STRING", "Bharney Rhubbyl"), + _make_subparam("age", "INT64", 31), + ) + self.assertEqual(param.array_type, "STRUCT") self.assertEqual(param.values, [phred, bharney]) def test_to_api_repr_w_name(self): EXPECTED = { - 'name': 'foo', - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'INT64', - }, - }, - 'parameterValue': { - 'arrayValues': [ - { - 'value': '1', - }, - { - 'value': '2' - }, - ], - }, + "name": "foo", + "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, } - param = self._make_one(name='foo', array_type='INT64', values=[1, 2]) + param = self._make_one(name="foo", array_type="INT64", values=[1, 2]) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_wo_name(self): EXPECTED = { - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'INT64', - }, - }, - 'parameterValue': { - 'arrayValues': [ - { - 'value': '1', - }, - { - 'value': '2' - }, - ], - }, + "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + "parameterValue": {"arrayValues": [{"value": "1"}, {"value": "2"}]}, } klass = self._get_target_class() - param = klass.positional(array_type='INT64', values=[1, 2]) + param = klass.positional(array_type="INT64", values=[1, 2]) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_unknown_type(self): EXPECTED = { - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'UNKNOWN', - }, - }, - 'parameterValue': { - 
'arrayValues': [ - { - 'value': 'unknown', - } - ], - }, + "parameterType": {"type": "ARRAY", "arrayType": {"type": "UNKNOWN"}}, + "parameterValue": {"arrayValues": [{"value": "unknown"}]}, } klass = self._get_target_class() - param = klass.positional(array_type='UNKNOWN', values=['unknown']) + param = klass.positional(array_type="UNKNOWN", values=["unknown"]) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_record_type(self): from google.cloud.bigquery.query import StructQueryParameter EXPECTED = { - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'foo', 'type': {'type': 'STRING'}}, - {'name': 'bar', 'type': {'type': 'INT64'}}, + "parameterType": { + "type": "ARRAY", + "arrayType": { + "type": "STRUCT", + "structTypes": [ + {"name": "foo", "type": {"type": "STRING"}}, + {"name": "bar", "type": {"type": "INT64"}}, ], }, }, - 'parameterValue': { - 'arrayValues': [{ - 'structValues': { - 'foo': {'value': 'Foo'}, - 'bar': {'value': '123'}, - } - }] + "parameterValue": { + "arrayValues": [ + {"structValues": {"foo": {"value": "Foo"}, "bar": {"value": "123"}}} + ] }, } - one = _make_subparam('foo', 'STRING', 'Foo') - another = _make_subparam('bar', 'INT64', 123) + one = _make_subparam("foo", "STRING", "Foo") + another = _make_subparam("bar", "INT64", 123) struct = StructQueryParameter.positional(one, another) klass = self._get_target_class() - param = klass.positional(array_type='RECORD', values=[struct]) + param = klass.positional(array_type="RECORD", values=[struct]) self.assertEqual(param.to_api_repr(), EXPECTED) def test___eq___wrong_type(self): - field = self._make_one('test', 'STRING', ['value']) + field = self._make_one("test", "STRING", ["value"]) other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def test___eq___name_mismatch(self): - field = self._make_one('field', 'STRING', ['value']) - other = self._make_one('other', 'STRING', ['value']) + field = self._make_one("field", "STRING", ["value"]) + other = self._make_one("other", "STRING", ["value"]) self.assertNotEqual(field, other) def test___eq___field_type_mismatch(self): - field = self._make_one('test', 'STRING', []) - other = self._make_one('test', 'INT64', []) + field = self._make_one("test", "STRING", []) + other = self._make_one("test", "INT64", []) self.assertNotEqual(field, other) def test___eq___value_mismatch(self): - field = self._make_one('test', 'STRING', ['hello']) - other = self._make_one('test', 'STRING', ['hello', 'world']) + field = self._make_one("test", "STRING", ["hello"]) + other = self._make_one("test", "STRING", ["hello", "world"]) self.assertNotEqual(field, other) def test___eq___hit(self): - field = self._make_one('test', 'STRING', ['gotcha']) - other = self._make_one('test', 'STRING', ['gotcha']) + field = self._make_one("test", "STRING", ["gotcha"]) + other = self._make_one("test", "STRING", ["gotcha"]) self.assertEqual(field, other) def test___ne___wrong_type(self): - field = self._make_one('toast', 'INT64', [13]) + field = self._make_one("toast", "INT64", [13]) other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def test___ne___same_value(self): - field1 = self._make_one('test', 'INT64', [12]) - field2 = self._make_one('test', 'INT64', [12]) + field1 = self._make_one("test", "INT64", [12]) + field2 = self._make_one("test", "INT64", [12]) # unittest ``assertEqual`` uses ``==`` not ``!=``. 
- comparison_val = (field1 != field2) + comparison_val = field1 != field2 self.assertFalse(comparison_val) def test___ne___different_values(self): - field1 = self._make_one('test', 'INT64', [11]) - field2 = self._make_one('test', 'INT64', [12]) + field1 = self._make_one("test", "INT64", [11]) + field2 = self._make_one("test", "INT64", [12]) self.assertNotEqual(field1, field2) def test___repr__(self): - field1 = self._make_one('field1', 'STRING', ['value']) + field1 = self._make_one("field1", "STRING", ["value"]) expected = "ArrayQueryParameter('field1', 'STRING', ['value'])" self.assertEqual(repr(field1), expected) class Test_StructQueryParameter(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.query import StructQueryParameter @@ -662,105 +523,96 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - sub_1 = _make_subparam('bar', 'INT64', 123) - sub_2 = _make_subparam('baz', 'STRING', 'abc') - param = self._make_one('foo', sub_1, sub_2) - self.assertEqual(param.name, 'foo') - self.assertEqual(param.struct_types, {'bar': 'INT64', 'baz': 'STRING'}) - self.assertEqual(param.struct_values, {'bar': 123, 'baz': 'abc'}) + sub_1 = _make_subparam("bar", "INT64", 123) + sub_2 = _make_subparam("baz", "STRING", "abc") + param = self._make_one("foo", sub_1, sub_2) + self.assertEqual(param.name, "foo") + self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) + self.assertEqual(param.struct_values, {"bar": 123, "baz": "abc"}) def test___eq__(self): - sub_1 = _make_subparam('bar', 'INT64', 123) - sub_2 = _make_subparam('baz', 'STRING', 'abc') - sub_3 = _make_subparam('baz', 'STRING', 'def') - sub_1_float = _make_subparam('bar', 'FLOAT64', 123.0) - param = self._make_one('foo', sub_1, sub_2) + sub_1 = _make_subparam("bar", "INT64", 123) + sub_2 = _make_subparam("baz", "STRING", "abc") + sub_3 = _make_subparam("baz", "STRING", "def") + sub_1_float = _make_subparam("bar", "FLOAT64", 123.0) + param = self._make_one("foo", sub_1, sub_2) self.assertEqual(param, param) self.assertNotEqual(param, object()) - alias = self._make_one('bar', sub_1, sub_2) + alias = self._make_one("bar", sub_1, sub_2) self.assertNotEqual(param, alias) - wrong_type = self._make_one('foo', sub_1_float, sub_2) + wrong_type = self._make_one("foo", sub_1_float, sub_2) self.assertNotEqual(param, wrong_type) - wrong_val = self._make_one('foo', sub_2, sub_3) + wrong_val = self._make_one("foo", sub_2, sub_3) self.assertNotEqual(param, wrong_val) def test_positional(self): - sub_1 = _make_subparam('bar', 'INT64', 123) - sub_2 = _make_subparam('baz', 'STRING', 'abc') + sub_1 = _make_subparam("bar", "INT64", 123) + sub_2 = _make_subparam("baz", "STRING", "abc") klass = self._get_target_class() param = klass.positional(sub_1, sub_2) self.assertEqual(param.name, None) - self.assertEqual(param.struct_types, {'bar': 'INT64', 'baz': 'STRING'}) - self.assertEqual(param.struct_values, {'bar': 123, 'baz': 'abc'}) + self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) + self.assertEqual(param.struct_values, {"bar": 123, "baz": "abc"}) def test_from_api_repr_w_name(self): RESOURCE = { - 'name': 'foo', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'INT64'}}, - {'name': 'baz', 'type': {'type': 'STRING'}}, + "name": "foo", + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "INT64"}}, + {"name": "baz", "type": {"type": "STRING"}}, ], }, - 
'parameterValue': { - 'structValues': { - 'bar': {'value': 123}, - 'baz': {'value': 'abc'}, - }, + "parameterValue": { + "structValues": {"bar": {"value": 123}, "baz": {"value": "abc"}} }, } klass = self._get_target_class() param = klass.from_api_repr(RESOURCE) - self.assertEqual(param.name, 'foo') - self.assertEqual(param.struct_types, {'bar': 'INT64', 'baz': 'STRING'}) - self.assertEqual(param.struct_values, {'bar': 123, 'baz': 'abc'}) + self.assertEqual(param.name, "foo") + self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) + self.assertEqual(param.struct_values, {"bar": 123, "baz": "abc"}) def test_from_api_repr_wo_name(self): RESOURCE = { - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'INT64'}}, - {'name': 'baz', 'type': {'type': 'STRING'}}, + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "INT64"}}, + {"name": "baz", "type": {"type": "STRING"}}, ], }, - 'parameterValue': { - 'structValues': { - 'bar': {'value': 123}, - 'baz': {'value': 'abc'}, - }, + "parameterValue": { + "structValues": {"bar": {"value": 123}, "baz": {"value": "abc"}} }, } klass = self._get_target_class() param = klass.from_api_repr(RESOURCE) self.assertEqual(param.name, None) - self.assertEqual(param.struct_types, {'bar': 'INT64', 'baz': 'STRING'}) - self.assertEqual(param.struct_values, {'bar': 123, 'baz': 'abc'}) + self.assertEqual(param.struct_types, {"bar": "INT64", "baz": "STRING"}) + self.assertEqual(param.struct_values, {"bar": 123, "baz": "abc"}) def test_from_api_repr_w_nested_array(self): from google.cloud.bigquery.query import ArrayQueryParameter RESOURCE = { - 'name': 'foo', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'STRING'}}, - {'name': 'baz', 'type': { - 'type': 'ARRAY', - 'arrayType': {'type': 'INT64'}, - }}, + "name": "foo", + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "STRING"}}, + { + "name": "baz", + "type": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + }, ], }, - 'parameterValue': { - 'structValues': { - 'bar': {'value': 'abc'}, - 'baz': {'arrayValues': [ - {'value': '123'}, - {'value': '456'}, - ]}, - }, + "parameterValue": { + "structValues": { + "bar": {"value": "abc"}, + "baz": {"arrayValues": [{"value": "123"}, {"value": "456"}]}, + } }, } klass = self._get_target_class() @@ -768,34 +620,41 @@ def test_from_api_repr_w_nested_array(self): self.assertEqual( param, self._make_one( - 'foo', - _make_subparam('bar', 'STRING', 'abc'), - ArrayQueryParameter('baz', 'INT64', [123, 456]))) + "foo", + _make_subparam("bar", "STRING", "abc"), + ArrayQueryParameter("baz", "INT64", [123, 456]), + ), + ) def test_from_api_repr_w_nested_struct(self): RESOURCE = { - 'name': 'foo', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'STRING'}}, - {'name': 'baz', 'type': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'qux', 'type': {'type': 'INT64'}}, - {'name': 'spam', 'type': {'type': 'BOOL'}}, - ], - }}, + "name": "foo", + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "STRING"}}, + { + "name": "baz", + "type": { + "type": "STRUCT", + "structTypes": [ + {"name": "qux", "type": {"type": "INT64"}}, + {"name": "spam", "type": {"type": "BOOL"}}, + ], + }, + }, ], }, - 'parameterValue': { - 'structValues': { - 'bar': {'value': 'abc'}, - 'baz': {'structValues': { - 'qux': {'value': '123'}, - 'spam': 
{'value': 'true'}, - }}, - }, + "parameterValue": { + "structValues": { + "bar": {"value": "abc"}, + "baz": { + "structValues": { + "qux": {"value": "123"}, + "spam": {"value": "true"}, + } + }, + } }, } @@ -803,56 +662,52 @@ def test_from_api_repr_w_nested_struct(self): param = klass.from_api_repr(RESOURCE) expected = self._make_one( - 'foo', - _make_subparam('bar', 'STRING', 'abc'), + "foo", + _make_subparam("bar", "STRING", "abc"), self._make_one( - 'baz', - _make_subparam('qux', 'INT64', 123), - _make_subparam('spam', 'BOOL', True))) - self.assertEqual(param.name, 'foo') + "baz", + _make_subparam("qux", "INT64", 123), + _make_subparam("spam", "BOOL", True), + ), + ) + self.assertEqual(param.name, "foo") self.assertEqual(param.struct_types, expected.struct_types) self.assertEqual(param.struct_values, expected.struct_values) def test_to_api_repr_w_name(self): EXPECTED = { - 'name': 'foo', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'INT64'}}, - {'name': 'baz', 'type': {'type': 'STRING'}}, + "name": "foo", + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "INT64"}}, + {"name": "baz", "type": {"type": "STRING"}}, ], }, - 'parameterValue': { - 'structValues': { - 'bar': {'value': '123'}, - 'baz': {'value': 'abc'}, - }, + "parameterValue": { + "structValues": {"bar": {"value": "123"}, "baz": {"value": "abc"}} }, } - sub_1 = _make_subparam('bar', 'INT64', 123) - sub_2 = _make_subparam('baz', 'STRING', 'abc') - param = self._make_one('foo', sub_1, sub_2) + sub_1 = _make_subparam("bar", "INT64", 123) + sub_2 = _make_subparam("baz", "STRING", "abc") + param = self._make_one("foo", sub_1, sub_2) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_wo_name(self): EXPECTED = { - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'INT64'}}, - {'name': 'baz', 'type': {'type': 'STRING'}}, + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "INT64"}}, + {"name": "baz", "type": {"type": "STRING"}}, ], }, - 'parameterValue': { - 'structValues': { - 'bar': {'value': '123'}, - 'baz': {'value': 'abc'}, - }, + "parameterValue": { + "structValues": {"bar": {"value": "123"}, "baz": {"value": "abc"}} }, } - sub_1 = _make_subparam('bar', 'INT64', 123) - sub_2 = _make_subparam('baz', 'STRING', 'abc') + sub_1 = _make_subparam("bar", "INT64", 123) + sub_2 = _make_subparam("baz", "STRING", "abc") klass = self._get_target_class() param = klass.positional(sub_1, sub_2) self.assertEqual(param.to_api_repr(), EXPECTED) @@ -861,136 +716,123 @@ def test_to_api_repr_w_nested_array(self): from google.cloud.bigquery.query import ArrayQueryParameter EXPECTED = { - 'name': 'foo', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'STRING'}}, - {'name': 'baz', 'type': { - 'type': 'ARRAY', - 'arrayType': {'type': 'INT64'}, - }}, + "name": "foo", + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "STRING"}}, + { + "name": "baz", + "type": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + }, ], }, - 'parameterValue': { - 'structValues': { - 'bar': {'value': 'abc'}, - 'baz': {'arrayValues': [ - {'value': '123'}, - {'value': '456'}, - ]}, - }, + "parameterValue": { + "structValues": { + "bar": {"value": "abc"}, + "baz": {"arrayValues": [{"value": "123"}, {"value": "456"}]}, + } }, } - scalar = _make_subparam('bar', 'STRING', 'abc') - array = 
ArrayQueryParameter('baz', 'INT64', [123, 456]) - param = self._make_one('foo', scalar, array) + scalar = _make_subparam("bar", "STRING", "abc") + array = ArrayQueryParameter("baz", "INT64", [123, 456]) + param = self._make_one("foo", scalar, array) self.assertEqual(param.to_api_repr(), EXPECTED) def test_to_api_repr_w_nested_struct(self): EXPECTED = { - 'name': 'foo', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'bar', 'type': {'type': 'STRING'}}, - {'name': 'baz', 'type': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'qux', 'type': {'type': 'INT64'}}, - {'name': 'spam', 'type': {'type': 'BOOL'}}, - ], - }}, + "name": "foo", + "parameterType": { + "type": "STRUCT", + "structTypes": [ + {"name": "bar", "type": {"type": "STRING"}}, + { + "name": "baz", + "type": { + "type": "STRUCT", + "structTypes": [ + {"name": "qux", "type": {"type": "INT64"}}, + {"name": "spam", "type": {"type": "BOOL"}}, + ], + }, + }, ], }, - 'parameterValue': { - 'structValues': { - 'bar': {'value': 'abc'}, - 'baz': {'structValues': { - 'qux': {'value': '123'}, - 'spam': {'value': 'true'}, - }}, - }, + "parameterValue": { + "structValues": { + "bar": {"value": "abc"}, + "baz": { + "structValues": { + "qux": {"value": "123"}, + "spam": {"value": "true"}, + } + }, + } }, } - scalar_1 = _make_subparam('bar', 'STRING', 'abc') - scalar_2 = _make_subparam('qux', 'INT64', 123) - scalar_3 = _make_subparam('spam', 'BOOL', True) - sub = self._make_one('baz', scalar_2, scalar_3) - param = self._make_one('foo', scalar_1, sub) + scalar_1 = _make_subparam("bar", "STRING", "abc") + scalar_2 = _make_subparam("qux", "INT64", 123) + scalar_3 = _make_subparam("spam", "BOOL", True) + sub = self._make_one("baz", scalar_2, scalar_3) + param = self._make_one("foo", scalar_1, sub) self.assertEqual(param.to_api_repr(), EXPECTED) def test___eq___wrong_type(self): - field = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'abc')) + field = self._make_one("test", _make_subparam("bar", "STRING", "abc")) other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def test___eq___name_mismatch(self): - field = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'abc')) - other = self._make_one( - 'other ', _make_subparam('bar', 'STRING', 'abc')) + field = self._make_one("test", _make_subparam("bar", "STRING", "abc")) + other = self._make_one("other ", _make_subparam("bar", "STRING", "abc")) self.assertNotEqual(field, other) def test___eq___field_type_mismatch(self): - field = self._make_one( - 'test', _make_subparam('bar', 'STRING', None)) - other = self._make_one( - 'test', _make_subparam('bar', 'INT64', None)) + field = self._make_one("test", _make_subparam("bar", "STRING", None)) + other = self._make_one("test", _make_subparam("bar", "INT64", None)) self.assertNotEqual(field, other) def test___eq___value_mismatch(self): - field = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'hello')) - other = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'world')) + field = self._make_one("test", _make_subparam("bar", "STRING", "hello")) + other = self._make_one("test", _make_subparam("bar", "STRING", "world")) self.assertNotEqual(field, other) def test___eq___hit(self): - field = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'gotcha')) - other = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'gotcha')) + field = self._make_one("test", _make_subparam("bar", "STRING", "gotcha")) + other = self._make_one("test", _make_subparam("bar", "STRING", 
"gotcha")) self.assertEqual(field, other) def test___ne___wrong_type(self): - field = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'hello')) + field = self._make_one("test", _make_subparam("bar", "STRING", "hello")) other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def test___ne___same_value(self): - field1 = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'hello')) - field2 = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'hello')) + field1 = self._make_one("test", _make_subparam("bar", "STRING", "hello")) + field2 = self._make_one("test", _make_subparam("bar", "STRING", "hello")) # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = (field1 != field2) + comparison_val = field1 != field2 self.assertFalse(comparison_val) def test___ne___different_values(self): - field1 = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'hello')) - field2 = self._make_one( - 'test', _make_subparam('bar', 'STRING', 'world')) + field1 = self._make_one("test", _make_subparam("bar", "STRING", "hello")) + field2 = self._make_one("test", _make_subparam("bar", "STRING", "world")) self.assertNotEqual(field1, field2) def test___repr__(self): - field1 = self._make_one( - 'test', _make_subparam('field1', 'STRING', 'hello')) + field1 = self._make_one("test", _make_subparam("field1", "STRING", "hello")) got = repr(field1) - self.assertIn('StructQueryParameter', got) + self.assertIn("StructQueryParameter", got) self.assertIn("'field1', 'STRING'", got) self.assertIn("'field1': 'hello'", got) class Test_QueryResults(unittest.TestCase): - PROJECT = 'project' - JOB_ID = 'test-synchronous-query' - TOKEN = 'TOKEN' + PROJECT = "project" + JOB_ID = "test-synchronous-query" + TOKEN = "TOKEN" @staticmethod def _get_target_class(): @@ -1002,27 +844,21 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def _make_resource(self): - return { - 'jobReference': { - 'projectId': self.PROJECT, - 'jobId': self.JOB_ID, - }, - } + return {"jobReference": {"projectId": self.PROJECT, "jobId": self.JOB_ID}} def _verifySchema(self, query, resource): from google.cloud.bigquery.schema import SchemaField - if 'schema' in resource: - fields = resource['schema']['fields'] + if "schema" in resource: + fields = resource["schema"]["fields"] self.assertEqual(len(query.schema), len(fields)) for found, expected in zip(query.schema, fields): self.assertIsInstance(found, SchemaField) - self.assertEqual(found.name, expected['name']) - self.assertEqual(found.field_type, expected['type']) - self.assertEqual(found.mode, expected['mode']) - self.assertEqual(found.description, - expected.get('description')) - self.assertEqual(found.fields, expected.get('fields', ())) + self.assertEqual(found.name, expected["name"]) + self.assertEqual(found.field_type, expected["type"]) + self.assertEqual(found.mode, expected["mode"]) + self.assertEqual(found.description, expected.get("description")) + self.assertEqual(found.fields, expected.get("fields", ())) else: self.assertEqual(query.schema, ()) @@ -1044,7 +880,7 @@ def test_cache_hit_missing(self): def test_cache_hit_present(self): resource = self._make_resource() - resource['cacheHit'] = True + resource["cacheHit"] = True query = self._make_one(resource) self.assertTrue(query.cache_hit) @@ -1054,7 +890,7 @@ def test_complete_missing(self): def test_complete_present(self): resource = self._make_resource() - resource['jobComplete'] = True + resource["jobComplete"] = True query = self._make_one(resource) 
self.assertTrue(query.complete) @@ -1063,11 +899,9 @@ def test_errors_missing(self): self.assertIsNone(query.errors) def test_errors_present(self): - ERRORS = [ - {'reason': 'testing'}, - ] + ERRORS = [{"reason": "testing"}] resource = self._make_resource() - resource['errors'] = ERRORS + resource["errors"] = ERRORS query = self._make_one(resource) self.assertEqual(query.errors, ERRORS) @@ -1076,15 +910,15 @@ def test_job_id_missing(self): self._make_one({}) def test_job_id_broken_job_reference(self): - resource = {'jobReference': {'bogus': 'BOGUS'}} + resource = {"jobReference": {"bogus": "BOGUS"}} with self.assertRaises(ValueError): self._make_one(resource) def test_job_id_present(self): resource = self._make_resource() - resource['jobReference']['jobId'] = 'custom-job' + resource["jobReference"]["jobId"] = "custom-job" query = self._make_one(resource) - self.assertEqual(query.job_id, 'custom-job') + self.assertEqual(query.job_id, "custom-job") def test_page_token_missing(self): query = self._make_one(self._make_resource()) @@ -1092,19 +926,19 @@ def test_page_token_missing(self): def test_page_token_present(self): resource = self._make_resource() - resource['pageToken'] = 'TOKEN' + resource["pageToken"] = "TOKEN" query = self._make_one(resource) - self.assertEqual(query.page_token, 'TOKEN') + self.assertEqual(query.page_token, "TOKEN") def test_total_rows_present_integer(self): resource = self._make_resource() - resource['totalRows'] = 42 + resource["totalRows"] = 42 query = self._make_one(resource) self.assertEqual(query.total_rows, 42) def test_total_rows_present_string(self): resource = self._make_resource() - resource['totalRows'] = '42' + resource["totalRows"] = "42" query = self._make_one(resource) self.assertEqual(query.total_rows, 42) @@ -1114,13 +948,13 @@ def test_total_bytes_processed_missing(self): def test_total_bytes_processed_present_integer(self): resource = self._make_resource() - resource['totalBytesProcessed'] = 123456 + resource["totalBytesProcessed"] = 123456 query = self._make_one(resource) self.assertEqual(query.total_bytes_processed, 123456) def test_total_bytes_processed_present_string(self): resource = self._make_resource() - resource['totalBytesProcessed'] = '123456' + resource["totalBytesProcessed"] = "123456" query = self._make_one(resource) self.assertEqual(query.total_bytes_processed, 123456) @@ -1130,13 +964,13 @@ def test_num_dml_affected_rows_missing(self): def test_num_dml_affected_rows_present_integer(self): resource = self._make_resource() - resource['numDmlAffectedRows'] = 123456 + resource["numDmlAffectedRows"] = 123456 query = self._make_one(resource) self.assertEqual(query.num_dml_affected_rows, 123456) def test_num_dml_affected_rows_present_string(self): resource = self._make_resource() - resource['numDmlAffectedRows'] = '123456' + resource["numDmlAffectedRows"] = "123456" query = self._make_one(resource) self.assertEqual(query.num_dml_affected_rows, 123456) @@ -1144,18 +978,17 @@ def test_schema(self): query = self._make_one(self._make_resource()) self._verifySchema(query, self._make_resource()) resource = self._make_resource() - resource['schema'] = { - 'fields': [ - {'name': 'full_name', 'type': 'STRING', 'mode': 'REQURED'}, - {'name': 'age', 'type': 'INTEGER', 'mode': 'REQURED'}, - ], + resource["schema"] = { + "fields": [ + {"name": "full_name", "type": "STRING", "mode": "REQURED"}, + {"name": "age", "type": "INTEGER", "mode": "REQURED"}, + ] } query._set_properties(resource) self._verifySchema(query, resource) class 
Test__query_param_from_api_repr(unittest.TestCase): - @staticmethod def _call_fut(resource): from google.cloud.bigquery.query import _query_param_from_api_repr @@ -1166,16 +999,16 @@ def test_w_scalar(self): from google.cloud.bigquery.query import ScalarQueryParameter RESOURCE = { - 'name': 'foo', - 'parameterType': {'type': 'INT64'}, - 'parameterValue': {'value': '123'}, + "name": "foo", + "parameterType": {"type": "INT64"}, + "parameterValue": {"value": "123"}, } parameter = self._call_fut(RESOURCE) self.assertIsInstance(parameter, ScalarQueryParameter) - self.assertEqual(parameter.name, 'foo') - self.assertEqual(parameter.type_, 'INT64') + self.assertEqual(parameter.name, "foo") + self.assertEqual(parameter.type_, "INT64") self.assertEqual(parameter.value, 123) def test_w_scalar_timestamp(self): @@ -1183,85 +1016,75 @@ def test_w_scalar_timestamp(self): from google.cloud.bigquery.query import ScalarQueryParameter RESOURCE = { - 'name': 'zoned', - 'parameterType': {'type': 'TIMESTAMP'}, - 'parameterValue': {'value': '2012-03-04 05:06:07+00:00'}, + "name": "zoned", + "parameterType": {"type": "TIMESTAMP"}, + "parameterValue": {"value": "2012-03-04 05:06:07+00:00"}, } parameter = self._call_fut(RESOURCE) self.assertIsInstance(parameter, ScalarQueryParameter) - self.assertEqual(parameter.name, 'zoned') - self.assertEqual(parameter.type_, 'TIMESTAMP') + self.assertEqual(parameter.name, "zoned") + self.assertEqual(parameter.type_, "TIMESTAMP") self.assertEqual( - parameter.value, - datetime.datetime(2012, 3, 4, 5, 6, 7, tzinfo=UTC)) + parameter.value, datetime.datetime(2012, 3, 4, 5, 6, 7, tzinfo=UTC) + ) def test_w_scalar_timestamp_micros(self): from google.cloud._helpers import UTC from google.cloud.bigquery.query import ScalarQueryParameter RESOURCE = { - 'name': 'zoned', - 'parameterType': {'type': 'TIMESTAMP'}, - 'parameterValue': {'value': '2012-03-04 05:06:07.250000+00:00'}, + "name": "zoned", + "parameterType": {"type": "TIMESTAMP"}, + "parameterValue": {"value": "2012-03-04 05:06:07.250000+00:00"}, } parameter = self._call_fut(RESOURCE) self.assertIsInstance(parameter, ScalarQueryParameter) - self.assertEqual(parameter.name, 'zoned') - self.assertEqual(parameter.type_, 'TIMESTAMP') + self.assertEqual(parameter.name, "zoned") + self.assertEqual(parameter.type_, "TIMESTAMP") self.assertEqual( - parameter.value, - datetime.datetime(2012, 3, 4, 5, 6, 7, 250000, tzinfo=UTC)) + parameter.value, datetime.datetime(2012, 3, 4, 5, 6, 7, 250000, tzinfo=UTC) + ) def test_w_array(self): from google.cloud.bigquery.query import ArrayQueryParameter RESOURCE = { - 'name': 'foo', - 'parameterType': { - 'type': 'ARRAY', - 'arrayType': {'type': 'INT64'}, - }, - 'parameterValue': { - 'arrayValues': [ - {'value': '123'}, - ]}, + "name": "foo", + "parameterType": {"type": "ARRAY", "arrayType": {"type": "INT64"}}, + "parameterValue": {"arrayValues": [{"value": "123"}]}, } parameter = self._call_fut(RESOURCE) self.assertIsInstance(parameter, ArrayQueryParameter) - self.assertEqual(parameter.name, 'foo') - self.assertEqual(parameter.array_type, 'INT64') + self.assertEqual(parameter.name, "foo") + self.assertEqual(parameter.array_type, "INT64") self.assertEqual(parameter.values, [123]) def test_w_struct(self): from google.cloud.bigquery.query import StructQueryParameter RESOURCE = { - 'name': 'foo', - 'parameterType': { - 'type': 'STRUCT', - 'structTypes': [ - {'name': 'foo', 'type': {'type': 'STRING'}}, - {'name': 'bar', 'type': {'type': 'INT64'}}, + "name": "foo", + "parameterType": { + "type": "STRUCT", + 
"structTypes": [ + {"name": "foo", "type": {"type": "STRING"}}, + {"name": "bar", "type": {"type": "INT64"}}, ], }, - 'parameterValue': { - 'structValues': { - 'foo': {'value': 'Foo'}, - 'bar': {'value': '123'}, - } + "parameterValue": { + "structValues": {"foo": {"value": "Foo"}, "bar": {"value": "123"}} }, } parameter = self._call_fut(RESOURCE) self.assertIsInstance(parameter, StructQueryParameter) - self.assertEqual(parameter.name, 'foo') - self.assertEqual( - parameter.struct_types, {'foo': 'STRING', 'bar': 'INT64'}) - self.assertEqual(parameter.struct_values, {'foo': 'Foo', 'bar': 123}) + self.assertEqual(parameter.name, "foo") + self.assertEqual(parameter.struct_types, {"foo": "STRING", "bar": "INT64"}) + self.assertEqual(parameter.struct_values, {"foo": "Foo", "bar": 123}) diff --git a/bigquery/tests/unit/test_retry.py b/bigquery/tests/unit/test_retry.py index 2b9f77cb4162..d9f867cb30f7 100644 --- a/bigquery/tests/unit/test_retry.py +++ b/bigquery/tests/unit/test_retry.py @@ -1,4 +1,3 @@ - # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,7 +18,6 @@ class Test_should_retry(unittest.TestCase): - def _call_fut(self, exc): from google.cloud.bigquery.retry import _should_retry @@ -29,48 +27,43 @@ def test_wo_errors_attribute(self): self.assertFalse(self._call_fut(object())) def test_w_empty_errors(self): - exc = mock.Mock(errors=[], spec=['errors']) + exc = mock.Mock(errors=[], spec=["errors"]) self.assertFalse(self._call_fut(exc)) def test_w_non_matching_reason(self): - exc = mock.Mock( - errors=[{'reason': 'bogus'}], spec=['errors']) + exc = mock.Mock(errors=[{"reason": "bogus"}], spec=["errors"]) self.assertFalse(self._call_fut(exc)) def test_w_backendError(self): - exc = mock.Mock( - errors=[{'reason': 'backendError'}], spec=['errors']) + exc = mock.Mock(errors=[{"reason": "backendError"}], spec=["errors"]) self.assertTrue(self._call_fut(exc)) def test_w_rateLimitExceeded(self): - exc = mock.Mock( - errors=[{'reason': 'rateLimitExceeded'}], spec=['errors']) + exc = mock.Mock(errors=[{"reason": "rateLimitExceeded"}], spec=["errors"]) self.assertTrue(self._call_fut(exc)) def test_w_unstructured_too_many_requests(self): from google.api_core.exceptions import TooManyRequests - exc = TooManyRequests('testing') + exc = TooManyRequests("testing") self.assertTrue(self._call_fut(exc)) def test_w_internalError(self): - exc = mock.Mock( - errors=[{'reason': 'internalError'}], spec=['errors']) + exc = mock.Mock(errors=[{"reason": "internalError"}], spec=["errors"]) self.assertTrue(self._call_fut(exc)) def test_w_unstructured_internal_server_error(self): from google.api_core.exceptions import InternalServerError - exc = InternalServerError('testing') + exc = InternalServerError("testing") self.assertTrue(self._call_fut(exc)) def test_w_badGateway(self): - exc = mock.Mock( - errors=[{'reason': 'badGateway'}], spec=['errors']) + exc = mock.Mock(errors=[{"reason": "badGateway"}], spec=["errors"]) self.assertTrue(self._call_fut(exc)) def test_w_unstructured_bad_gateway(self): from google.api_core.exceptions import BadGateway - exc = BadGateway('testing') + exc = BadGateway("testing") self.assertTrue(self._call_fut(exc)) diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py index 6be6abeb56dc..4694aaf63cd8 100644 --- a/bigquery/tests/unit/test_schema.py +++ b/bigquery/tests/unit/test_schema.py @@ -18,7 +18,6 @@ class TestSchemaField(unittest.TestCase): - @staticmethod def _get_target_class(): from 
google.cloud.bigquery.schema import SchemaField @@ -29,241 +28,234 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor_defaults(self): - field = self._make_one('test', 'STRING') - self.assertEqual(field._name, 'test') - self.assertEqual(field._field_type, 'STRING') - self.assertEqual(field._mode, 'NULLABLE') + field = self._make_one("test", "STRING") + self.assertEqual(field._name, "test") + self.assertEqual(field._field_type, "STRING") + self.assertEqual(field._mode, "NULLABLE") self.assertIsNone(field._description) self.assertEqual(field._fields, ()) def test_constructor_explicit(self): - field = self._make_one('test', 'STRING', mode='REQUIRED', - description='Testing') - self.assertEqual(field._name, 'test') - self.assertEqual(field._field_type, 'STRING') - self.assertEqual(field._mode, 'REQUIRED') - self.assertEqual(field._description, 'Testing') + field = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") + self.assertEqual(field._name, "test") + self.assertEqual(field._field_type, "STRING") + self.assertEqual(field._mode, "REQUIRED") + self.assertEqual(field._description, "Testing") self.assertEqual(field._fields, ()) def test_constructor_subfields(self): - sub_field1 = self._make_one('area_code', 'STRING') - sub_field2 = self._make_one('local_number', 'STRING') + sub_field1 = self._make_one("area_code", "STRING") + sub_field2 = self._make_one("local_number", "STRING") field = self._make_one( - 'phone_number', - 'RECORD', - fields=[sub_field1, sub_field2], + "phone_number", "RECORD", fields=[sub_field1, sub_field2] ) - self.assertEqual(field._name, 'phone_number') - self.assertEqual(field._field_type, 'RECORD') - self.assertEqual(field._mode, 'NULLABLE') + self.assertEqual(field._name, "phone_number") + self.assertEqual(field._field_type, "RECORD") + self.assertEqual(field._mode, "NULLABLE") self.assertIsNone(field._description) self.assertEqual(len(field._fields), 2) self.assertIs(field._fields[0], sub_field1) self.assertIs(field._fields[1], sub_field2) def test_to_api_repr(self): - field = self._make_one('foo', 'INTEGER', 'NULLABLE') - self.assertEqual(field.to_api_repr(), { - 'mode': 'NULLABLE', - 'name': 'foo', - 'type': 'INTEGER', - 'description': None, - }) + field = self._make_one("foo", "INTEGER", "NULLABLE") + self.assertEqual( + field.to_api_repr(), + {"mode": "NULLABLE", "name": "foo", "type": "INTEGER", "description": None}, + ) def test_to_api_repr_with_subfield(self): - subfield = self._make_one('bar', 'INTEGER', 'NULLABLE') - field = self._make_one('foo', 'RECORD', 'REQUIRED', fields=(subfield,)) - self.assertEqual(field.to_api_repr(), { - 'fields': [{ - 'mode': 'NULLABLE', - 'name': 'bar', - 'type': 'INTEGER', - 'description': None, - }], - 'mode': 'REQUIRED', - 'name': 'foo', - 'type': 'RECORD', - 'description': None, - }) + subfield = self._make_one("bar", "INTEGER", "NULLABLE") + field = self._make_one("foo", "RECORD", "REQUIRED", fields=(subfield,)) + self.assertEqual( + field.to_api_repr(), + { + "fields": [ + { + "mode": "NULLABLE", + "name": "bar", + "type": "INTEGER", + "description": None, + } + ], + "mode": "REQUIRED", + "name": "foo", + "type": "RECORD", + "description": None, + }, + ) def test_from_api_repr(self): - field = self._get_target_class().from_api_repr({ - 'fields': [{ - 'mode': 'nullable', - 'name': 'bar', - 'type': 'integer', - }], - 'mode': 'required', - 'description': 'test_description', - 'name': 'foo', - 'type': 'record', - }) - self.assertEqual(field.name, 'foo') - 
self.assertEqual(field.field_type, 'RECORD') - self.assertEqual(field.mode, 'REQUIRED') - self.assertEqual(field.description, 'test_description') + field = self._get_target_class().from_api_repr( + { + "fields": [{"mode": "nullable", "name": "bar", "type": "integer"}], + "mode": "required", + "description": "test_description", + "name": "foo", + "type": "record", + } + ) + self.assertEqual(field.name, "foo") + self.assertEqual(field.field_type, "RECORD") + self.assertEqual(field.mode, "REQUIRED") + self.assertEqual(field.description, "test_description") self.assertEqual(len(field.fields), 1) - self.assertEqual(field.fields[0].name, 'bar') - self.assertEqual(field.fields[0].field_type, 'INTEGER') - self.assertEqual(field.fields[0].mode, 'NULLABLE') + self.assertEqual(field.fields[0].name, "bar") + self.assertEqual(field.fields[0].field_type, "INTEGER") + self.assertEqual(field.fields[0].mode, "NULLABLE") def test_from_api_repr_defaults(self): - field = self._get_target_class().from_api_repr({ - 'name': 'foo', - 'type': 'record', - }) - self.assertEqual(field.name, 'foo') - self.assertEqual(field.field_type, 'RECORD') - self.assertEqual(field.mode, 'NULLABLE') + field = self._get_target_class().from_api_repr( + {"name": "foo", "type": "record"} + ) + self.assertEqual(field.name, "foo") + self.assertEqual(field.field_type, "RECORD") + self.assertEqual(field.mode, "NULLABLE") self.assertEqual(field.description, None) self.assertEqual(len(field.fields), 0) def test_name_property(self): - name = 'lemon-ness' - schema_field = self._make_one(name, 'INTEGER') + name = "lemon-ness" + schema_field = self._make_one(name, "INTEGER") self.assertIs(schema_field.name, name) def test_field_type_property(self): - field_type = 'BOOLEAN' - schema_field = self._make_one('whether', field_type) + field_type = "BOOLEAN" + schema_field = self._make_one("whether", field_type) self.assertIs(schema_field.field_type, field_type) def test_mode_property(self): - mode = 'REPEATED' - schema_field = self._make_one('again', 'FLOAT', mode=mode) + mode = "REPEATED" + schema_field = self._make_one("again", "FLOAT", mode=mode) self.assertIs(schema_field.mode, mode) def test_is_nullable(self): - mode = 'NULLABLE' - schema_field = self._make_one('test', 'FLOAT', mode=mode) + mode = "NULLABLE" + schema_field = self._make_one("test", "FLOAT", mode=mode) self.assertTrue(schema_field.is_nullable) def test_is_not_nullable(self): - mode = 'REPEATED' - schema_field = self._make_one('test', 'FLOAT', mode=mode) + mode = "REPEATED" + schema_field = self._make_one("test", "FLOAT", mode=mode) self.assertFalse(schema_field.is_nullable) def test_description_property(self): - description = 'It holds some data.' - schema_field = self._make_one( - 'do', 'TIMESTAMP', description=description) + description = "It holds some data." 
+ schema_field = self._make_one("do", "TIMESTAMP", description=description) self.assertIs(schema_field.description, description) def test_fields_property(self): - sub_field1 = self._make_one('one', 'STRING') - sub_field2 = self._make_one('fish', 'INTEGER') + sub_field1 = self._make_one("one", "STRING") + sub_field2 = self._make_one("fish", "INTEGER") fields = (sub_field1, sub_field2) - schema_field = self._make_one('boat', 'RECORD', fields=fields) + schema_field = self._make_one("boat", "RECORD", fields=fields) self.assertIs(schema_field.fields, fields) def test___eq___wrong_type(self): - field = self._make_one('test', 'STRING') + field = self._make_one("test", "STRING") other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def test___eq___name_mismatch(self): - field = self._make_one('test', 'STRING') - other = self._make_one('other', 'STRING') + field = self._make_one("test", "STRING") + other = self._make_one("other", "STRING") self.assertNotEqual(field, other) def test___eq___field_type_mismatch(self): - field = self._make_one('test', 'STRING') - other = self._make_one('test', 'INTEGER') + field = self._make_one("test", "STRING") + other = self._make_one("test", "INTEGER") self.assertNotEqual(field, other) def test___eq___mode_mismatch(self): - field = self._make_one('test', 'STRING', mode='REQUIRED') - other = self._make_one('test', 'STRING', mode='NULLABLE') + field = self._make_one("test", "STRING", mode="REQUIRED") + other = self._make_one("test", "STRING", mode="NULLABLE") self.assertNotEqual(field, other) def test___eq___description_mismatch(self): - field = self._make_one('test', 'STRING', description='Testing') - other = self._make_one('test', 'STRING', description='Other') + field = self._make_one("test", "STRING", description="Testing") + other = self._make_one("test", "STRING", description="Other") self.assertNotEqual(field, other) def test___eq___fields_mismatch(self): - sub1 = self._make_one('sub1', 'STRING') - sub2 = self._make_one('sub2', 'STRING') - field = self._make_one('test', 'RECORD', fields=[sub1]) - other = self._make_one('test', 'RECORD', fields=[sub2]) + sub1 = self._make_one("sub1", "STRING") + sub2 = self._make_one("sub2", "STRING") + field = self._make_one("test", "RECORD", fields=[sub1]) + other = self._make_one("test", "RECORD", fields=[sub2]) self.assertNotEqual(field, other) def test___eq___hit(self): - field = self._make_one('test', 'STRING', mode='REQUIRED', - description='Testing') - other = self._make_one('test', 'STRING', mode='REQUIRED', - description='Testing') + field = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") + other = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") self.assertEqual(field, other) def test___eq___hit_case_diff_on_type(self): - field = self._make_one('test', 'STRING', mode='REQUIRED', - description='Testing') - other = self._make_one('test', 'string', mode='REQUIRED', - description='Testing') + field = self._make_one("test", "STRING", mode="REQUIRED", description="Testing") + other = self._make_one("test", "string", mode="REQUIRED", description="Testing") self.assertEqual(field, other) def test___eq___hit_w_fields(self): - sub1 = self._make_one('sub1', 'STRING') - sub2 = self._make_one('sub2', 'STRING') - field = self._make_one('test', 'RECORD', fields=[sub1, sub2]) - other = self._make_one('test', 'RECORD', fields=[sub1, sub2]) + sub1 = self._make_one("sub1", "STRING") + sub2 = self._make_one("sub2", "STRING") + field = self._make_one("test", 
"RECORD", fields=[sub1, sub2]) + other = self._make_one("test", "RECORD", fields=[sub1, sub2]) self.assertEqual(field, other) def test___ne___wrong_type(self): - field = self._make_one('toast', 'INTEGER') + field = self._make_one("toast", "INTEGER") other = object() self.assertNotEqual(field, other) self.assertEqual(field, mock.ANY) def test___ne___same_value(self): - field1 = self._make_one('test', 'TIMESTAMP', mode='REPEATED') - field2 = self._make_one('test', 'TIMESTAMP', mode='REPEATED') + field1 = self._make_one("test", "TIMESTAMP", mode="REPEATED") + field2 = self._make_one("test", "TIMESTAMP", mode="REPEATED") # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = (field1 != field2) + comparison_val = field1 != field2 self.assertFalse(comparison_val) def test___ne___different_values(self): field1 = self._make_one( - 'test1', 'FLOAT', mode='REPEATED', description='Not same') + "test1", "FLOAT", mode="REPEATED", description="Not same" + ) field2 = self._make_one( - 'test2', 'FLOAT', mode='NULLABLE', description='Knot saym') + "test2", "FLOAT", mode="NULLABLE", description="Knot saym" + ) self.assertNotEqual(field1, field2) def test___hash__set_equality(self): - sub1 = self._make_one('sub1', 'STRING') - sub2 = self._make_one('sub2', 'STRING') - field1 = self._make_one('test', 'RECORD', fields=[sub1]) - field2 = self._make_one('test', 'RECORD', fields=[sub2]) + sub1 = self._make_one("sub1", "STRING") + sub2 = self._make_one("sub2", "STRING") + field1 = self._make_one("test", "RECORD", fields=[sub1]) + field2 = self._make_one("test", "RECORD", fields=[sub2]) set_one = {field1, field2} set_two = {field1, field2} self.assertEqual(set_one, set_two) def test___hash__not_equals(self): - sub1 = self._make_one('sub1', 'STRING') - sub2 = self._make_one('sub2', 'STRING') - field1 = self._make_one('test', 'RECORD', fields=[sub1]) - field2 = self._make_one('test', 'RECORD', fields=[sub2]) + sub1 = self._make_one("sub1", "STRING") + sub2 = self._make_one("sub2", "STRING") + field1 = self._make_one("test", "RECORD", fields=[sub1]) + field2 = self._make_one("test", "RECORD", fields=[sub2]) set_one = {field1} set_two = {field2} self.assertNotEqual(set_one, set_two) def test___repr__(self): - field1 = self._make_one('field1', 'STRING') + field1 = self._make_one("field1", "STRING") expected = "SchemaField('field1', 'STRING', 'NULLABLE', None, ())" self.assertEqual(repr(field1), expected) # TODO: dedup with the same class in test_table.py. 
class _SchemaBase(object): - def _verify_field(self, field, r_field): - self.assertEqual(field.name, r_field['name']) - self.assertEqual(field.field_type, r_field['type']) - self.assertEqual(field.mode, r_field.get('mode', 'NULLABLE')) + self.assertEqual(field.name, r_field["name"]) + self.assertEqual(field.field_type, r_field["type"]) + self.assertEqual(field.mode, r_field.get("mode", "NULLABLE")) def _verifySchema(self, schema, resource): - r_fields = resource['schema']['fields'] + r_fields = resource["schema"]["fields"] self.assertEqual(len(schema), len(r_fields)) for field, r_field in zip(schema, r_fields): @@ -271,7 +263,6 @@ def _verifySchema(self, schema, resource): class Test_parse_schema_resource(unittest.TestCase, _SchemaBase): - def _call_fut(self, resource): from google.cloud.bigquery.schema import _parse_schema_resource @@ -279,44 +270,44 @@ def _call_fut(self, resource): def _make_resource(self): return { - 'schema': {'fields': [ - {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, - {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}, - ]}, + "schema": { + "fields": [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, + ] + } } def test__parse_schema_resource_defaults(self): RESOURCE = self._make_resource() - schema = self._call_fut(RESOURCE['schema']) + schema = self._call_fut(RESOURCE["schema"]) self._verifySchema(schema, RESOURCE) def test__parse_schema_resource_subfields(self): RESOURCE = self._make_resource() - RESOURCE['schema']['fields'].append( - {'name': 'phone', - 'type': 'RECORD', - 'mode': 'REPEATED', - 'fields': [{'name': 'type', - 'type': 'STRING', - 'mode': 'REQUIRED'}, - {'name': 'number', - 'type': 'STRING', - 'mode': 'REQUIRED'}]}) - schema = self._call_fut(RESOURCE['schema']) + RESOURCE["schema"]["fields"].append( + { + "name": "phone", + "type": "RECORD", + "mode": "REPEATED", + "fields": [ + {"name": "type", "type": "STRING", "mode": "REQUIRED"}, + {"name": "number", "type": "STRING", "mode": "REQUIRED"}, + ], + } + ) + schema = self._call_fut(RESOURCE["schema"]) self._verifySchema(schema, RESOURCE) def test__parse_schema_resource_fields_without_mode(self): RESOURCE = self._make_resource() - RESOURCE['schema']['fields'].append( - {'name': 'phone', - 'type': 'STRING'}) + RESOURCE["schema"]["fields"].append({"name": "phone", "type": "STRING"}) - schema = self._call_fut(RESOURCE['schema']) + schema = self._call_fut(RESOURCE["schema"]) self._verifySchema(schema, RESOURCE) class Test_build_schema_resource(unittest.TestCase, _SchemaBase): - def _call_fut(self, resource): from google.cloud.bigquery.schema import _build_schema_resource @@ -325,66 +316,88 @@ def _call_fut(self, resource): def test_defaults(self): from google.cloud.bigquery.schema import SchemaField - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") resource = self._call_fut([full_name, age]) self.assertEqual(len(resource), 2) - self.assertEqual(resource[0], - {'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None}) - self.assertEqual(resource[1], - {'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None}) + self.assertEqual( + resource[0], + { + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + }, + ) + self.assertEqual( + resource[1], 
+ {"name": "age", "type": "INTEGER", "mode": "REQUIRED", "description": None}, + ) def test_w_description(self): from google.cloud.bigquery.schema import SchemaField - DESCRIPTION = 'DESCRIPTION' - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED', - description=DESCRIPTION) - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + DESCRIPTION = "DESCRIPTION" + full_name = SchemaField( + "full_name", "STRING", mode="REQUIRED", description=DESCRIPTION + ) + age = SchemaField("age", "INTEGER", mode="REQUIRED") resource = self._call_fut([full_name, age]) self.assertEqual(len(resource), 2) - self.assertEqual(resource[0], - {'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': DESCRIPTION}) - self.assertEqual(resource[1], - {'name': 'age', - 'type': 'INTEGER', - 'mode': 'REQUIRED', - 'description': None}) + self.assertEqual( + resource[0], + { + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": DESCRIPTION, + }, + ) + self.assertEqual( + resource[1], + {"name": "age", "type": "INTEGER", "mode": "REQUIRED", "description": None}, + ) def test_w_subfields(self): from google.cloud.bigquery.schema import SchemaField - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - ph_type = SchemaField('type', 'STRING', 'REQUIRED') - ph_num = SchemaField('number', 'STRING', 'REQUIRED') - phone = SchemaField('phone', 'RECORD', mode='REPEATED', - fields=[ph_type, ph_num]) + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + ph_type = SchemaField("type", "STRING", "REQUIRED") + ph_num = SchemaField("number", "STRING", "REQUIRED") + phone = SchemaField( + "phone", "RECORD", mode="REPEATED", fields=[ph_type, ph_num] + ) resource = self._call_fut([full_name, phone]) self.assertEqual(len(resource), 2) - self.assertEqual(resource[0], - {'name': 'full_name', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None}) - self.assertEqual(resource[1], - {'name': 'phone', - 'type': 'RECORD', - 'mode': 'REPEATED', - 'description': None, - 'fields': [{'name': 'type', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None}, - {'name': 'number', - 'type': 'STRING', - 'mode': 'REQUIRED', - 'description': None}]}) + self.assertEqual( + resource[0], + { + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + }, + ) + self.assertEqual( + resource[1], + { + "name": "phone", + "type": "RECORD", + "mode": "REPEATED", + "description": None, + "fields": [ + { + "name": "type", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + }, + { + "name": "number", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + }, + ], + }, + ) diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index 5795a3c92e39..04a67c603ba9 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -16,6 +16,7 @@ import mock import six + try: import pandas except (ImportError, AttributeError): # pragma: NO COVER @@ -25,14 +26,13 @@ class _SchemaBase(object): - def _verify_field(self, field, r_field): - self.assertEqual(field.name, r_field['name']) - self.assertEqual(field.field_type, r_field['type']) - self.assertEqual(field.mode, r_field.get('mode', 'NULLABLE')) + self.assertEqual(field.name, r_field["name"]) + self.assertEqual(field.field_type, r_field["type"]) + self.assertEqual(field.mode, r_field.get("mode", "NULLABLE")) def _verifySchema(self, schema, resource): - r_fields = resource['schema']['fields'] + r_fields = 
resource["schema"]["fields"] self.assertEqual(len(schema), len(r_fields)) for field, r_field in zip(schema, r_fields): @@ -40,7 +40,7 @@ def _verifySchema(self, schema, resource): class TestEncryptionConfiguration(unittest.TestCase): - KMS_KEY_NAME = 'projects/1/locations/global/keyRings/1/cryptoKeys/1' + KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -68,9 +68,7 @@ def test_kms_key_name_setter(self): self.assertIsNone(encryption_config.kms_key_name) def test_from_api_repr(self): - RESOURCE = { - 'kmsKeyName': self.KMS_KEY_NAME, - } + RESOURCE = {"kmsKeyName": self.KMS_KEY_NAME} klass = self._get_target_class() encryption_config = klass.from_api_repr(RESOURCE) self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) @@ -78,11 +76,7 @@ def test_from_api_repr(self): def test_to_api_repr(self): encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) resource = encryption_config.to_api_repr() - self.assertEqual( - resource, - { - 'kmsKeyName': self.KMS_KEY_NAME, - }) + self.assertEqual(resource, {"kmsKeyName": self.KMS_KEY_NAME}) def test___eq___wrong_type(self): encryption_config = self._make_one() @@ -110,7 +104,7 @@ def test___ne___same_value(self): encryption_config1 = self._make_one(self.KMS_KEY_NAME) encryption_config2 = self._make_one(self.KMS_KEY_NAME) # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = (encryption_config1 != encryption_config2) + comparison_val = encryption_config1 != encryption_config2 self.assertFalse(comparison_val) def test___ne___different_values(self): @@ -139,7 +133,6 @@ def test___repr__(self): class TestTableReference(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.table import TableReference @@ -151,154 +144,159 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): from google.cloud.bigquery.dataset import DatasetReference - dataset_ref = DatasetReference('project_1', 'dataset_1') - table_ref = self._make_one(dataset_ref, 'table_1') + dataset_ref = DatasetReference("project_1", "dataset_1") + + table_ref = self._make_one(dataset_ref, "table_1") self.assertEqual(table_ref.dataset_id, dataset_ref.dataset_id) - self.assertEqual(table_ref.table_id, 'table_1') + self.assertEqual(table_ref.table_id, "table_1") def test_to_api_repr(self): from google.cloud.bigquery.dataset import DatasetReference - dataset_ref = DatasetReference('project_1', 'dataset_1') - table_ref = self._make_one(dataset_ref, 'table_1') + + dataset_ref = DatasetReference("project_1", "dataset_1") + table_ref = self._make_one(dataset_ref, "table_1") resource = table_ref.to_api_repr() self.assertEqual( resource, - { - 'projectId': 'project_1', - 'datasetId': 'dataset_1', - 'tableId': 'table_1', - }) + {"projectId": "project_1", "datasetId": "dataset_1", "tableId": "table_1"}, + ) def test_from_api_repr(self): from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.table import TableReference - dataset_ref = DatasetReference('project_1', 'dataset_1') - expected = self._make_one(dataset_ref, 'table_1') + + dataset_ref = DatasetReference("project_1", "dataset_1") + expected = self._make_one(dataset_ref, "table_1") got = TableReference.from_api_repr( - { - 'projectId': 'project_1', - 'datasetId': 'dataset_1', - 'tableId': 'table_1', - }) + {"projectId": "project_1", "datasetId": "dataset_1", "tableId": "table_1"} + ) self.assertEqual(expected, got) def test_from_string(self): cls = self._get_target_class() - got = 
cls.from_string('string-project.string_dataset.string_table') - self.assertEqual(got.project, 'string-project') - self.assertEqual(got.dataset_id, 'string_dataset') - self.assertEqual(got.table_id, 'string_table') + got = cls.from_string("string-project.string_dataset.string_table") + self.assertEqual(got.project, "string-project") + self.assertEqual(got.dataset_id, "string_dataset") + self.assertEqual(got.table_id, "string_table") def test_from_string_legacy_string(self): cls = self._get_target_class() with self.assertRaises(ValueError): - cls.from_string('string-project:string_dataset.string_table') + cls.from_string("string-project:string_dataset.string_table") def test_from_string_not_fully_qualified(self): cls = self._get_target_class() with self.assertRaises(ValueError): - cls.from_string('string_table') + cls.from_string("string_table") with self.assertRaises(ValueError): - cls.from_string('string_dataset.string_table') + cls.from_string("string_dataset.string_table") with self.assertRaises(ValueError): - cls.from_string('a.b.c.d') + cls.from_string("a.b.c.d") def test_from_string_with_default_project(self): cls = self._get_target_class() got = cls.from_string( - 'string_dataset.string_table', default_project='default-project') - self.assertEqual(got.project, 'default-project') - self.assertEqual(got.dataset_id, 'string_dataset') - self.assertEqual(got.table_id, 'string_table') + "string_dataset.string_table", default_project="default-project" + ) + self.assertEqual(got.project, "default-project") + self.assertEqual(got.dataset_id, "string_dataset") + self.assertEqual(got.table_id, "string_table") def test_from_string_ignores_default_project(self): cls = self._get_target_class() got = cls.from_string( - 'string-project.string_dataset.string_table', - default_project='default-project') - self.assertEqual(got.project, 'string-project') - self.assertEqual(got.dataset_id, 'string_dataset') - self.assertEqual(got.table_id, 'string_table') + "string-project.string_dataset.string_table", + default_project="default-project", + ) + self.assertEqual(got.project, "string-project") + self.assertEqual(got.dataset_id, "string_dataset") + self.assertEqual(got.table_id, "string_table") def test___eq___wrong_type(self): from google.cloud.bigquery.dataset import DatasetReference - dataset_ref = DatasetReference('project_1', 'dataset_1') - table = self._make_one(dataset_ref, 'table_1') + + dataset_ref = DatasetReference("project_1", "dataset_1") + table = self._make_one(dataset_ref, "table_1") other = object() self.assertNotEqual(table, other) self.assertEqual(table, mock.ANY) def test___eq___project_mismatch(self): from google.cloud.bigquery.dataset import DatasetReference - dataset = DatasetReference('project_1', 'dataset_1') - other_dataset = DatasetReference('project_2', 'dataset_1') - table = self._make_one(dataset, 'table_1') - other = self._make_one(other_dataset, 'table_1') + + dataset = DatasetReference("project_1", "dataset_1") + other_dataset = DatasetReference("project_2", "dataset_1") + table = self._make_one(dataset, "table_1") + other = self._make_one(other_dataset, "table_1") self.assertNotEqual(table, other) def test___eq___dataset_mismatch(self): from google.cloud.bigquery.dataset import DatasetReference - dataset = DatasetReference('project_1', 'dataset_1') - other_dataset = DatasetReference('project_1', 'dataset_2') - table = self._make_one(dataset, 'table_1') - other = self._make_one(other_dataset, 'table_1') + + dataset = DatasetReference("project_1", "dataset_1") + other_dataset 
= DatasetReference("project_1", "dataset_2") + table = self._make_one(dataset, "table_1") + other = self._make_one(other_dataset, "table_1") self.assertNotEqual(table, other) def test___eq___table_mismatch(self): from google.cloud.bigquery.dataset import DatasetReference - dataset = DatasetReference('project_1', 'dataset_1') - table = self._make_one(dataset, 'table_1') - other = self._make_one(dataset, 'table_2') + + dataset = DatasetReference("project_1", "dataset_1") + table = self._make_one(dataset, "table_1") + other = self._make_one(dataset, "table_2") self.assertNotEqual(table, other) def test___eq___equality(self): from google.cloud.bigquery.dataset import DatasetReference - dataset = DatasetReference('project_1', 'dataset_1') - table = self._make_one(dataset, 'table_1') - other = self._make_one(dataset, 'table_1') + + dataset = DatasetReference("project_1", "dataset_1") + table = self._make_one(dataset, "table_1") + other = self._make_one(dataset, "table_1") self.assertEqual(table, other) def test___hash__set_equality(self): from google.cloud.bigquery.dataset import DatasetReference - dataset = DatasetReference('project_1', 'dataset_1') - table1 = self._make_one(dataset, 'table1') - table2 = self._make_one(dataset, 'table2') + + dataset = DatasetReference("project_1", "dataset_1") + table1 = self._make_one(dataset, "table1") + table2 = self._make_one(dataset, "table2") set_one = {table1, table2} set_two = {table1, table2} self.assertEqual(set_one, set_two) def test___hash__not_equals(self): from google.cloud.bigquery.dataset import DatasetReference - dataset = DatasetReference('project_1', 'dataset_1') - table1 = self._make_one(dataset, 'table1') - table2 = self._make_one(dataset, 'table2') + + dataset = DatasetReference("project_1", "dataset_1") + table1 = self._make_one(dataset, "table1") + table2 = self._make_one(dataset, "table2") set_one = {table1} set_two = {table2} self.assertNotEqual(set_one, set_two) def test___repr__(self): - dataset = DatasetReference('project1', 'dataset1') - table1 = self._make_one(dataset, 'table1') + dataset = DatasetReference("project1", "dataset1") + table1 = self._make_one(dataset, "table1") expected = ( - "TableReference(DatasetReference('project1', 'dataset1'), " - "'table1')" + "TableReference(DatasetReference('project1', 'dataset1'), " "'table1')" ) self.assertEqual(repr(table1), expected) class TestTable(unittest.TestCase, _SchemaBase): - PROJECT = 'prahj-ekt' - DS_ID = 'dataset-name' - TABLE_NAME = 'table-name' - KMS_KEY_NAME = 'projects/1/locations/global/keyRings/1/cryptoKeys/1' + PROJECT = "prahj-ekt" + DS_ID = "dataset-name" + TABLE_NAME = "table-name" + KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -314,12 +312,10 @@ def _setUpConstants(self): from google.cloud._helpers import UTC self.WHEN_TS = 1437767599.006 - self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace( - tzinfo=UTC) - self.ETAG = 'ETAG' - self.TABLE_FULL_ID = '%s:%s.%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) - self.RESOURCE_URL = 'http://example.com/path/to/resource' + self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(tzinfo=UTC) + self.ETAG = "ETAG" + self.TABLE_FULL_ID = "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME) + self.RESOURCE_URL = "http://example.com/path/to/resource" self.NUM_BYTES = 12345 self.NUM_ROWS = 67 self.NUM_EST_BYTES = 1234 @@ -328,115 +324,119 @@ def _setUpConstants(self): def _make_resource(self): self._setUpConstants() return { - 
'creationTime': self.WHEN_TS * 1000, - 'tableReference': - {'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_NAME}, - 'schema': {'fields': [ - {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, - {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]}, - 'etag': 'ETAG', - 'id': self.TABLE_FULL_ID, - 'lastModifiedTime': self.WHEN_TS * 1000, - 'location': 'US', - 'selfLink': self.RESOURCE_URL, - 'numRows': self.NUM_ROWS, - 'numBytes': self.NUM_BYTES, - 'type': 'TABLE', - 'streamingBuffer': { - 'estimatedRows': str(self.NUM_EST_ROWS), - 'estimatedBytes': str(self.NUM_EST_BYTES), - 'oldestEntryTime': self.WHEN_TS * 1000}, - 'externalDataConfiguration': { - 'sourceFormat': 'CSV', - 'csvOptions': { - 'allowJaggedRows': True, - 'encoding': 'encoding'}}, - 'labels': {'x': 'y'}, + "creationTime": self.WHEN_TS * 1000, + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_NAME, + }, + "schema": { + "fields": [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, + ] + }, + "etag": "ETAG", + "id": self.TABLE_FULL_ID, + "lastModifiedTime": self.WHEN_TS * 1000, + "location": "US", + "selfLink": self.RESOURCE_URL, + "numRows": self.NUM_ROWS, + "numBytes": self.NUM_BYTES, + "type": "TABLE", + "streamingBuffer": { + "estimatedRows": str(self.NUM_EST_ROWS), + "estimatedBytes": str(self.NUM_EST_BYTES), + "oldestEntryTime": self.WHEN_TS * 1000, + }, + "externalDataConfiguration": { + "sourceFormat": "CSV", + "csvOptions": {"allowJaggedRows": True, "encoding": "encoding"}, + }, + "labels": {"x": "y"}, } def _verifyReadonlyResourceProperties(self, table, resource): - if 'creationTime' in resource: + if "creationTime" in resource: self.assertEqual(table.created, self.WHEN) else: self.assertIsNone(table.created) - if 'etag' in resource: + if "etag" in resource: self.assertEqual(table.etag, self.ETAG) else: self.assertIsNone(table.etag) - if 'numRows' in resource: + if "numRows" in resource: self.assertEqual(table.num_rows, self.NUM_ROWS) else: self.assertIsNone(table.num_rows) - if 'numBytes' in resource: + if "numBytes" in resource: self.assertEqual(table.num_bytes, self.NUM_BYTES) else: self.assertIsNone(table.num_bytes) - if 'selfLink' in resource: + if "selfLink" in resource: self.assertEqual(table.self_link, self.RESOURCE_URL) else: self.assertIsNone(table.self_link) - if 'streamingBuffer' in resource: - self.assertEqual(table.streaming_buffer.estimated_rows, - self.NUM_EST_ROWS) - self.assertEqual(table.streaming_buffer.estimated_bytes, - self.NUM_EST_BYTES) - self.assertEqual(table.streaming_buffer.oldest_entry_time, - self.WHEN) + if "streamingBuffer" in resource: + self.assertEqual(table.streaming_buffer.estimated_rows, self.NUM_EST_ROWS) + self.assertEqual(table.streaming_buffer.estimated_bytes, self.NUM_EST_BYTES) + self.assertEqual(table.streaming_buffer.oldest_entry_time, self.WHEN) else: self.assertIsNone(table.streaming_buffer) self.assertEqual(table.full_table_id, self.TABLE_FULL_ID) - self.assertEqual(table.table_type, - 'TABLE' if 'view' not in resource else 'VIEW') + self.assertEqual( + table.table_type, "TABLE" if "view" not in resource else "VIEW" + ) def _verifyResourceProperties(self, table, resource): self._verifyReadonlyResourceProperties(table, resource) - if 'expirationTime' in resource: + if "expirationTime" in resource: self.assertEqual(table.expires, self.EXP_TIME) else: self.assertIsNone(table.expires) - 
self.assertEqual(table.description, resource.get('description')) - self.assertEqual(table.friendly_name, resource.get('friendlyName')) - self.assertEqual(table.location, resource.get('location')) + self.assertEqual(table.description, resource.get("description")) + self.assertEqual(table.friendly_name, resource.get("friendlyName")) + self.assertEqual(table.location, resource.get("location")) - if 'view' in resource: - self.assertEqual(table.view_query, resource['view']['query']) + if "view" in resource: + self.assertEqual(table.view_query, resource["view"]["query"]) self.assertEqual( - table.view_use_legacy_sql, - resource['view'].get('useLegacySql', True)) + table.view_use_legacy_sql, resource["view"].get("useLegacySql", True) + ) else: self.assertIsNone(table.view_query) self.assertIsNone(table.view_use_legacy_sql) - if 'schema' in resource: + if "schema" in resource: self._verifySchema(table.schema, resource) else: self.assertEqual(table.schema, []) - if 'externalDataConfiguration' in resource: + if "externalDataConfiguration" in resource: edc = table.external_data_configuration - self.assertEqual(edc.source_format, 'CSV') + self.assertEqual(edc.source_format, "CSV") self.assertEqual(edc.options.allow_jagged_rows, True) - if 'labels' in resource: - self.assertEqual(table.labels, {'x': 'y'}) + if "labels" in resource: + self.assertEqual(table.labels, {"x": "y"}) else: self.assertEqual(table.labels, {}) - if 'encryptionConfiguration' in resource: + if "encryptionConfiguration" in resource: self.assertIsNotNone(table.encryption_configuration) - self.assertEqual(table.encryption_configuration.kms_key_name, - resource['encryptionConfiguration']['kmsKeyName']) + self.assertEqual( + table.encryption_configuration.kms_key_name, + resource["encryptionConfiguration"]["kmsKeyName"], + ) else: self.assertIsNone(table.encryption_configuration) @@ -453,8 +453,9 @@ def test_ctor(self): self.assertEqual(table.reference.dataset_id, self.DS_ID) self.assertEqual( table.path, - '/projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME)) + "/projects/%s/datasets/%s/tables/%s" + % (self.PROJECT, self.DS_ID, self.TABLE_NAME), + ) self.assertEqual(table.schema, []) self.assertIsNone(table.created) @@ -482,8 +483,8 @@ def test_ctor_w_schema(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") table = self._make_one(table_ref, schema=[full_name, age]) self.assertEqual(table.schema, [full_name, age]) @@ -498,17 +499,17 @@ def test_num_bytes_getter(self): num_bytes = 1337 # Check with integer value set. - table._properties = {'numBytes': num_bytes} + table._properties = {"numBytes": num_bytes} self.assertEqual(table.num_bytes, num_bytes) # Check with a string value set. - table._properties = {'numBytes': str(num_bytes)} + table._properties = {"numBytes": str(num_bytes)} self.assertEqual(table.num_bytes, num_bytes) # Check with invalid int value. - table._properties = {'numBytes': 'x'} + table._properties = {"numBytes": "x"} with self.assertRaises(ValueError): - getattr(table, 'num_bytes') + getattr(table, "num_bytes") def test_num_rows_getter(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -520,17 +521,17 @@ def test_num_rows_getter(self): num_rows = 42 # Check with integer value set. 
- table._properties = {'numRows': num_rows} + table._properties = {"numRows": num_rows} self.assertEqual(table.num_rows, num_rows) # Check with a string value set. - table._properties = {'numRows': str(num_rows)} + table._properties = {"numRows": str(num_rows)} self.assertEqual(table.num_rows, num_rows) # Check with invalid int value. - table._properties = {'numRows': 'x'} + table._properties = {"numRows": "x"} with self.assertRaises(ValueError): - getattr(table, 'num_rows') + getattr(table, "num_rows") def test_schema_setter_non_list(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -545,7 +546,7 @@ def test_schema_setter_invalid_field(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") with self.assertRaises(ValueError): table.schema = [full_name, object()] @@ -555,8 +556,8 @@ def test_schema_setter(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") table.schema = [full_name, age] self.assertEqual(table.schema, [full_name, age]) @@ -567,30 +568,32 @@ def test_props_set_by_server(self): CREATED = datetime.datetime(2015, 7, 29, 12, 13, 22, tzinfo=UTC) MODIFIED = datetime.datetime(2015, 7, 29, 14, 47, 15, tzinfo=UTC) - TABLE_FULL_ID = '%s:%s.%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) - URL = 'http://example.com/projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) + TABLE_FULL_ID = "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME) + URL = "http://example.com/projects/%s/datasets/%s/tables/%s" % ( + self.PROJECT, + self.DS_ID, + self.TABLE_NAME, + ) dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - table._properties['creationTime'] = _millis(CREATED) - table._properties['etag'] = 'ETAG' - table._properties['lastModifiedTime'] = _millis(MODIFIED) - table._properties['numBytes'] = 12345 - table._properties['numRows'] = 66 - table._properties['selfLink'] = URL - table._properties['id'] = TABLE_FULL_ID - table._properties['type'] = 'TABLE' + table._properties["creationTime"] = _millis(CREATED) + table._properties["etag"] = "ETAG" + table._properties["lastModifiedTime"] = _millis(MODIFIED) + table._properties["numBytes"] = 12345 + table._properties["numRows"] = 66 + table._properties["selfLink"] = URL + table._properties["id"] = TABLE_FULL_ID + table._properties["type"] = "TABLE" self.assertEqual(table.created, CREATED) - self.assertEqual(table.etag, 'ETAG') + self.assertEqual(table.etag, "ETAG") self.assertEqual(table.modified, MODIFIED) self.assertEqual(table.num_bytes, 12345) self.assertEqual(table.num_rows, 66) self.assertEqual(table.self_link, URL) self.assertEqual(table.full_table_id, TABLE_FULL_ID) - self.assertEqual(table.table_type, 'TABLE') + self.assertEqual(table.table_type, "TABLE") def test_description_setter_bad_value(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -603,8 +606,8 @@ def test_description_setter(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = 
dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - table.description = 'DESCRIPTION' - self.assertEqual(table.description, 'DESCRIPTION') + table.description = "DESCRIPTION" + self.assertEqual(table.description, "DESCRIPTION") def test_expires_setter_bad_value(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -635,8 +638,8 @@ def test_friendly_name_setter(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - table.friendly_name = 'FRIENDLY' - self.assertEqual(table.friendly_name, 'FRIENDLY') + table.friendly_name = "FRIENDLY" + self.assertEqual(table.friendly_name, "FRIENDLY") def test_view_query_setter_bad_value(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -649,8 +652,8 @@ def test_view_query_setter(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - table.view_query = 'select * from foo' - self.assertEqual(table.view_query, 'select * from foo') + table.view_query = "select * from foo" + self.assertEqual(table.view_query, "select * from foo") self.assertEqual(table.view_use_legacy_sql, False) table.view_use_legacy_sql = True @@ -660,7 +663,7 @@ def test_view_query_deleter(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - table.view_query = 'select * from foo' + table.view_query = "select * from foo" del table.view_query self.assertIsNone(table.view_query) self.assertIsNone(table.view_use_legacy_sql) @@ -677,14 +680,14 @@ def test_view_use_legacy_sql_setter(self): table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) table.view_use_legacy_sql = True - table.view_query = 'select * from foo' + table.view_query = "select * from foo" self.assertEqual(table.view_use_legacy_sql, True) - self.assertEqual(table.view_query, 'select * from foo') + self.assertEqual(table.view_query, "select * from foo") def test_external_data_configuration_setter(self): from google.cloud.bigquery.external_config import ExternalConfig - external_config = ExternalConfig('CSV') + external_config = ExternalConfig("CSV") dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) @@ -693,7 +696,8 @@ def test_external_data_configuration_setter(self): self.assertEqual( table.external_data_configuration.source_format, - external_config.source_format) + external_config.source_format, + ) def test_external_data_configuration_setter_none(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -715,10 +719,10 @@ def test_labels_update_in_place(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - del table._properties['labels'] # don't start w/ existing dict + del table._properties["labels"] # don't start w/ existing dict labels = table.labels - labels['foo'] = 'bar' # update in place - self.assertEqual(table.labels, {'foo': 'bar'}) + labels["foo"] = "bar" # update in place + self.assertEqual(table.labels, {"foo": "bar"}) def test_labels_setter_bad_value(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -729,20 +733,20 @@ def test_labels_setter_bad_value(self): def test_from_string(self): cls = self._get_target_class() - got = cls.from_string('string-project.string_dataset.string_table') - self.assertEqual(got.project, 
'string-project') - self.assertEqual(got.dataset_id, 'string_dataset') - self.assertEqual(got.table_id, 'string_table') + got = cls.from_string("string-project.string_dataset.string_table") + self.assertEqual(got.project, "string-project") + self.assertEqual(got.dataset_id, "string_dataset") + self.assertEqual(got.table_id, "string_table") def test_from_string_legacy_string(self): cls = self._get_target_class() with self.assertRaises(ValueError): - cls.from_string('string-project:string_dataset.string_table') + cls.from_string("string-project:string_dataset.string_table") def test_from_string_not_fully_qualified(self): cls = self._get_target_class() with self.assertRaises(ValueError): - cls.from_string('string_dataset.string_table') + cls.from_string("string_dataset.string_table") def test_from_api_repr_missing_identity(self): self._setUpConstants() @@ -754,13 +758,13 @@ def test_from_api_repr_missing_identity(self): def test_from_api_repr_bare(self): self._setUpConstants() RESOURCE = { - 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, self.TABLE_NAME), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_NAME, + "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_NAME, }, - 'type': 'TABLE', + "type": "TABLE", } klass = self._get_target_class() table = klass.from_api_repr(RESOURCE) @@ -773,11 +777,11 @@ def test_from_api_repr_w_properties(self): from google.cloud._helpers import _millis RESOURCE = self._make_resource() - RESOURCE['view'] = {'query': 'select fullname, age from person_ages'} - RESOURCE['type'] = 'VIEW' - RESOURCE['location'] = 'EU' + RESOURCE["view"] = {"query": "select fullname, age from person_ages"} + RESOURCE["type"] = "VIEW" + RESOURCE["location"] = "EU" self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) - RESOURCE['expirationTime'] = _millis(self.EXP_TIME) + RESOURCE["expirationTime"] = _millis(self.EXP_TIME) klass = self._get_target_class() table = klass.from_api_repr(RESOURCE) self._verifyResourceProperties(table, RESOURCE) @@ -785,16 +789,14 @@ def test_from_api_repr_w_properties(self): def test_from_api_with_encryption(self): self._setUpConstants() RESOURCE = { - 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, self.TABLE_NAME), - 'tableReference': { - 'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_NAME, + "id": "%s:%s.%s" % (self.PROJECT, self.DS_ID, self.TABLE_NAME), + "tableReference": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": self.TABLE_NAME, }, - 'encryptionConfiguration': { - 'kmsKeyName': self.KMS_KEY_NAME - }, - 'type': 'TABLE', + "encryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME}, + "type": "TABLE", } klass = self._get_target_class() table = klass.from_api_repr(RESOURCE) @@ -804,13 +806,13 @@ def test_to_api_repr_w_custom_field(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - table._properties['newAlphaProperty'] = 'unreleased property' + table._properties["newAlphaProperty"] = "unreleased property" resource = table.to_api_repr() exp_resource = { - 'tableReference': table_ref.to_api_repr(), - 'labels': {}, - 'newAlphaProperty': 'unreleased property' + "tableReference": table_ref.to_api_repr(), + "labels": {}, + "newAlphaProperty": "unreleased property", } self.assertEqual(resource, exp_resource) @@ -818,20 +820,18 @@ def 
test__build_resource_w_custom_field(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - table._properties['newAlphaProperty'] = 'unreleased property' - resource = table._build_resource(['newAlphaProperty']) + table._properties["newAlphaProperty"] = "unreleased property" + resource = table._build_resource(["newAlphaProperty"]) - exp_resource = { - 'newAlphaProperty': 'unreleased property' - } + exp_resource = {"newAlphaProperty": "unreleased property"} self.assertEqual(resource, exp_resource) def test__build_resource_w_custom_field_not_in__properties(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table = self._make_one(dataset.table(self.TABLE_NAME)) - table.bad = 'value' + table.bad = "value" with self.assertRaises(ValueError): - table._build_resource(['bad']) + table._build_resource(["bad"]) def test_time_partitioning_setter(self): from google.cloud.bigquery.table import TimePartitioning @@ -844,19 +844,18 @@ def test_time_partitioning_setter(self): table.time_partitioning = time_partitioning - self.assertEqual( - table.time_partitioning.type_, TimePartitioningType.DAY) + self.assertEqual(table.time_partitioning.type_, TimePartitioningType.DAY) # Both objects point to the same properties dict self.assertIs( - table._properties['timePartitioning'], - time_partitioning._properties) + table._properties["timePartitioning"], time_partitioning._properties + ) time_partitioning.expiration_ms = 10000 # Changes to TimePartitioning object are reflected in Table properties self.assertEqual( - table.time_partitioning.expiration_ms, - time_partitioning.expiration_ms) + table.time_partitioning.expiration_ms, time_partitioning.expiration_ms + ) def test_time_partitioning_setter_bad_type(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -864,7 +863,7 @@ def test_time_partitioning_setter_bad_type(self): table = self._make_one(table_ref) with self.assertRaises(ValueError): - table.time_partitioning = {'timePartitioning': {'type': 'DAY'}} + table.time_partitioning = {"timePartitioning": {"type": "DAY"}} def test_time_partitioning_setter_none(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -888,7 +887,7 @@ def test_partitioning_type_setter(self): table.partitioning_type = TimePartitioningType.DAY - self.assertEqual(table.partitioning_type, 'DAY') + self.assertEqual(table.partitioning_type, "DAY") self.assertEqual(len(warned), 3) for warning in warned: @@ -904,9 +903,9 @@ def test_partitioning_type_setter_w_time_partitioning_set(self): table.time_partitioning = TimePartitioning() with warnings.catch_warnings(record=True) as warned: - table.partitioning_type = 'NEW_FAKE_TYPE' + table.partitioning_type = "NEW_FAKE_TYPE" - self.assertEqual(table.partitioning_type, 'NEW_FAKE_TYPE') + self.assertEqual(table.partitioning_type, "NEW_FAKE_TYPE") self.assertEqual(len(warned), 2) for warning in warned: @@ -944,7 +943,7 @@ def test_partition_expiration_setter(self): self.assertEqual(table.partition_expiration, 100) # defaults to 'DAY' when expiration is set and type is not set - self.assertEqual(table.partitioning_type, 'DAY') + self.assertEqual(table.partitioning_type, "DAY") self.assertEqual(len(warned), 4) for warning in warned: @@ -954,22 +953,22 @@ def test_clustering_fields_setter_w_fields(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - fields = ['email', 'phone'] + fields = ["email", "phone"] 
table.clustering_fields = fields self.assertEqual(table.clustering_fields, fields) - self.assertEqual(table._properties['clustering'], {'fields': fields}) + self.assertEqual(table._properties["clustering"], {"fields": fields}) def test_clustering_fields_setter_w_none(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) - fields = ['email', 'phone'] + fields = ["email", "phone"] - table._properties['clustering'] = {'fields': fields} + table._properties["clustering"] = {"fields": fields} table.clustering_fields = None self.assertEqual(table.clustering_fields, None) - self.assertFalse('clustering' in table._properties) + self.assertFalse("clustering" in table._properties) def test_clustering_fields_setter_w_none_noop(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -978,25 +977,27 @@ def test_clustering_fields_setter_w_none_noop(self): table.clustering_fields = None self.assertEqual(table.clustering_fields, None) - self.assertFalse('clustering' in table._properties) + self.assertFalse("clustering" in table._properties) def test_encryption_configuration_setter(self): from google.cloud.bigquery.table import EncryptionConfiguration + dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) encryption_configuration = EncryptionConfiguration( - kms_key_name=self.KMS_KEY_NAME) + kms_key_name=self.KMS_KEY_NAME + ) table.encryption_configuration = encryption_configuration - self.assertEqual(table.encryption_configuration.kms_key_name, - self.KMS_KEY_NAME) + self.assertEqual(table.encryption_configuration.kms_key_name, self.KMS_KEY_NAME) table.encryption_configuration = None self.assertIsNone(table.encryption_configuration) def test___repr__(self): from google.cloud.bigquery.table import TableReference - dataset = DatasetReference('project1', 'dataset1') - table1 = self._make_one(TableReference(dataset, 'table1')) + + dataset = DatasetReference("project1", "dataset1") + table1 = self._make_one(TableReference(dataset, "table1")) expected = ( "Table(TableReference(" "DatasetReference('project1', 'dataset1'), " @@ -1007,9 +1008,9 @@ def test___repr__(self): class Test_row_from_mapping(unittest.TestCase, _SchemaBase): - PROJECT = 'prahj-ekt' - DS_ID = 'dataset-name' - TABLE_NAME = 'table-name' + PROJECT = "prahj-ekt" + DS_ID = "dataset-name" + TABLE_NAME = "table-name" def _call_fut(self, mapping, schema): from google.cloud.bigquery.table import _row_from_mapping @@ -1018,7 +1019,8 @@ def _call_fut(self, mapping, schema): def test__row_from_mapping_wo_schema(self): from google.cloud.bigquery.table import Table, _TABLE_HAS_NO_SCHEMA - MAPPING = {'full_name': 'Phred Phlyntstone', 'age': 32} + + MAPPING = {"full_name": "Phred Phlyntstone", "age": 32} dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = Table(table_ref) @@ -1030,48 +1032,50 @@ def test__row_from_mapping_wo_schema(self): def test__row_from_mapping_w_invalid_schema(self): from google.cloud.bigquery.table import Table, SchemaField + MAPPING = { - 'full_name': 'Phred Phlyntstone', - 'age': 32, - 'colors': ['red', 'green'], - 'bogus': 'WHATEVER', + "full_name": "Phred Phlyntstone", + "age": 32, + "colors": ["red", "green"], + "bogus": "WHATEVER", } dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = 
SchemaField('age', 'INTEGER', mode='REQUIRED') - colors = SchemaField('colors', 'DATETIME', mode='REPEATED') - bogus = SchemaField('joined', 'STRING', mode='BOGUS') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") + colors = SchemaField("colors", "DATETIME", mode="REPEATED") + bogus = SchemaField("joined", "STRING", mode="BOGUS") table = Table(table_ref, schema=[full_name, age, colors, bogus]) with self.assertRaises(ValueError) as exc: self._call_fut(MAPPING, table.schema) - self.assertIn('Unknown field mode: BOGUS', str(exc.exception)) + self.assertIn("Unknown field mode: BOGUS", str(exc.exception)) def test__row_from_mapping_w_schema(self): from google.cloud.bigquery.table import Table, SchemaField + MAPPING = { - 'full_name': 'Phred Phlyntstone', - 'age': 32, - 'colors': ['red', 'green'], - 'extra': 'IGNORED', + "full_name": "Phred Phlyntstone", + "age": 32, + "colors": ["red", "green"], + "extra": "IGNORED", } dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') - colors = SchemaField('colors', 'DATETIME', mode='REPEATED') - joined = SchemaField('joined', 'STRING', mode='NULLABLE') + full_name = SchemaField("full_name", "STRING", mode="REQUIRED") + age = SchemaField("age", "INTEGER", mode="REQUIRED") + colors = SchemaField("colors", "DATETIME", mode="REPEATED") + joined = SchemaField("joined", "STRING", mode="NULLABLE") table = Table(table_ref, schema=[full_name, age, colors, joined]) self.assertEqual( self._call_fut(MAPPING, table.schema), - ('Phred Phlyntstone', 32, ['red', 'green'], None)) + ("Phred Phlyntstone", 32, ["red", "green"], None), + ) class TestTableListItem(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.bigquery.table import TableListItem @@ -1084,27 +1088,25 @@ def _make_one(self, *args, **kw): def test_ctor(self): import warnings - project = 'test-project' - dataset_id = 'test_dataset' - table_id = 'coffee_table' + project = "test-project" + dataset_id = "test_dataset" + table_id = "coffee_table" resource = { - 'kind': 'bigquery#table', - 'id': '{}:{}.{}'.format(project, dataset_id, table_id), - 'tableReference': { - 'projectId': project, - 'datasetId': dataset_id, - 'tableId': table_id, - }, - 'friendlyName': 'Mahogany Coffee Table', - 'type': 'TABLE', - 'timePartitioning': { - 'type': 'DAY', - 'field': 'mycolumn', - 'expirationMs': '10000', + "kind": "bigquery#table", + "id": "{}:{}.{}".format(project, dataset_id, table_id), + "tableReference": { + "projectId": project, + "datasetId": dataset_id, + "tableId": table_id, }, - 'labels': { - 'some-stuff': 'this-is-a-label', + "friendlyName": "Mahogany Coffee Table", + "type": "TABLE", + "timePartitioning": { + "type": "DAY", + "field": "mycolumn", + "expirationMs": "10000", }, + "labels": {"some-stuff": "this-is-a-label"}, } table = self._make_one(resource) @@ -1112,21 +1114,21 @@ def test_ctor(self): self.assertEqual(table.dataset_id, dataset_id) self.assertEqual(table.table_id, table_id) self.assertEqual( - table.full_table_id, - '{}:{}.{}'.format(project, dataset_id, table_id)) + table.full_table_id, "{}:{}.{}".format(project, dataset_id, table_id) + ) self.assertEqual(table.reference.project, project) self.assertEqual(table.reference.dataset_id, dataset_id) self.assertEqual(table.reference.table_id, table_id) - self.assertEqual(table.friendly_name, 
'Mahogany Coffee Table') - self.assertEqual(table.table_type, 'TABLE') - self.assertEqual(table.time_partitioning.type_, 'DAY') + self.assertEqual(table.friendly_name, "Mahogany Coffee Table") + self.assertEqual(table.table_type, "TABLE") + self.assertEqual(table.time_partitioning.type_, "DAY") self.assertEqual(table.time_partitioning.expiration_ms, 10000) - self.assertEqual(table.time_partitioning.field, 'mycolumn') - self.assertEqual(table.labels['some-stuff'], 'this-is-a-label') + self.assertEqual(table.time_partitioning.field, "mycolumn") + self.assertEqual(table.labels["some-stuff"], "this-is-a-label") self.assertIsNone(table.view_use_legacy_sql) with warnings.catch_warnings(record=True) as warned: - self.assertEqual(table.partitioning_type, 'DAY') + self.assertEqual(table.partitioning_type, "DAY") self.assertEqual(table.partition_expiration, 10000) self.assertEqual(len(warned), 2) @@ -1134,18 +1136,18 @@ def test_ctor(self): self.assertIs(warning.category, PendingDeprecationWarning) def test_ctor_view(self): - project = 'test-project' - dataset_id = 'test_dataset' - table_id = 'just_looking' + project = "test-project" + dataset_id = "test_dataset" + table_id = "just_looking" resource = { - 'kind': 'bigquery#table', - 'id': '{}:{}.{}'.format(project, dataset_id, table_id), - 'tableReference': { - 'projectId': project, - 'datasetId': dataset_id, - 'tableId': table_id, + "kind": "bigquery#table", + "id": "{}:{}.{}".format(project, dataset_id, table_id), + "tableReference": { + "projectId": project, + "datasetId": dataset_id, + "tableId": table_id, }, - 'type': 'VIEW', + "type": "VIEW", } table = self._make_one(resource) @@ -1153,12 +1155,12 @@ def test_ctor_view(self): self.assertEqual(table.dataset_id, dataset_id) self.assertEqual(table.table_id, table_id) self.assertEqual( - table.full_table_id, - '{}:{}.{}'.format(project, dataset_id, table_id)) + table.full_table_id, "{}:{}.{}".format(project, dataset_id, table_id) + ) self.assertEqual(table.reference.project, project) self.assertEqual(table.reference.dataset_id, dataset_id) self.assertEqual(table.reference.table_id, table_id) - self.assertEqual(table.table_type, 'VIEW') + self.assertEqual(table.table_type, "VIEW") # Server default for useLegacySql is True. 
self.assertTrue(table.view_use_legacy_sql) @@ -1166,16 +1168,16 @@ def test_ctor_missing_properties(self): import warnings resource = { - 'tableReference': { - 'projectId': 'testproject', - 'datasetId': 'testdataset', - 'tableId': 'testtable', - }, + "tableReference": { + "projectId": "testproject", + "datasetId": "testdataset", + "tableId": "testtable", + } } table = self._make_one(resource) - self.assertEqual(table.project, 'testproject') - self.assertEqual(table.dataset_id, 'testdataset') - self.assertEqual(table.table_id, 'testtable') + self.assertEqual(table.project, "testproject") + self.assertEqual(table.dataset_id, "testdataset") + self.assertEqual(table.table_id, "testtable") self.assertIsNone(table.full_table_id) self.assertIsNone(table.friendly_name) self.assertIsNone(table.table_type) @@ -1193,30 +1195,21 @@ def test_ctor_missing_properties(self): def test_ctor_wo_project(self): resource = { - 'tableReference': { - 'datasetId': 'testdataset', - 'tableId': 'testtable', - }, + "tableReference": {"datasetId": "testdataset", "tableId": "testtable"} } with self.assertRaises(ValueError): self._make_one(resource) def test_ctor_wo_dataset(self): resource = { - 'tableReference': { - 'projectId': 'testproject', - 'tableId': 'testtable', - }, + "tableReference": {"projectId": "testproject", "tableId": "testtable"} } with self.assertRaises(ValueError): self._make_one(resource) def test_ctor_wo_table(self): resource = { - 'tableReference': { - 'projectId': 'testproject', - 'datasetId': 'testdataset', - }, + "tableReference": {"projectId": "testproject", "datasetId": "testdataset"} } with self.assertRaises(ValueError): self._make_one(resource) @@ -1227,59 +1220,57 @@ def test_ctor_wo_reference(self): def test_labels_update_in_place(self): resource = { - 'tableReference': { - 'projectId': 'testproject', - 'datasetId': 'testdataset', - 'tableId': 'testtable', - }, + "tableReference": { + "projectId": "testproject", + "datasetId": "testdataset", + "tableId": "testtable", + } } table = self._make_one(resource) labels = table.labels - labels['foo'] = 'bar' # update in place - self.assertEqual(table.labels, {'foo': 'bar'}) + labels["foo"] = "bar" # update in place + self.assertEqual(table.labels, {"foo": "bar"}) class TestRow(unittest.TestCase): - def test_row(self): from google.cloud.bigquery.table import Row VALUES = (1, 2, 3) - row = Row(VALUES, {'a': 0, 'b': 1, 'c': 2}) + row = Row(VALUES, {"a": 0, "b": 1, "c": 2}) self.assertEqual(row.a, 1) self.assertEqual(row[1], 2) - self.assertEqual(row['c'], 3) + self.assertEqual(row["c"], 3) self.assertEqual(len(row), 3) self.assertEqual(row.values(), VALUES) - self.assertEqual(set(row.keys()), set({'a': 1, 'b': 2, 'c': 3}.keys())) - self.assertEqual(set(row.items()), - set({'a': 1, 'b': 2, 'c': 3}.items())) - self.assertEqual(row.get('a'), 1) - self.assertEqual(row.get('d'), None) - self.assertEqual(row.get('d', ''), '') - self.assertEqual(row.get('d', default=''), '') - self.assertEqual(repr(row), - "Row((1, 2, 3), {'a': 0, 'b': 1, 'c': 2})") + self.assertEqual(set(row.keys()), set({"a": 1, "b": 2, "c": 3}.keys())) + self.assertEqual(set(row.items()), set({"a": 1, "b": 2, "c": 3}.items())) + self.assertEqual(row.get("a"), 1) + self.assertEqual(row.get("d"), None) + self.assertEqual(row.get("d", ""), "") + self.assertEqual(row.get("d", default=""), "") + self.assertEqual(repr(row), "Row((1, 2, 3), {'a': 0, 'b': 1, 'c': 2})") self.assertFalse(row != row) self.assertFalse(row == 3) with self.assertRaises(AttributeError): row.z with 
self.assertRaises(KeyError): - row['z'] + row["z"] class Test_EmptyRowIterator(unittest.TestCase): - - @mock.patch('google.cloud.bigquery.table.pandas', new=None) + @mock.patch("google.cloud.bigquery.table.pandas", new=None) def test_to_dataframe_error_if_pandas_is_none(self): from google.cloud.bigquery.table import _EmptyRowIterator + row_iterator = _EmptyRowIterator() with self.assertRaises(ValueError): row_iterator.to_dataframe() - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): from google.cloud.bigquery.table import _EmptyRowIterator + row_iterator = _EmptyRowIterator() df = row_iterator.to_dataframe() self.assertIsInstance(df, pandas.DataFrame) @@ -1287,7 +1278,6 @@ def test_to_dataframe(self): class TestRowIterator(unittest.TestCase): - def test_constructor(self): from google.cloud.bigquery.table import RowIterator from google.cloud.bigquery.table import _item_to_row @@ -1295,7 +1285,7 @@ def test_constructor(self): client = mock.sentinel.client api_request = mock.sentinel.api_request - path = '/foo' + path = "/foo" schema = [] iterator = RowIterator(client, api_request, path, schema) @@ -1303,7 +1293,7 @@ def test_constructor(self): self.assertIs(iterator.client, client) self.assertEqual(iterator.path, path) self.assertIs(iterator.item_to_value, _item_to_row) - self.assertEqual(iterator._items_key, 'rows') + self.assertEqual(iterator._items_key, "rows") self.assertIsNone(iterator.max_results) self.assertEqual(iterator.extra_params, {}) self.assertIs(iterator._page_start, _rows_page_start) @@ -1317,131 +1307,129 @@ def test_iterate(self): from google.cloud.bigquery.table import SchemaField schema = [ - SchemaField('name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] rows = [ - {'f': [{'v': 'Phred Phlyntstone'}, {'v': '32'}]}, - {'f': [{'v': 'Bharney Rhubble'}, {'v': '33'}]}, + {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, + {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, ] - path = '/foo' - api_request = mock.Mock(return_value={'rows': rows}) - row_iterator = RowIterator( - mock.sentinel.client, api_request, path, schema) + path = "/foo" + api_request = mock.Mock(return_value={"rows": rows}) + row_iterator = RowIterator(mock.sentinel.client, api_request, path, schema) self.assertEqual(row_iterator.num_results, 0) rows_iter = iter(row_iterator) val1 = six.next(rows_iter) - self.assertEqual(val1.name, 'Phred Phlyntstone') + self.assertEqual(val1.name, "Phred Phlyntstone") self.assertEqual(row_iterator.num_results, 1) val2 = six.next(rows_iter) - self.assertEqual(val2.name, 'Bharney Rhubble') + self.assertEqual(val2.name, "Bharney Rhubble") self.assertEqual(row_iterator.num_results, 2) with self.assertRaises(StopIteration): six.next(rows_iter) - api_request.assert_called_once_with( - method='GET', path=path, query_params={}) + api_request.assert_called_once_with(method="GET", path=path, query_params={}) def test_page_size(self): from google.cloud.bigquery.table import RowIterator from google.cloud.bigquery.table import SchemaField schema = [ - SchemaField('name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] rows = [ - {'f': [{'v': 'Phred Phlyntstone'}, {'v': '32'}]}, - {'f': [{'v': 'Bharney Rhubble'}, {'v': '33'}]}, + {"f": 
[{"v": "Phred Phlyntstone"}, {"v": "32"}]}, + {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, ] - path = '/foo' - api_request = mock.Mock(return_value={'rows': rows}) + path = "/foo" + api_request = mock.Mock(return_value={"rows": rows}) row_iterator = RowIterator( - mock.sentinel.client, api_request, path, schema, page_size=4) + mock.sentinel.client, api_request, path, schema, page_size=4 + ) row_iterator._get_next_page_response() api_request.assert_called_once_with( - method='GET', path=path, query_params={ - 'maxResults': row_iterator._page_size}) + method="GET", + path=path, + query_params={"maxResults": row_iterator._page_size}, + ) - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): from google.cloud.bigquery.table import RowIterator from google.cloud.bigquery.table import SchemaField schema = [ - SchemaField('name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] rows = [ - {'f': [{'v': 'Phred Phlyntstone'}, {'v': '32'}]}, - {'f': [{'v': 'Bharney Rhubble'}, {'v': '33'}]}, - {'f': [{'v': 'Wylma Phlyntstone'}, {'v': '29'}]}, - {'f': [{'v': 'Bhettye Rhubble'}, {'v': '27'}]}, + {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, + {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, + {"f": [{"v": "Wylma Phlyntstone"}, {"v": "29"}]}, + {"f": [{"v": "Bhettye Rhubble"}, {"v": "27"}]}, ] - path = '/foo' - api_request = mock.Mock(return_value={'rows': rows}) - row_iterator = RowIterator( - mock.sentinel.client, api_request, path, schema) + path = "/foo" + api_request = mock.Mock(return_value={"rows": rows}) + row_iterator = RowIterator(mock.sentinel.client, api_request, path, schema) df = row_iterator.to_dataframe() self.assertIsInstance(df, pandas.DataFrame) self.assertEqual(len(df), 4) # verify the number of rows - self.assertEqual(list(df), ['name', 'age']) # verify the column names - self.assertEqual(df.name.dtype.name, 'object') - self.assertEqual(df.age.dtype.name, 'int64') + self.assertEqual(list(df), ["name", "age"]) # verify the column names + self.assertEqual(df.name.dtype.name, "object") + self.assertEqual(df.age.dtype.name, "int64") - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_empty_results(self): from google.cloud.bigquery.table import RowIterator from google.cloud.bigquery.table import SchemaField schema = [ - SchemaField('name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] - path = '/foo' - api_request = mock.Mock(return_value={'rows': []}) - row_iterator = RowIterator( - mock.sentinel.client, api_request, path, schema) + path = "/foo" + api_request = mock.Mock(return_value={"rows": []}) + row_iterator = RowIterator(mock.sentinel.client, api_request, path, schema) df = row_iterator.to_dataframe() self.assertIsInstance(df, pandas.DataFrame) self.assertEqual(len(df), 0) # verify the number of rows - self.assertEqual(list(df), ['name', 'age']) # verify the column names + self.assertEqual(list(df), ["name", "age"]) # verify the column names - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_various_types_nullable(self): import datetime from 
google.cloud.bigquery.table import RowIterator from google.cloud.bigquery.table import SchemaField schema = [ - SchemaField('start_timestamp', 'TIMESTAMP'), - SchemaField('seconds', 'INT64'), - SchemaField('miles', 'FLOAT64'), - SchemaField('payment_type', 'STRING'), - SchemaField('complete', 'BOOL'), - SchemaField('date', 'DATE'), + SchemaField("start_timestamp", "TIMESTAMP"), + SchemaField("seconds", "INT64"), + SchemaField("miles", "FLOAT64"), + SchemaField("payment_type", "STRING"), + SchemaField("complete", "BOOL"), + SchemaField("date", "DATE"), ] row_data = [ [None, None, None, None, None, None], - ['1.4338368E9', '420', '1.1', 'Cash', 'true', '1999-12-01'], - ['1.3878117E9', '2580', '17.7', 'Cash', 'false', '1953-06-14'], - ['1.3855653E9', '2280', '4.4', 'Credit', 'true', '1981-11-04'], + ["1.4338368E9", "420", "1.1", "Cash", "true", "1999-12-01"], + ["1.3878117E9", "2580", "17.7", "Cash", "false", "1953-06-14"], + ["1.3855653E9", "2280", "4.4", "Credit", "true", "1981-11-04"], ] - rows = [{'f': [{'v': field} for field in row]} for row in row_data] - path = '/foo' - api_request = mock.Mock(return_value={'rows': rows}) - row_iterator = RowIterator( - mock.sentinel.client, api_request, path, schema) + rows = [{"f": [{"v": field} for field in row]} for row in row_data] + path = "/foo" + api_request = mock.Mock(return_value={"rows": rows}) + row_iterator = RowIterator(mock.sentinel.client, api_request, path, schema) df = row_iterator.to_dataframe() @@ -1460,29 +1448,28 @@ def test_to_dataframe_w_various_types_nullable(self): self.assertIsInstance(row.complete, bool) self.assertIsInstance(row.date, datetime.date) - @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_column_dtypes(self): from google.cloud.bigquery.table import RowIterator from google.cloud.bigquery.table import SchemaField schema = [ - SchemaField('start_timestamp', 'TIMESTAMP'), - SchemaField('seconds', 'INT64'), - SchemaField('miles', 'FLOAT64'), - SchemaField('payment_type', 'STRING'), - SchemaField('complete', 'BOOL'), - SchemaField('date', 'DATE'), + SchemaField("start_timestamp", "TIMESTAMP"), + SchemaField("seconds", "INT64"), + SchemaField("miles", "FLOAT64"), + SchemaField("payment_type", "STRING"), + SchemaField("complete", "BOOL"), + SchemaField("date", "DATE"), ] row_data = [ - ['1.4338368E9', '420', '1.1', 'Cash', 'true', '1999-12-01'], - ['1.3878117E9', '2580', '17.7', 'Cash', 'false', '1953-06-14'], - ['1.3855653E9', '2280', '4.4', 'Credit', 'true', '1981-11-04'], + ["1.4338368E9", "420", "1.1", "Cash", "true", "1999-12-01"], + ["1.3878117E9", "2580", "17.7", "Cash", "false", "1953-06-14"], + ["1.3855653E9", "2280", "4.4", "Credit", "true", "1981-11-04"], ] - rows = [{'f': [{'v': field} for field in row]} for row in row_data] - path = '/foo' - api_request = mock.Mock(return_value={'rows': rows}) - row_iterator = RowIterator( - mock.sentinel.client, api_request, path, schema) + rows = [{"f": [{"v": field} for field in row]} for row in row_data] + path = "/foo" + api_request = mock.Mock(return_value={"rows": rows}) + row_iterator = RowIterator(mock.sentinel.client, api_request, path, schema) df = row_iterator.to_dataframe() @@ -1491,37 +1478,35 @@ def test_to_dataframe_column_dtypes(self): exp_columns = [field.name for field in schema] self.assertEqual(list(df), exp_columns) # verify the column names - self.assertEqual(df.start_timestamp.dtype.name, 'datetime64[ns, UTC]') - self.assertEqual(df.seconds.dtype.name, 'int64') - 
self.assertEqual(df.miles.dtype.name, 'float64') - self.assertEqual(df.payment_type.dtype.name, 'object') - self.assertEqual(df.complete.dtype.name, 'bool') - self.assertEqual(df.date.dtype.name, 'object') + self.assertEqual(df.start_timestamp.dtype.name, "datetime64[ns, UTC]") + self.assertEqual(df.seconds.dtype.name, "int64") + self.assertEqual(df.miles.dtype.name, "float64") + self.assertEqual(df.payment_type.dtype.name, "object") + self.assertEqual(df.complete.dtype.name, "bool") + self.assertEqual(df.date.dtype.name, "object") - @mock.patch('google.cloud.bigquery.table.pandas', new=None) + @mock.patch("google.cloud.bigquery.table.pandas", new=None) def test_to_dataframe_error_if_pandas_is_none(self): from google.cloud.bigquery.table import RowIterator from google.cloud.bigquery.table import SchemaField schema = [ - SchemaField('name', 'STRING', mode='REQUIRED'), - SchemaField('age', 'INTEGER', mode='REQUIRED') + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), ] rows = [ - {'f': [{'v': 'Phred Phlyntstone'}, {'v': '32'}]}, - {'f': [{'v': 'Bharney Rhubble'}, {'v': '33'}]}, + {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, + {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, ] - path = '/foo' - api_request = mock.Mock(return_value={'rows': rows}) - row_iterator = RowIterator( - mock.sentinel.client, api_request, path, schema) + path = "/foo" + api_request = mock.Mock(return_value={"rows": rows}) + row_iterator = RowIterator(mock.sentinel.client, api_request, path, schema) with self.assertRaises(ValueError): row_iterator.to_dataframe() class TestTimePartitioning(unittest.TestCase): - def _get_target_class(self): from google.cloud.bigquery.table import TimePartitioning @@ -1533,7 +1518,7 @@ def _make_one(self, *args, **kw): def test_constructor_defaults(self): time_partitioning = self._make_one() - self.assertEqual(time_partitioning.type_, 'DAY') + self.assertEqual(time_partitioning.type_, "DAY") self.assertIsNone(time_partitioning.field) self.assertIsNone(time_partitioning.expiration_ms) self.assertIsNone(time_partitioning.require_partition_filter) @@ -1543,13 +1528,13 @@ def test_constructor_explicit(self): time_partitioning = self._make_one( type_=TimePartitioningType.DAY, - field='name', + field="name", expiration_ms=10000, - require_partition_filter=True + require_partition_filter=True, ) - self.assertEqual(time_partitioning.type_, 'DAY') - self.assertEqual(time_partitioning.field, 'name') + self.assertEqual(time_partitioning.type_, "DAY") + self.assertEqual(time_partitioning.field, "name") self.assertEqual(time_partitioning.expiration_ms, 10000) self.assertTrue(time_partitioning.require_partition_filter) @@ -1557,7 +1542,7 @@ def test_from_api_repr_minimal(self): from google.cloud.bigquery.table import TimePartitioningType klass = self._get_target_class() - api_repr = {'type': 'DAY'} + api_repr = {"type": "DAY"} time_partitioning = klass.from_api_repr(api_repr) self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) @@ -1570,21 +1555,21 @@ def test_from_api_repr_explicit(self): klass = self._get_target_class() api_repr = { - 'type': 'DAY', - 'field': 'name', - 'expirationMs': '10000', - 'requirePartitionFilter': True, + "type": "DAY", + "field": "name", + "expirationMs": "10000", + "requirePartitionFilter": True, } time_partitioning = klass.from_api_repr(api_repr) self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) - self.assertEqual(time_partitioning.field, 'name') + self.assertEqual(time_partitioning.field, 
"name") self.assertEqual(time_partitioning.expiration_ms, 10000) self.assertTrue(time_partitioning.require_partition_filter) def test_to_api_repr_defaults(self): time_partitioning = self._make_one() - expected = {'type': 'DAY'} + expected = {"type": "DAY"} self.assertEqual(time_partitioning.to_api_repr(), expected) def test_to_api_repr_explicit(self): @@ -1592,16 +1577,16 @@ def test_to_api_repr_explicit(self): time_partitioning = self._make_one( type_=TimePartitioningType.DAY, - field='name', + field="name", expiration_ms=10000, - require_partition_filter=True + require_partition_filter=True, ) expected = { - 'type': 'DAY', - 'field': 'name', - 'expirationMs': '10000', - 'requirePartitionFilter': True, + "type": "DAY", + "field": "name", + "expirationMs": "10000", + "requirePartitionFilter": True, } self.assertEqual(time_partitioning.to_api_repr(), expected) @@ -1613,31 +1598,35 @@ def test___eq___wrong_type(self): def test___eq___type__mismatch(self): time_partitioning = self._make_one() - other = self._make_one(type_='HOUR') + other = self._make_one(type_="HOUR") self.assertNotEqual(time_partitioning, other) def test___eq___field_mismatch(self): - time_partitioning = self._make_one(field='foo') - other = self._make_one(field='bar') + time_partitioning = self._make_one(field="foo") + other = self._make_one(field="bar") self.assertNotEqual(time_partitioning, other) def test___eq___expiration_ms_mismatch(self): - time_partitioning = self._make_one(field='foo', expiration_ms=100000) - other = self._make_one(field='foo', expiration_ms=200000) + time_partitioning = self._make_one(field="foo", expiration_ms=100000) + other = self._make_one(field="foo", expiration_ms=200000) self.assertNotEqual(time_partitioning, other) def test___eq___require_partition_filter_mismatch(self): time_partitioning = self._make_one( - field='foo', expiration_ms=100000, require_partition_filter=True) + field="foo", expiration_ms=100000, require_partition_filter=True + ) other = self._make_one( - field='foo', expiration_ms=100000, require_partition_filter=False) + field="foo", expiration_ms=100000, require_partition_filter=False + ) self.assertNotEqual(time_partitioning, other) def test___eq___hit(self): time_partitioning = self._make_one( - field='foo', expiration_ms=100000, require_partition_filter=True) + field="foo", expiration_ms=100000, require_partition_filter=True + ) other = self._make_one( - field='foo', expiration_ms=100000, require_partition_filter=True) + field="foo", expiration_ms=100000, require_partition_filter=True + ) self.assertEqual(time_partitioning, other) def test___ne___wrong_type(self): @@ -1650,24 +1639,24 @@ def test___ne___same_value(self): time_partitioning1 = self._make_one() time_partitioning2 = self._make_one() # unittest ``assertEqual`` uses ``==`` not ``!=``. 
- comparison_val = (time_partitioning1 != time_partitioning2) + comparison_val = time_partitioning1 != time_partitioning2 self.assertFalse(comparison_val) def test___ne___different_values(self): time_partitioning1 = self._make_one() - time_partitioning2 = self._make_one(type_='HOUR') + time_partitioning2 = self._make_one(type_="HOUR") self.assertNotEqual(time_partitioning1, time_partitioning2) def test___hash__set_equality(self): - time_partitioning1 = self._make_one(field='foo') - time_partitioning2 = self._make_one(field='foo') + time_partitioning1 = self._make_one(field="foo") + time_partitioning2 = self._make_one(field="foo") set_one = {time_partitioning1, time_partitioning2} set_two = {time_partitioning1, time_partitioning2} self.assertEqual(set_one, set_two) def test___hash__not_equals(self): - time_partitioning1 = self._make_one(field='foo') - time_partitioning2 = self._make_one(field='bar') + time_partitioning1 = self._make_one(field="foo") + time_partitioning2 = self._make_one(field="bar") set_one = {time_partitioning1} set_two = {time_partitioning2} self.assertNotEqual(set_one, set_two) @@ -1682,14 +1671,15 @@ def test___repr___explicit(self): time_partitioning = self._make_one( type_=TimePartitioningType.DAY, - field='name', + field="name", expiration_ms=10000, - require_partition_filter=True + require_partition_filter=True, ) expected = ( "TimePartitioning(" "expirationMs=10000," "field=name," "requirePartitionFilter=True," - "type=DAY)") + "type=DAY)" + ) self.assertEqual(repr(time_partitioning), expected) diff --git a/firestore/docs/conf.py b/firestore/docs/conf.py index 08dafcb41e4d..8ad727de47ba 100644 --- a/firestore/docs/conf.py +++ b/firestore/docs/conf.py @@ -18,50 +18,50 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) -__version__ = '0.1.0' +__version__ = "0.1.0" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.intersphinx', - 'sphinx.ext.coverage', - 'sphinx.ext.napoleon', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", ] # autodoc/autosummary flags -autoclass_content = 'both' -autodoc_default_flags = ['members'] +autoclass_content = "both" +autodoc_default_flags = ["members"] autosummary_generate = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. 
-project = u'google-cloud-firestore' -copyright = u'2017, Google' -author = u'Google APIs' +project = u"google-cloud-firestore" +copyright = u"2017, Google" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -70,7 +70,7 @@ # The full version, including alpha/beta/rc tags. release = __version__ # The short X.Y version. -version = '.'.join(release.split('.')[0:2]) +version = ".".join(release.split(".")[0:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -81,37 +81,37 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -120,31 +120,31 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, @@ -154,78 +154,75 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'google-cloud-firestore-doc' +htmlhelp_basename = "google-cloud-firestore-doc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. #'preamble': '', - # Latex figure (float) alignment #'figure_align': 'htbp', } @@ -234,39 +231,51 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - (master_doc, 'google-cloud-firestore.tex', - u'google-cloud-firestore Documentation', author, 'manual'), + ( + master_doc, + "google-cloud-firestore.tex", + u"google-cloud-firestore Documentation", + author, + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, 'google-cloud-firestore', - u'google-cloud-firestore Documentation', [author], 1)] +man_pages = [ + ( + master_doc, + "google-cloud-firestore", + u"google-cloud-firestore Documentation", + [author], + 1, + ) +] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -274,27 +283,33 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'google-cloud-firestore', - u'google-cloud-firestore Documentation', author, 'google-cloud-firestore', - 'GAPIC library for the {metadata.shortName} v1beta1 service', 'APIs'), + ( + master_doc, + "google-cloud-firestore", + u"google-cloud-firestore Documentation", + author, + "google-cloud-firestore", + "GAPIC library for the {metadata.shortName} v1beta1 service", + "APIs", + ) ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'python': ('http://python.readthedocs.org/en/latest/', None), - 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), } # Napoleon settings diff --git a/firestore/google/__init__.py b/firestore/google/__init__.py index 7a11b50cbdd5..aa5aeae602bc 100644 --- a/firestore/google/__init__.py +++ b/firestore/google/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/firestore/google/cloud/__init__.py b/firestore/google/cloud/__init__.py index 7a11b50cbdd5..aa5aeae602bc 100644 --- a/firestore/google/cloud/__init__.py +++ b/firestore/google/cloud/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/firestore/google/cloud/firestore.py b/firestore/google/cloud/firestore.py index a03ae65ea798..98ccb62f3416 100644 --- a/firestore/google/cloud/firestore.py +++ b/firestore/google/cloud/firestore.py @@ -37,23 +37,23 @@ __all__ = [ - '__version__', - 'Client', - 'CollectionReference', - 'DELETE_FIELD', - 'DocumentReference', - 'DocumentSnapshot', - 'enums', - 'ExistsOption', - 'GeoPoint', - 'LastUpdateOption', - 'Query', - 'ReadAfterWriteError', - 'SERVER_TIMESTAMP', - 'Transaction', - 'transactional', - 'types', - 'Watch', - 'WriteBatch', - 'WriteOption', + "__version__", + "Client", + "CollectionReference", + "DELETE_FIELD", + "DocumentReference", + "DocumentSnapshot", + "enums", + "ExistsOption", + "GeoPoint", + "LastUpdateOption", + "Query", + "ReadAfterWriteError", + "SERVER_TIMESTAMP", + "Transaction", + "transactional", + "types", + "Watch", + "WriteBatch", + "WriteOption", ] diff --git a/firestore/google/cloud/firestore_v1beta1/__init__.py b/firestore/google/cloud/firestore_v1beta1/__init__.py index dda63c728177..f681d84e6a37 100644 --- a/firestore/google/cloud/firestore_v1beta1/__init__.py +++ b/firestore/google/cloud/firestore_v1beta1/__init__.py @@ -15,7 +15,8 @@ """Python idiomatic client for Google Cloud Firestore.""" from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-firestore').version + +__version__ = get_distribution("google-cloud-firestore").version from google.cloud.firestore_v1beta1 import types from google.cloud.firestore_v1beta1._helpers import GeoPoint @@ -40,25 +41,25 @@ __all__ = [ - '__version__', - 'ArrayRemove', - 'ArrayUnion', - 'Client', - 'CollectionReference', - 'DELETE_FIELD', - 'DocumentReference', - 'DocumentSnapshot', - 'enums', - 'ExistsOption', - 'GeoPoint', - 'LastUpdateOption', - 'Query', - 'ReadAfterWriteError', - 'SERVER_TIMESTAMP', - 'Transaction', - 'transactional', - 'types', - 'Watch', - 'WriteBatch', - 'WriteOption', + "__version__", + "ArrayRemove", + "ArrayUnion", + "Client", + "CollectionReference", + "DELETE_FIELD", + "DocumentReference", + "DocumentSnapshot", + "enums", + "ExistsOption", + "GeoPoint", + "LastUpdateOption", + "Query", + "ReadAfterWriteError", + "SERVER_TIMESTAMP", + "Transaction", + "transactional", + "types", + "Watch", + "WriteBatch", + "WriteOption", ] diff --git a/firestore/google/cloud/firestore_v1beta1/_helpers.py b/firestore/google/cloud/firestore_v1beta1/_helpers.py index 634f1081bf68..42b5b6b1245e 100644 --- 
a/firestore/google/cloud/firestore_v1beta1/_helpers.py +++ b/firestore/google/cloud/firestore_v1beta1/_helpers.py @@ -38,26 +38,25 @@ from google.cloud.firestore_v1beta1.proto import write_pb2 -BAD_PATH_TEMPLATE = ( - 'A path element must be a string. Received {}, which is a {}.') -FIELD_PATH_MISSING_TOP = '{!r} is not contained in the data' -FIELD_PATH_MISSING_KEY = '{!r} is not contained in the data for the key {!r}' +BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}." +FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data" +FIELD_PATH_MISSING_KEY = "{!r} is not contained in the data for the key {!r}" FIELD_PATH_WRONG_TYPE = ( - 'The data at {!r} is not a dictionary, so it cannot contain the key {!r}') -FIELD_PATH_DELIMITER = '.' -DOCUMENT_PATH_DELIMITER = '/' -INACTIVE_TXN = ( - 'Transaction not in progress, cannot be used in API requests.') -READ_AFTER_WRITE_ERROR = 'Attempted read after write in a transaction.' + "The data at {!r} is not a dictionary, so it cannot contain the key {!r}" +) +FIELD_PATH_DELIMITER = "." +DOCUMENT_PATH_DELIMITER = "/" +INACTIVE_TXN = "Transaction not in progress, cannot be used in API requests." +READ_AFTER_WRITE_ERROR = "Attempted read after write in a transaction." BAD_REFERENCE_ERROR = ( - 'Reference value {!r} in unexpected format, expected to be of the form ' - '``projects/{{project}}/databases/{{database}}/' - 'documents/{{document_path}}``.') + "Reference value {!r} in unexpected format, expected to be of the form " + "``projects/{{project}}/databases/{{database}}/" + "documents/{{document_path}}``." +) WRONG_APP_REFERENCE = ( - 'Document {!r} does not correspond to the same database ' - '({!r}) as the client.') -REQUEST_TIME_ENUM = ( - enums.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME) + "Document {!r} does not correspond to the same database " "({!r}) as the client." +) +REQUEST_TIME_ENUM = enums.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME _GRPC_ERROR_MAPPING = { grpc.StatusCode.ALREADY_EXISTS: exceptions.Conflict, grpc.StatusCode.NOT_FOUND: exceptions.NotFound, @@ -82,8 +81,7 @@ def to_protobuf(self): Returns: google.type.latlng_pb2.LatLng: The current point as a protobuf. """ - return latlng_pb2.LatLng(latitude=self.latitude, - longitude=self.longitude) + return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude) def __eq__(self, other): """Compare two geo points for equality. @@ -96,8 +94,7 @@ def __eq__(self, other): if not isinstance(other, GeoPoint): return NotImplemented - return (self.latitude == other.latitude and - self.longitude == other.longitude) + return self.latitude == other.latitude and self.longitude == other.longitude def __ne__(self, other): """Compare two geo points for inequality. @@ -121,10 +118,11 @@ class FieldPath(object): parts: (one or more strings) Indicating path of the key to be used. """ + def __init__(self, *parts): for part in parts: if not isinstance(part, six.string_types) or not part: - error = 'One or more components is not a string or is empty.' + error = "One or more components is not a string or is empty." raise ValueError(error) self.parts = tuple(parts) @@ -147,11 +145,11 @@ def from_string(string): as arguments to `FieldPath`. 
""" # XXX this should just handle things with the invalid chars - invalid_characters = '~*/[]' + invalid_characters = "~*/[]" for invalid_character in invalid_characters: if invalid_character in string: - raise ValueError('Invalid characters in string.') - string = string.split('.') + raise ValueError("Invalid characters in string.") + string = string.split(".") return FieldPath(*string) def __repr__(self): @@ -159,7 +157,7 @@ def __repr__(self): for part in self.parts: paths += "'" + part + "'," paths = paths[:-1] - return 'FieldPath({})'.format(paths) + return "FieldPath({})".format(paths) def __hash__(self): return hash(self.to_api_repr()) @@ -191,7 +189,7 @@ def __add__(self, other): return NotImplemented def eq_or_parent(self, other): - return self.parts[:len(other.parts)] == other.parts[:len(self.parts)] + return self.parts[: len(other.parts)] == other.parts[: len(self.parts)] def to_api_repr(self): """ Returns quoted string representation of the FieldPath @@ -239,16 +237,14 @@ def verify_path(path, is_collection): """ num_elements = len(path) if num_elements == 0: - raise ValueError('Document or collection path cannot be empty') + raise ValueError("Document or collection path cannot be empty") if is_collection: if num_elements % 2 == 0: - raise ValueError( - 'A collection must have an odd number of path elements') + raise ValueError("A collection must have an odd number of path elements") else: if num_elements % 2 == 1: - raise ValueError( - 'A document must have an even number of path elements') + raise ValueError("A document must have an even number of path elements") for element in path: if not isinstance(element, six.string_types): @@ -285,8 +281,7 @@ def encode_value(value): return document_pb2.Value(double_value=value) if isinstance(value, datetime.datetime): - return document_pb2.Value( - timestamp_value=_datetime_to_pb_timestamp(value)) + return document_pb2.Value(timestamp_value=_datetime_to_pb_timestamp(value)) if isinstance(value, six.text_type): return document_pb2.Value(string_value=value) @@ -296,7 +291,7 @@ def encode_value(value): # NOTE: We avoid doing an isinstance() check for a Document # here to avoid import cycles. - document_path = getattr(value, '_document_path', None) + document_path = getattr(value, "_document_path", None) if document_path is not None: return document_pb2.Value(reference_value=document_path) @@ -314,8 +309,8 @@ def encode_value(value): return document_pb2.Value(map_value=value_pb) raise TypeError( - 'Cannot convert to a Firestore Value', value, - 'Invalid type', type(value)) + "Cannot convert to a Firestore Value", value, "Invalid type", type(value) + ) def encode_dict(values_dict): @@ -329,10 +324,7 @@ def encode_dict(values_dict): dictionary of string keys and ``Value`` protobufs as dictionary values. """ - return { - key: encode_value(value) - for key, value in six.iteritems(values_dict) - } + return {key: encode_value(value) for key, value in six.iteritems(values_dict)} def reference_value_to_document(reference_value, client): @@ -363,8 +355,7 @@ def reference_value_to_document(reference_value, client): # The sixth part is `a/b/c/d` (i.e. 
the document path) document = client.document(parts[-1]) if document._document_path != reference_value: - msg = WRONG_APP_REFERENCE.format( - reference_value, client._database_string) + msg = WRONG_APP_REFERENCE.format(reference_value, client._database_string) raise ValueError(msg) return document @@ -388,38 +379,35 @@ def decode_value(value, client): NotImplementedError: If the ``value_type`` is ``reference_value``. ValueError: If the ``value_type`` is unknown. """ - value_type = value.WhichOneof('value_type') + value_type = value.WhichOneof("value_type") - if value_type == 'null_value': + if value_type == "null_value": return None - elif value_type == 'boolean_value': + elif value_type == "boolean_value": return value.boolean_value - elif value_type == 'integer_value': + elif value_type == "integer_value": return value.integer_value - elif value_type == 'double_value': + elif value_type == "double_value": return value.double_value - elif value_type == 'timestamp_value': + elif value_type == "timestamp_value": # NOTE: This conversion is "lossy", Python ``datetime.datetime`` # has microsecond precision but ``timestamp_value`` has # nanosecond precision. return _pb_timestamp_to_datetime(value.timestamp_value) - elif value_type == 'string_value': + elif value_type == "string_value": return value.string_value - elif value_type == 'bytes_value': + elif value_type == "bytes_value": return value.bytes_value - elif value_type == 'reference_value': + elif value_type == "reference_value": return reference_value_to_document(value.reference_value, client) - elif value_type == 'geo_point_value': - return GeoPoint( - value.geo_point_value.latitude, - value.geo_point_value.longitude) - elif value_type == 'array_value': - return [decode_value(element, client) - for element in value.array_value.values] - elif value_type == 'map_value': + elif value_type == "geo_point_value": + return GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude) + elif value_type == "array_value": + return [decode_value(element, client) for element in value.array_value.values] + elif value_type == "map_value": return decode_dict(value.map_value.fields, client) else: - raise ValueError('Unknown ``value_type``', value_type) + raise ValueError("Unknown ``value_type``", value_type) def decode_dict(value_fields, client): @@ -437,12 +425,11 @@ def decode_dict(value_fields, client): of native Python values converted from the ``value_fields``. 
""" return { - key: decode_value(value, client) - for key, value in six.iteritems(value_fields) + key: decode_value(value, client) for key, value in six.iteritems(value_fields) } -SIMPLE_FIELD_NAME = re.compile('^[_a-zA-Z][_a-zA-Z0-9]*$') +SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$") def get_field_path(field_names): @@ -478,19 +465,18 @@ def get_field_path(field_names): if match and match.group(0) == field_name: result.append(field_name) else: - replaced = field_name.replace('\\', '\\\\').replace('`', '\\`') - result.append('`' + replaced + '`') + replaced = field_name.replace("\\", "\\\\").replace("`", "\\`") + result.append("`" + replaced + "`") return FIELD_PATH_DELIMITER.join(result) PATH_ELEMENT_TOKENS = [ - ('SIMPLE', r'[_a-zA-Z][_a-zA-Z0-9]*'), # unquoted elements - ('QUOTED', r'`(?:\\`|[^`])*?`'), # quoted elements, unquoted - ('DOT', r'\.'), # separator + ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"), # unquoted elements + ("QUOTED", r"`(?:\\`|[^`])*?`"), # quoted elements, unquoted + ("DOT", r"\."), # separator ] -TOKENS_PATTERN = '|'.join( - '(?P<{}>{})'.format(*pair) for pair in PATH_ELEMENT_TOKENS) +TOKENS_PATTERN = "|".join("(?P<{}>{})".format(*pair) for pair in PATH_ELEMENT_TOKENS) TOKENS_REGEX = re.compile(TOKENS_PATTERN) @@ -532,12 +518,12 @@ def split_field_path(path): for element in _tokenize_field_path(path): if want_dot: - if element != '.': + if element != ".": raise ValueError("Invalid path: {}".format(path)) else: want_dot = False else: - if element == '.': + if element == ".": raise ValueError("Invalid path: {}".format(path)) elements.append(element) want_dot = True @@ -569,10 +555,10 @@ def parse_field_path(api_repr): field_names = [] for field_name in split_field_path(api_repr): # non-simple field name - if field_name[0] == '`' and field_name[-1] == '`': + if field_name[0] == "`" and field_name[-1] == "`": field_name = field_name[1:-1] - field_name = field_name.replace('\\`', '`') - field_name = field_name.replace('\\\\', '\\') + field_name = field_name.replace("\\`", "`") + field_name = field_name.replace("\\\\", "\\") field_names.append(field_name) return field_names @@ -670,12 +656,14 @@ def get_doc_id(document_pb, expected_prefix): Raises: ValueError: If the name does not begin with the prefix. """ - prefix, document_id = document_pb.name.rsplit( - DOCUMENT_PATH_DELIMITER, 1) + prefix, document_id = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1) if prefix != expected_prefix: raise ValueError( - 'Unexpected document name', document_pb.name, - 'Expected to begin with', expected_prefix) + "Unexpected document name", + document_pb.name, + "Expected to begin with", + expected_prefix, + ) return document_id @@ -734,6 +722,7 @@ class DocumentExtractor(object): Property names and values to use for sending a change to a document. 
""" + def __init__(self, document_data): self.document_data = document_data self.field_paths = [] @@ -773,24 +762,18 @@ def _get_document_iterator(self, prefix_path): @property def has_transforms(self): - return bool( - self.server_timestamps - or self.array_removes - or self.array_unions - ) + return bool(self.server_timestamps or self.array_removes or self.array_unions) @property def transform_paths(self): return sorted( - self.server_timestamps - + list(self.array_removes) - + list(self.array_unions)) + self.server_timestamps + list(self.array_removes) + list(self.array_unions) + ) def _get_update_mask(self, allow_empty_mask=False): return None - def get_update_pb( - self, document_path, exists=None, allow_empty_mask=False): + def get_update_pb(self, document_path, exists=None, allow_empty_mask=False): if exists is not None: current_document = common_pb2.Precondition(exists=exists) @@ -799,8 +782,7 @@ def get_update_pb( update_pb = write_pb2.Write( update=document_pb2.Document( - name=document_path, - fields=encode_dict(self.set_fields), + name=document_path, fields=encode_dict(self.set_fields) ), update_mask=self._get_update_mask(allow_empty_mask), current_document=current_document, @@ -809,39 +791,54 @@ def get_update_pb( return update_pb def get_transform_pb(self, document_path, exists=None): - def make_array_value(values): value_list = [encode_value(element) for element in values] return document_pb2.ArrayValue(values=value_list) - path_field_transforms = [ - (path, write_pb2.DocumentTransform.FieldTransform( - field_path=path.to_api_repr(), - set_to_server_value=REQUEST_TIME_ENUM, - )) for path in self.server_timestamps - ] + [ - (path, write_pb2.DocumentTransform.FieldTransform( - field_path=path.to_api_repr(), - remove_all_from_array=make_array_value(values), - )) for path, values in self.array_removes.items() - ] + [ - (path, write_pb2.DocumentTransform.FieldTransform( - field_path=path.to_api_repr(), - append_missing_elements=make_array_value(values), - )) for path, values in self.array_unions.items() - ] + path_field_transforms = ( + [ + ( + path, + write_pb2.DocumentTransform.FieldTransform( + field_path=path.to_api_repr(), + set_to_server_value=REQUEST_TIME_ENUM, + ), + ) + for path in self.server_timestamps + ] + + [ + ( + path, + write_pb2.DocumentTransform.FieldTransform( + field_path=path.to_api_repr(), + remove_all_from_array=make_array_value(values), + ), + ) + for path, values in self.array_removes.items() + ] + + [ + ( + path, + write_pb2.DocumentTransform.FieldTransform( + field_path=path.to_api_repr(), + append_missing_elements=make_array_value(values), + ), + ) + for path, values in self.array_unions.items() + ] + ) field_transforms = [ transform for path, transform in sorted(path_field_transforms) ] transform_pb = write_pb2.Write( transform=write_pb2.DocumentTransform( - document=document_path, - field_transforms=field_transforms, - ), + document=document_path, field_transforms=field_transforms + ) ) if exists is not None: transform_pb.current_document.CopyFrom( - common_pb2.Precondition(exists=exists)) + common_pb2.Precondition(exists=exists) + ) return transform_pb @@ -912,6 +909,7 @@ def pbs_for_set_no_merge(document_path, document_data): class DocumentExtractorForMerge(DocumentExtractor): """ Break document data up into actual data and transforms. 
""" + def __init__(self, document_data): super(DocumentExtractorForMerge, self).__init__(document_data) self.data_merge = [] @@ -953,8 +951,7 @@ def _normalize_merge_paths(self, merge): for index in range(len(merge_paths) - 1): lhs, rhs = merge_paths[index], merge_paths[index + 1] if lhs.eq_or_parent(rhs): - raise ValueError("Merge paths overlap: {}, {}".format( - lhs, rhs)) + raise ValueError("Merge paths overlap: {}, {}".format(lhs, rhs)) for merge_path in merge_paths: if merge_path in self.deleted_fields: @@ -969,8 +966,7 @@ def _normalize_merge_paths(self, merge): def _apply_merge_paths(self, merge): if self.empty_document: - raise ValueError( - "Cannot merge specific fields with empty document.") + raise ValueError("Cannot merge specific fields with empty document.") merge_paths = self._normalize_merge_paths(merge) @@ -995,35 +991,39 @@ def _apply_merge_paths(self, merge): self.set_fields = merged_set_fields unmerged_deleted_fields = [ - field_path for field_path in self.deleted_fields + field_path + for field_path in self.deleted_fields if field_path not in self.merge ] if unmerged_deleted_fields: - raise ValueError("Cannot delete unmerged fields: {}".format( - unmerged_deleted_fields)) + raise ValueError( + "Cannot delete unmerged fields: {}".format(unmerged_deleted_fields) + ) self.data_merge = sorted(self.data_merge + self.deleted_fields) # Keep only transforms which are within merge. merged_transform_paths = set() for merge_path in self.merge: tranform_merge_paths = [ - transform_path for transform_path in self.transform_paths + transform_path + for transform_path in self.transform_paths if merge_path.eq_or_parent(transform_path) ] merged_transform_paths.update(tranform_merge_paths) self.server_timestamps = [ - path for path in self.server_timestamps - if path in merged_transform_paths + path for path in self.server_timestamps if path in merged_transform_paths ] self.array_removes = { - path: values for path, values in self.array_removes.items() + path: values + for path, values in self.array_removes.items() if path in merged_transform_paths } self.array_unions = { - path: values for path, values in self.array_unions.items() + path: values + for path, values in self.array_unions.items() if path in merged_transform_paths } @@ -1036,7 +1036,8 @@ def apply_merge(self, merge): def _get_update_mask(self, allow_empty_mask=False): # Mask uses dotted / quoted paths. mask_paths = [ - field_path.to_api_repr() for field_path in self.merge + field_path.to_api_repr() + for field_path in self.merge if field_path not in self.transform_merge ] @@ -1067,8 +1068,8 @@ def pbs_for_set_with_merge(document_path, document_data, merge): if extractor.has_updates or merge_empty: write_pbs.append( - extractor.get_update_pb( - document_path, allow_empty_mask=merge_empty)) + extractor.get_update_pb(document_path, allow_empty_mask=merge_empty) + ) if extractor.transform_paths: transform_pb = extractor.get_transform_pb(document_path) @@ -1080,26 +1081,30 @@ def pbs_for_set_with_merge(document_path, document_data, merge): class DocumentExtractorForUpdate(DocumentExtractor): """ Break document data up into actual data and transforms. 
""" + def __init__(self, document_data): super(DocumentExtractorForUpdate, self).__init__(document_data) - self.top_level_paths = sorted([ - FieldPath.from_string(key) for key in document_data - ]) + self.top_level_paths = sorted( + [FieldPath.from_string(key) for key in document_data] + ) tops = set(self.top_level_paths) for top_level_path in self.top_level_paths: for ancestor in top_level_path.lineage(): if ancestor in tops: - raise ValueError("Conflicting field path: {}, {}".format( - top_level_path, ancestor)) + raise ValueError( + "Conflicting field path: {}, {}".format( + top_level_path, ancestor + ) + ) for field_path in self.deleted_fields: if field_path not in tops: - raise ValueError("Cannot update with nest delete: {}".format( - field_path)) + raise ValueError( + "Cannot update with nest delete: {}".format(field_path) + ) def _get_document_iterator(self, prefix_path): - return extract_fields( - self.document_data, prefix_path, expand_dots=True) + return extract_fields(self.document_data, prefix_path, expand_dots=True) def _get_update_mask(self, allow_empty_mask=False): mask_paths = [] @@ -1132,7 +1137,7 @@ def pbs_for_update(document_path, field_updates, option): extractor = DocumentExtractorForUpdate(field_updates) if extractor.empty_document: - raise ValueError('Cannot update with an empty document.') + raise ValueError("Cannot update with an empty document.") if option is None: # Default is to use ``exists=True``. option = ExistsOption(exists=True) @@ -1221,7 +1226,7 @@ def metadata_with_prefix(prefix, **kw): Returns: List[Tuple[str, str]]: RPC metadata with supplied prefix """ - return [('google-cloud-resource-prefix', prefix)] + return [("google-cloud-resource-prefix", prefix)] class WriteOption(object): @@ -1276,8 +1281,7 @@ def modify_write(self, write_pb, **unused_kwargs): unused_kwargs (Dict[str, Any]): Keyword arguments accepted by other subclasses that are unused here. """ - current_doc = types.Precondition( - update_time=self._last_update_time) + current_doc = types.Precondition(update_time=self._last_update_time) write_pb.current_document.CopyFrom(current_doc) diff --git a/firestore/google/cloud/firestore_v1beta1/batch.py b/firestore/google/cloud/firestore_v1beta1/batch.py index c976c8dc64d0..978da04ada23 100644 --- a/firestore/google/cloud/firestore_v1beta1/batch.py +++ b/firestore/google/cloud/firestore_v1beta1/batch.py @@ -57,8 +57,7 @@ def create(self, reference, document_data): document_data (dict): Property names and values to use for creating a document. """ - write_pbs = _helpers.pbs_for_create( - reference._document_path, document_data) + write_pbs = _helpers.pbs_for_create(reference._document_path, document_data) self._add_write_pbs(write_pbs) def set(self, reference, document_data, merge=False): @@ -80,10 +79,12 @@ def set(self, reference, document_data, merge=False): """ if merge is not False: write_pbs = _helpers.pbs_for_set_with_merge( - reference._document_path, document_data, merge) + reference._document_path, document_data, merge + ) else: write_pbs = _helpers.pbs_for_set_no_merge( - reference._document_path, document_data) + reference._document_path, document_data + ) self._add_write_pbs(write_pbs) @@ -103,11 +104,11 @@ def update(self, reference, field_updates, option=None): write option to make assertions / preconditions on the server state of the document before applying changes. 
""" - if option.__class__.__name__ == 'ExistsOption': - raise ValueError('you must not pass an explicit write option to ' - 'update.') + if option.__class__.__name__ == "ExistsOption": + raise ValueError("you must not pass an explicit write option to " "update.") write_pbs = _helpers.pbs_for_update( - reference._document_path, field_updates, option) + reference._document_path, field_updates, option + ) self._add_write_pbs(write_pbs) def delete(self, reference, option=None): @@ -139,8 +140,11 @@ def commit(self): ``update_time`` field. """ commit_response = self._client._firestore_api.commit( - self._client._database_string, self._write_pbs, - transaction=None, metadata=self._client._rpc_metadata) + self._client._database_string, + self._write_pbs, + transaction=None, + metadata=self._client._rpc_metadata, + ) self._write_pbs = [] return list(commit_response.write_results) diff --git a/firestore/google/cloud/firestore_v1beta1/client.py b/firestore/google/cloud/firestore_v1beta1/client.py index 0091f04027eb..08e97ad332f8 100644 --- a/firestore/google/cloud/firestore_v1beta1/client.py +++ b/firestore/google/cloud/firestore_v1beta1/client.py @@ -35,16 +35,16 @@ from google.cloud.firestore_v1beta1.transaction import Transaction -DEFAULT_DATABASE = '(default)' +DEFAULT_DATABASE = "(default)" """str: The default database used in a :class:`~.firestore.client.Client`.""" _BAD_OPTION_ERR = ( - 'Exactly one of ``last_update_time`` or ``exists`` ' - 'must be provided.' + "Exactly one of ``last_update_time`` or ``exists`` " "must be provided." ) _BAD_DOC_TEMPLATE = ( - 'Document {!r} appeared in response but was not present among references') -_ACTIVE_TXN = 'There is already an active transaction.' -_INACTIVE_TXN = 'There is no active transaction.' + "Document {!r} appeared in response but was not present among references" +) +_ACTIVE_TXN = "There is already an active transaction." +_INACTIVE_TXN = "There is no active transaction." class Client(ClientWithProject): @@ -68,8 +68,8 @@ class Client(ClientWithProject): """ SCOPE = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", ) """The scopes required for authenticating with the Firestore service.""" @@ -77,13 +77,13 @@ class Client(ClientWithProject): _database_string_internal = None _rpc_metadata_internal = None - def __init__(self, project=None, credentials=None, - database=DEFAULT_DATABASE): + def __init__(self, project=None, credentials=None, database=DEFAULT_DATABASE): # NOTE: This API has no use for the _http argument, but sending it # will have no impact since the _http() @property only lazily # creates a working HTTP object. super(Client, self).__init__( - project=project, credentials=credentials, _http=None) + project=project, credentials=credentials, _http=None + ) self._database = database @property @@ -96,7 +96,8 @@ def _firestore_api(self): """ if self._firestore_api_internal is None: self._firestore_api_internal = firestore_client.FirestoreClient( - credentials=self._credentials) + credentials=self._credentials + ) return self._firestore_api_internal @@ -120,7 +121,8 @@ def _database_string(self): # NOTE: database_root_path() is a classmethod, so we don't use # self._firestore_api (it isn't necessary). 
db_str = firestore_client.FirestoreClient.database_root_path( - self.project, self._database) + self.project, self._database + ) self._database_string_internal = db_str return self._database_string_internal @@ -135,7 +137,8 @@ def _rpc_metadata(self): """ if self._rpc_metadata_internal is None: self._rpc_metadata_internal = _helpers.metadata_with_prefix( - self._database_string) + self._database_string + ) return self._rpc_metadata_internal @@ -279,12 +282,12 @@ def write_option(**kwargs): raise TypeError(_BAD_OPTION_ERR) name, value = kwargs.popitem() - if name == 'last_update_time': + if name == "last_update_time": return _helpers.LastUpdateOption(value) - elif name == 'exists': + elif name == "exists": return _helpers.ExistsOption(value) else: - extra = '{!r} was provided'.format(name) + extra = "{!r} was provided".format(name) raise TypeError(_BAD_OPTION_ERR, extra) def get_all(self, references, field_paths=None, transaction=None): @@ -325,9 +328,12 @@ def get_all(self, references, field_paths=None, transaction=None): document_paths, reference_map = _reference_info(references) mask = _get_doc_mask(field_paths) response_iterator = self._firestore_api.batch_get_documents( - self._database_string, document_paths, mask, + self._database_string, + document_paths, + mask, transaction=_helpers.get_transaction_id(transaction), - metadata=self._rpc_metadata) + metadata=self._rpc_metadata, + ) for get_doc_response in response_iterator: yield _parse_batch_get(get_doc_response, reference_map, self) @@ -340,7 +346,8 @@ def collections(self): iterator of subcollections of the current document. """ iterator = self._firestore_api.list_collection_ids( - self._database_string, metadata=self._rpc_metadata) + self._database_string, metadata=self._rpc_metadata + ) iterator.client = self iterator.item_to_value = _item_to_collection_ref return iterator @@ -447,10 +454,9 @@ def _parse_batch_get(get_doc_response, reference_map, client): ValueError: If the response has a ``result`` field (a oneof) other than ``found`` or ``missing``. 
""" - result_type = get_doc_response.WhichOneof('result') - if result_type == 'found': - reference = _get_reference( - get_doc_response.found.name, reference_map) + result_type = get_doc_response.WhichOneof("result") + if result_type == "found": + reference = _get_reference(get_doc_response.found.name, reference_map) data = _helpers.decode_dict(get_doc_response.found.fields, client) snapshot = DocumentSnapshot( reference, @@ -458,19 +464,22 @@ def _parse_batch_get(get_doc_response, reference_map, client): exists=True, read_time=get_doc_response.read_time, create_time=get_doc_response.found.create_time, - update_time=get_doc_response.found.update_time) - elif result_type == 'missing': + update_time=get_doc_response.found.update_time, + ) + elif result_type == "missing": snapshot = DocumentSnapshot( None, None, exists=False, read_time=get_doc_response.read_time, create_time=None, - update_time=None) + update_time=None, + ) else: raise ValueError( - '`BatchGetDocumentsResponse.result` (a oneof) had a field other ' - 'than `found` or `missing` set, or was unset') + "`BatchGetDocumentsResponse.result` (a oneof) had a field other " + "than `found` or `missing` set, or was unset" + ) return snapshot diff --git a/firestore/google/cloud/firestore_v1beta1/collection.py b/firestore/google/cloud/firestore_v1beta1/collection.py index 26f3de835559..6957f6eb0d33 100644 --- a/firestore/google/cloud/firestore_v1beta1/collection.py +++ b/firestore/google/cloud/firestore_v1beta1/collection.py @@ -25,8 +25,7 @@ from google.cloud.firestore_v1beta1.watch import Watch from google.cloud.firestore_v1beta1 import document -_AUTO_ID_CHARS = ( - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789') +_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" class CollectionReference(object): @@ -58,11 +57,11 @@ class CollectionReference(object): def __init__(self, *path, **kwargs): _helpers.verify_path(path, is_collection=True) self._path = path - self._client = kwargs.pop('client', None) + self._client = kwargs.pop("client", None) if kwargs: raise TypeError( - 'Received unexpected arguments', kwargs, - 'Only `client` is supported') + "Received unexpected arguments", kwargs, "Only `client` is supported" + ) @property def id(self): @@ -121,14 +120,12 @@ def _parent_info(self): parent_doc = self.parent if parent_doc is None: parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join( - (self._client._database_string, 'documents'), + (self._client._database_string, "documents") ) else: parent_path = parent_doc._document_path - expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join( - (parent_path, self.id), - ) + expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id)) return parent_path, expected_prefix def add(self, document_data, document_id=None): @@ -158,15 +155,19 @@ def add(self, document_data, document_id=None): if document_id is None: parent_path, expected_prefix = self._parent_info() document_pb = document_pb2.Document( - fields=_helpers.encode_dict(document_data)) + fields=_helpers.encode_dict(document_data) + ) created_document_pb = self._client._firestore_api.create_document( - parent_path, collection_id=self.id, document_id=None, - document=document_pb, mask=None, - metadata=self._client._rpc_metadata) + parent_path, + collection_id=self.id, + document_id=None, + document=document_pb, + mask=None, + metadata=self._client._rpc_metadata, + ) - new_document_id = _helpers.get_doc_id( - created_document_pb, expected_prefix) + new_document_id = 
_helpers.get_doc_id(created_document_pb, expected_prefix) document_ref = self.document(new_document_id) return created_document_pb.update_time, document_ref else: @@ -398,10 +399,12 @@ def on_snapshot(collection_snapshot): # Terminate this watch collection_watch.unsubscribe() """ - return Watch.for_query(query_mod.Query(self), - callback, - document.DocumentSnapshot, - document.DocumentReference) + return Watch.for_query( + query_mod.Query(self), + callback, + document.DocumentSnapshot, + document.DocumentReference, + ) def _auto_id(): @@ -411,5 +414,4 @@ def _auto_id(): str: A 20 character string composed of digits, uppercase and lowercase and letters. """ - return ''.join( - random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20)) + return "".join(random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20)) diff --git a/firestore/google/cloud/firestore_v1beta1/document.py b/firestore/google/cloud/firestore_v1beta1/document.py index 097664badf4b..466dae1b9661 100644 --- a/firestore/google/cloud/firestore_v1beta1/document.py +++ b/firestore/google/cloud/firestore_v1beta1/document.py @@ -54,11 +54,11 @@ class DocumentReference(object): def __init__(self, *path, **kwargs): _helpers.verify_path(path, is_collection=False) self._path = path - self._client = kwargs.pop('client', None) + self._client = kwargs.pop("client", None) if kwargs: raise TypeError( - 'Received unexpected arguments', kwargs, - 'Only `client` is supported') + "Received unexpected arguments", kwargs, "Only `client` is supported" + ) def __copy__(self): """Shallow copy the instance. @@ -94,10 +94,7 @@ def __eq__(self, other): equal. """ if isinstance(other, DocumentReference): - return ( - self._client == other._client and - self._path == other._path - ) + return self._client == other._client and self._path == other._path else: return NotImplemented @@ -112,10 +109,7 @@ def __ne__(self, other): not equal. """ if isinstance(other, DocumentReference): - return ( - self._client != other._client or - self._path != other._path - ) + return self._client != other._client or self._path != other._path else: return NotImplemented @@ -136,9 +130,8 @@ def _document_path(self): """ if self._document_path_internal is None: if self._client is None: - raise ValueError('A document reference requires a `client`.') - self._document_path_internal = _get_document_path( - self._client, self._path) + raise ValueError("A document reference requires a `client`.") + self._document_path_internal = _get_document_path(self._client, self._path) return self._document_path_internal @@ -391,8 +384,11 @@ def delete(self, option=None): """ write_pb = _helpers.pb_for_delete(self._document_path, option) commit_response = self._client._firestore_api.commit( - self._client._database_string, [write_pb], transaction=None, - metadata=self._client._rpc_metadata) + self._client._database_string, + [write_pb], + transaction=None, + metadata=self._client._rpc_metadata, + ) return commit_response.commit_time @@ -423,8 +419,7 @@ def get(self, field_paths=None, transaction=None): `None` and `exists` will be `False`. 
""" if isinstance(field_paths, six.string_types): - raise ValueError( - "'field_paths' must be a sequence of paths, not a string.") + raise ValueError("'field_paths' must be a sequence of paths, not a string.") if field_paths is not None: mask = common_pb2.DocumentMask(field_paths=sorted(field_paths)) @@ -437,7 +432,8 @@ def get(self, field_paths=None, transaction=None): self._document_path, mask=mask, transaction=_helpers.get_transaction_id(transaction), - metadata=self._client._rpc_metadata) + metadata=self._client._rpc_metadata, + ) except exceptions.NotFound: data = None exists = False @@ -455,7 +451,8 @@ def get(self, field_paths=None, transaction=None): exists=exists, read_time=None, # No server read_time available create_time=create_time, - update_time=update_time) + update_time=update_time, + ) def collections(self, page_size=None): """List subcollections of the current document. @@ -470,8 +467,10 @@ def collections(self, page_size=None): iterator will be empty """ iterator = self._client._firestore_api.list_collection_ids( - self._document_path, page_size=page_size, - metadata=self._client._rpc_metadata) + self._document_path, + page_size=page_size, + metadata=self._client._rpc_metadata, + ) iterator.document = self iterator.item_to_value = _item_to_collection_ref return iterator @@ -505,8 +504,7 @@ def on_snapshot(document_snapshot): # Terminate this watch doc_watch.unsubscribe() """ - return Watch.for_document(self, callback, DocumentSnapshot, - DocumentReference) + return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference) class DocumentSnapshot(object): @@ -535,9 +533,7 @@ class DocumentSnapshot(object): this document was last updated. """ - def __init__( - self, reference, data, exists, - read_time, create_time, update_time): + def __init__(self, reference, data, exists, read_time, create_time, update_time): self._reference = reference # We want immutable data, so callers can't modify this value # out from under us. @@ -687,7 +683,7 @@ def _get_document_path(client, path): Returns: str: The fully-qualified document path. """ - parts = (client._database_string, 'documents') + path + parts = (client._database_string, "documents") + path return _helpers.DOCUMENT_PATH_DELIMITER.join(parts) @@ -714,8 +710,10 @@ def _consume_single_get(response_iterator): all_responses = list(response_iterator) if len(all_responses) != 1: raise ValueError( - 'Unexpected response from `BatchGetDocumentsResponse`', - all_responses, 'Expected only one result') + "Unexpected response from `BatchGetDocumentsResponse`", + all_responses, + "Expected only one result", + ) return all_responses[0] @@ -741,7 +739,7 @@ def _first_write_result(write_results): **never** occur, since the backend should be stable. """ if not write_results: - raise ValueError('Expected at least one write result') + raise ValueError("Expected at least one write result") return write_results[0] diff --git a/firestore/google/cloud/firestore_v1beta1/gapic/enums.py b/firestore/google/cloud/firestore_v1beta1/gapic/enums.py index e30b456c925a..137fae7a2528 100644 --- a/firestore/google/cloud/firestore_v1beta1/gapic/enums.py +++ b/firestore/google/cloud/firestore_v1beta1/gapic/enums.py @@ -28,6 +28,7 @@ class NullValue(enum.IntEnum): Attributes: NULL_VALUE (int): Null value. """ + NULL_VALUE = 0 @@ -42,6 +43,7 @@ class ServerValue(enum.IntEnum): REQUEST_TIME (int): The time at which the server processed the request, with millisecond precision. 
""" + SERVER_VALUE_UNSPECIFIED = 0 REQUEST_TIME = 1 @@ -56,6 +58,7 @@ class Direction(enum.IntEnum): ASCENDING (int): Ascending. DESCENDING (int): Descending. """ + DIRECTION_UNSPECIFIED = 0 ASCENDING = 1 DESCENDING = 2 @@ -69,6 +72,7 @@ class Operator(enum.IntEnum): OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. AND (int): The results are required to satisfy each of the combined filters. """ + OPERATOR_UNSPECIFIED = 0 AND = 1 @@ -87,6 +91,7 @@ class Operator(enum.IntEnum): EQUAL (int): Equal. ARRAY_CONTAINS (int): Contains. Requires that the field is an array. """ + OPERATOR_UNSPECIFIED = 0 LESS_THAN = 1 LESS_THAN_OR_EQUAL = 2 @@ -105,6 +110,7 @@ class Operator(enum.IntEnum): IS_NAN (int): Test if a field is equal to NaN. IS_NULL (int): Test if an exprestion evaluates to Null. """ + OPERATOR_UNSPECIFIED = 0 IS_NAN = 2 IS_NULL = 3 @@ -133,6 +139,7 @@ class TargetChangeType(enum.IntEnum): After the initial state is complete, ``CURRENT`` will be returned even if the target was previously indicated to be ``CURRENT``. """ + NO_CHANGE = 0 ADD = 1 REMOVE = 2 diff --git a/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client.py b/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client.py index d77ac3b84774..e9baed7a9c62 100644 --- a/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client.py +++ b/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client.py @@ -42,7 +42,8 @@ from google.protobuf import timestamp_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-firestore', ).version + "google-cloud-firestore" +).version class FirestoreClient(object): @@ -66,12 +67,12 @@ class FirestoreClient(object): guaranteed to see the effects of the transaction. """ - SERVICE_ADDRESS = 'firestore.googleapis.com:443' + SERVICE_ADDRESS = "firestore.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = 'google.firestore.v1beta1.Firestore' + _INTERFACE_NAME = "google.firestore.v1beta1.Firestore" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -87,9 +88,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: FirestoreClient: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @@ -98,7 +98,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def database_root_path(cls, project, database): """Return a fully-qualified database_root string.""" return google.api_core.path_template.expand( - 'projects/{project}/databases/{database}', + "projects/{project}/databases/{database}", project=project, database=database, ) @@ -107,7 +107,7 @@ def database_root_path(cls, project, database): def document_root_path(cls, project, database): """Return a fully-qualified document_root string.""" return google.api_core.path_template.expand( - 'projects/{project}/databases/{database}/documents', + "projects/{project}/databases/{database}/documents", project=project, database=database, ) @@ -116,7 +116,7 @@ def document_root_path(cls, project, database): def document_path_path(cls, project, database, document_path): """Return a fully-qualified document_path string.""" return google.api_core.path_template.expand( - 'projects/{project}/databases/{database}/documents/{document_path=**}', + "projects/{project}/databases/{database}/documents/{document_path=**}", project=project, database=database, document_path=document_path, @@ -126,19 +126,21 @@ def document_path_path(cls, project, database, document_path): def any_path_path(cls, project, database, document, any_path): """Return a fully-qualified any_path string.""" return google.api_core.path_template.expand( - 'projects/{project}/databases/{database}/documents/{document}/{any_path=**}', + "projects/{project}/databases/{database}/documents/{document}/{any_path=**}", project=project, database=database, document=document, any_path=any_path, ) - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -172,18 +174,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = firestore_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -192,25 +195,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=firestore_grpc_transport. - FirestoreGrpcTransport, + default_class=firestore_grpc_transport.FirestoreGrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." 
+ ) self.transport = transport else: self.transport = firestore_grpc_transport.FirestoreGrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -220,7 +222,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. # These are the actual callables which invoke the proper @@ -229,14 +232,16 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def get_document(self, - name, - mask=None, - transaction=None, - read_time=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def get_document( + self, + name, + mask=None, + transaction=None, + read_time=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets a single document. @@ -285,44 +290,43 @@ def get_document(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'get_document' not in self._inner_api_calls: + if "get_document" not in self._inner_api_calls: self._inner_api_calls[ - 'get_document'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_document, - default_retry=self._method_configs['GetDocument'].retry, - default_timeout=self._method_configs['GetDocument']. - timeout, - client_info=self._client_info, - ) + "get_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_document, + default_retry=self._method_configs["GetDocument"].retry, + default_timeout=self._method_configs["GetDocument"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( - transaction=transaction, - read_time=read_time, + transaction=transaction, read_time=read_time ) request = firestore_pb2.GetDocumentRequest( - name=name, - mask=mask, - transaction=transaction, - read_time=read_time, + name=name, mask=mask, transaction=transaction, read_time=read_time ) - return self._inner_api_calls['get_document']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def list_documents(self, - parent, - collection_id, - page_size=None, - order_by=None, - mask=None, - transaction=None, - read_time=None, - show_missing=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["get_document"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_documents( + self, + parent, + collection_id, + page_size=None, + order_by=None, + mask=None, + transaction=None, + read_time=None, + show_missing=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists documents. 
@@ -407,21 +411,20 @@ def list_documents(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_documents' not in self._inner_api_calls: + if "list_documents" not in self._inner_api_calls: self._inner_api_calls[ - 'list_documents'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_documents, - default_retry=self._method_configs['ListDocuments'].retry, - default_timeout=self._method_configs['ListDocuments']. - timeout, - client_info=self._client_info, - ) + "list_documents" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_documents, + default_retry=self._method_configs["ListDocuments"].retry, + default_timeout=self._method_configs["ListDocuments"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( - transaction=transaction, - read_time=read_time, + transaction=transaction, read_time=read_time ) request = firestore_pb2.ListDocumentsRequest( @@ -437,26 +440,29 @@ def list_documents(self, iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_documents'], + self._inner_api_calls["list_documents"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='documents', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="documents", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def create_document(self, - parent, - collection_id, - document_id, - document, - mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def create_document( + self, + parent, + collection_id, + document_id, + document, + mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates a new document. @@ -518,15 +524,15 @@ def create_document(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'create_document' not in self._inner_api_calls: + if "create_document" not in self._inner_api_calls: self._inner_api_calls[ - 'create_document'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_document, - default_retry=self._method_configs['CreateDocument'].retry, - default_timeout=self._method_configs['CreateDocument']. 
- timeout, - client_info=self._client_info, - ) + "create_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_document, + default_retry=self._method_configs["CreateDocument"].retry, + default_timeout=self._method_configs["CreateDocument"].timeout, + client_info=self._client_info, + ) request = firestore_pb2.CreateDocumentRequest( parent=parent, @@ -535,17 +541,20 @@ def create_document(self, document=document, mask=mask, ) - return self._inner_api_calls['create_document']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def update_document(self, - document, - update_mask, - mask=None, - current_document=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["create_document"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_document( + self, + document, + update_mask, + mask=None, + current_document=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Updates or inserts a document. @@ -610,15 +619,15 @@ def update_document(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'update_document' not in self._inner_api_calls: + if "update_document" not in self._inner_api_calls: self._inner_api_calls[ - 'update_document'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_document, - default_retry=self._method_configs['UpdateDocument'].retry, - default_timeout=self._method_configs['UpdateDocument']. - timeout, - client_info=self._client_info, - ) + "update_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_document, + default_retry=self._method_configs["UpdateDocument"].retry, + default_timeout=self._method_configs["UpdateDocument"].timeout, + client_info=self._client_info, + ) request = firestore_pb2.UpdateDocumentRequest( document=document, @@ -626,15 +635,18 @@ def update_document(self, mask=mask, current_document=current_document, ) - return self._inner_api_calls['update_document']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def delete_document(self, - name, - current_document=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["update_document"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_document( + self, + name, + current_document=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Deletes a document. @@ -672,33 +684,35 @@ def delete_document(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'delete_document' not in self._inner_api_calls: + if "delete_document" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_document'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_document, - default_retry=self._method_configs['DeleteDocument'].retry, - default_timeout=self._method_configs['DeleteDocument']. 
- timeout, - client_info=self._client_info, - ) + "delete_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_document, + default_retry=self._method_configs["DeleteDocument"].retry, + default_timeout=self._method_configs["DeleteDocument"].timeout, + client_info=self._client_info, + ) request = firestore_pb2.DeleteDocumentRequest( - name=name, - current_document=current_document, + name=name, current_document=current_document + ) + self._inner_api_calls["delete_document"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - self._inner_api_calls['delete_document']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def batch_get_documents(self, - database, - documents, - mask=None, - transaction=None, - new_transaction=None, - read_time=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def batch_get_documents( + self, + database, + documents, + mask=None, + transaction=None, + new_transaction=None, + read_time=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets multiple documents. @@ -766,16 +780,15 @@ def batch_get_documents(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'batch_get_documents' not in self._inner_api_calls: + if "batch_get_documents" not in self._inner_api_calls: self._inner_api_calls[ - 'batch_get_documents'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.batch_get_documents, - default_retry=self._method_configs['BatchGetDocuments']. - retry, - default_timeout=self._method_configs['BatchGetDocuments']. - timeout, - client_info=self._client_info, - ) + "batch_get_documents" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.batch_get_documents, + default_retry=self._method_configs["BatchGetDocuments"].retry, + default_timeout=self._method_configs["BatchGetDocuments"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. @@ -793,15 +806,18 @@ def batch_get_documents(self, new_transaction=new_transaction, read_time=read_time, ) - return self._inner_api_calls['batch_get_documents']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def begin_transaction(self, - database, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["batch_get_documents"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def begin_transaction( + self, + database, + options_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Starts a new transaction. @@ -842,31 +858,32 @@ def begin_transaction(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'begin_transaction' not in self._inner_api_calls: + if "begin_transaction" not in self._inner_api_calls: self._inner_api_calls[ - 'begin_transaction'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.begin_transaction, - default_retry=self._method_configs['BeginTransaction']. - retry, - default_timeout=self._method_configs['BeginTransaction']. 
- timeout, - client_info=self._client_info, - ) + "begin_transaction" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.begin_transaction, + default_retry=self._method_configs["BeginTransaction"].retry, + default_timeout=self._method_configs["BeginTransaction"].timeout, + client_info=self._client_info, + ) request = firestore_pb2.BeginTransactionRequest( - database=database, - options=options_, + database=database, options=options_ + ) + return self._inner_api_calls["begin_transaction"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['begin_transaction']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def commit(self, - database, - writes, - transaction=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def commit( + self, + database, + writes, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Commits a transaction, while optionally updating documents. @@ -912,29 +929,31 @@ def commit(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'commit' not in self._inner_api_calls: + if "commit" not in self._inner_api_calls: self._inner_api_calls[ - 'commit'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.commit, - default_retry=self._method_configs['Commit'].retry, - default_timeout=self._method_configs['Commit'].timeout, - client_info=self._client_info, - ) + "commit" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.commit, + default_retry=self._method_configs["Commit"].retry, + default_timeout=self._method_configs["Commit"].timeout, + client_info=self._client_info, + ) request = firestore_pb2.CommitRequest( - database=database, - writes=writes, - transaction=transaction, + database=database, writes=writes, transaction=transaction + ) + return self._inner_api_calls["commit"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['commit']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def rollback(self, - database, - transaction, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def rollback( + self, + database, + transaction, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Rolls back a transaction. @@ -971,31 +990,34 @@ def rollback(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'rollback' not in self._inner_api_calls: + if "rollback" not in self._inner_api_calls: self._inner_api_calls[ - 'rollback'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.rollback, - default_retry=self._method_configs['Rollback'].retry, - default_timeout=self._method_configs['Rollback'].timeout, - client_info=self._client_info, - ) + "rollback" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.rollback, + default_retry=self._method_configs["Rollback"].retry, + default_timeout=self._method_configs["Rollback"].timeout, + client_info=self._client_info, + ) request = firestore_pb2.RollbackRequest( - database=database, - transaction=transaction, + database=database, transaction=transaction + ) + self._inner_api_calls["rollback"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - self._inner_api_calls['rollback']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def run_query(self, - parent, - structured_query=None, - transaction=None, - new_transaction=None, - read_time=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def run_query( + self, + parent, + structured_query=None, + transaction=None, + new_transaction=None, + read_time=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Runs a query. @@ -1053,19 +1075,19 @@ def run_query(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'run_query' not in self._inner_api_calls: + if "run_query" not in self._inner_api_calls: self._inner_api_calls[ - 'run_query'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.run_query, - default_retry=self._method_configs['RunQuery'].retry, - default_timeout=self._method_configs['RunQuery'].timeout, - client_info=self._client_info, - ) + "run_query" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.run_query, + default_retry=self._method_configs["RunQuery"].retry, + default_timeout=self._method_configs["RunQuery"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - structured_query=structured_query, ) + google.api_core.protobuf_helpers.check_oneof(structured_query=structured_query) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. @@ -1082,14 +1104,17 @@ def run_query(self, new_transaction=new_transaction, read_time=read_time, ) - return self._inner_api_calls['run_query']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def write(self, - requests, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["run_query"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def write( + self, + requests, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Streams batches of document updates and deletes, in order. @@ -1131,23 +1156,27 @@ def write(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'write' not in self._inner_api_calls: + if "write" not in self._inner_api_calls: self._inner_api_calls[ - 'write'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.write, - default_retry=self._method_configs['Write'].retry, - default_timeout=self._method_configs['Write'].timeout, - client_info=self._client_info, - ) + "write" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.write, + default_retry=self._method_configs["Write"].retry, + default_timeout=self._method_configs["Write"].timeout, + client_info=self._client_info, + ) - return self._inner_api_calls['write']( - requests, retry=retry, timeout=timeout, metadata=metadata) + return self._inner_api_calls["write"]( + requests, retry=retry, timeout=timeout, metadata=metadata + ) - def listen(self, - requests, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def listen( + self, + requests, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Listens to changes. @@ -1189,24 +1218,28 @@ def listen(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'listen' not in self._inner_api_calls: + if "listen" not in self._inner_api_calls: self._inner_api_calls[ - 'listen'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.listen, - default_retry=self._method_configs['Listen'].retry, - default_timeout=self._method_configs['Listen'].timeout, - client_info=self._client_info, - ) + "listen" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.listen, + default_retry=self._method_configs["Listen"].retry, + default_timeout=self._method_configs["Listen"].timeout, + client_info=self._client_info, + ) - return self._inner_api_calls['listen']( - requests, retry=retry, timeout=timeout, metadata=metadata) + return self._inner_api_calls["listen"]( + requests, retry=retry, timeout=timeout, metadata=metadata + ) - def list_collection_ids(self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def list_collection_ids( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists all the collection IDs underneath a document. @@ -1264,31 +1297,30 @@ def list_collection_ids(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_collection_ids' not in self._inner_api_calls: + if "list_collection_ids" not in self._inner_api_calls: self._inner_api_calls[ - 'list_collection_ids'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_collection_ids, - default_retry=self._method_configs['ListCollectionIds']. - retry, - default_timeout=self._method_configs['ListCollectionIds']. 
- timeout, - client_info=self._client_info, - ) + "list_collection_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_collection_ids, + default_retry=self._method_configs["ListCollectionIds"].retry, + default_timeout=self._method_configs["ListCollectionIds"].timeout, + client_info=self._client_info, + ) request = firestore_pb2.ListCollectionIdsRequest( - parent=parent, - page_size=page_size, + parent=parent, page_size=page_size ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_collection_ids'], + self._inner_api_calls["list_collection_ids"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='collection_ids', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="collection_ids", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator diff --git a/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py b/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py index 4c01538441d1..dd458fe97643 100644 --- a/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py +++ b/firestore/google/cloud/firestore_v1beta1/gapic/firestore_client_config.py @@ -3,7 +3,7 @@ "google.firestore.v1beta1.Firestore": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [] + "non_idempotent": [], }, "retry_params": { "default": { @@ -13,7 +13,7 @@ "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, }, "streaming": { "initial_retry_delay_millis": 100, @@ -22,76 +22,76 @@ "initial_rpc_timeout_millis": 300000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 300000, - "total_timeout_millis": 600000 - } + "total_timeout_millis": 600000, + }, }, "methods": { "GetDocument": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListDocuments": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "CreateDocument": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "UpdateDocument": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "DeleteDocument": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "BatchGetDocuments": { "timeout_millis": 300000, "retry_codes_name": "idempotent", - "retry_params_name": "streaming" + "retry_params_name": "streaming", }, "BeginTransaction": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Commit": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Rollback": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "RunQuery": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "streaming" + "retry_params_name": "streaming", }, "Write": { "timeout_millis": 86400000, 
"retry_codes_name": "non_idempotent", - "retry_params_name": "streaming" + "retry_params_name": "streaming", }, "Listen": { "timeout_millis": 86400000, "retry_codes_name": "idempotent", - "retry_params_name": "streaming" + "retry_params_name": "streaming", }, "ListCollectionIds": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/firestore/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py b/firestore/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py index 0e4f3e5b612a..914bd77db620 100644 --- a/firestore/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py +++ b/firestore/google/cloud/firestore_v1beta1/gapic/transports/firestore_grpc_transport.py @@ -27,17 +27,17 @@ class FirestoreGrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. _OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/datastore', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", ) - def __init__(self, - channel=None, - credentials=None, - address='firestore.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="firestore.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -55,28 +55,21 @@ def __init__(self, # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = { - 'firestore_stub': firestore_pb2_grpc.FirestoreStub(channel), - } + self._stubs = {"firestore_stub": firestore_pb2_grpc.FirestoreStub(channel)} @classmethod - def create_channel(cls, - address='firestore.googleapis.com:443', - credentials=None): + def create_channel(cls, address="firestore.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. Args: @@ -91,9 +84,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -116,7 +107,7 @@ def get_document(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].GetDocument + return self._stubs["firestore_stub"].GetDocument @property def list_documents(self): @@ -129,7 +120,7 @@ def list_documents(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].ListDocuments + return self._stubs["firestore_stub"].ListDocuments @property def create_document(self): @@ -142,7 +133,7 @@ def create_document(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['firestore_stub'].CreateDocument + return self._stubs["firestore_stub"].CreateDocument @property def update_document(self): @@ -155,7 +146,7 @@ def update_document(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].UpdateDocument + return self._stubs["firestore_stub"].UpdateDocument @property def delete_document(self): @@ -168,7 +159,7 @@ def delete_document(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].DeleteDocument + return self._stubs["firestore_stub"].DeleteDocument @property def batch_get_documents(self): @@ -184,7 +175,7 @@ def batch_get_documents(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].BatchGetDocuments + return self._stubs["firestore_stub"].BatchGetDocuments @property def begin_transaction(self): @@ -197,7 +188,7 @@ def begin_transaction(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].BeginTransaction + return self._stubs["firestore_stub"].BeginTransaction @property def commit(self): @@ -210,7 +201,7 @@ def commit(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].Commit + return self._stubs["firestore_stub"].Commit @property def rollback(self): @@ -223,7 +214,7 @@ def rollback(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].Rollback + return self._stubs["firestore_stub"].Rollback @property def run_query(self): @@ -236,7 +227,7 @@ def run_query(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].RunQuery + return self._stubs["firestore_stub"].RunQuery @property def write(self): @@ -249,7 +240,7 @@ def write(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].Write + return self._stubs["firestore_stub"].Write @property def listen(self): @@ -262,7 +253,7 @@ def listen(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['firestore_stub'].Listen + return self._stubs["firestore_stub"].Listen @property def list_collection_ids(self): @@ -275,4 +266,4 @@ def list_collection_ids(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['firestore_stub'].ListCollectionIds + return self._stubs["firestore_stub"].ListCollectionIds diff --git a/firestore/google/cloud/firestore_v1beta1/order.py b/firestore/google/cloud/firestore_v1beta1/order.py index e5003df14091..79207f530c42 100644 --- a/firestore/google/cloud/firestore_v1beta1/order.py +++ b/firestore/google/cloud/firestore_v1beta1/order.py @@ -32,39 +32,38 @@ class TypeOrder(Enum): @staticmethod def from_value(value): - v = value.WhichOneof('value_type') + v = value.WhichOneof("value_type") lut = { - 'null_value': TypeOrder.NULL, - 'boolean_value': TypeOrder.BOOLEAN, - 'integer_value': TypeOrder.NUMBER, - 'double_value': TypeOrder.NUMBER, - 'timestamp_value': TypeOrder.TIMESTAMP, - 'string_value': TypeOrder.STRING, - 'bytes_value': TypeOrder.BLOB, - 'reference_value': TypeOrder.REF, - 'geo_point_value': TypeOrder.GEO_POINT, - 'array_value': TypeOrder.ARRAY, - 'map_value': TypeOrder.OBJECT, + "null_value": TypeOrder.NULL, + "boolean_value": TypeOrder.BOOLEAN, + "integer_value": TypeOrder.NUMBER, + "double_value": TypeOrder.NUMBER, + "timestamp_value": TypeOrder.TIMESTAMP, + "string_value": TypeOrder.STRING, + "bytes_value": TypeOrder.BLOB, + "reference_value": TypeOrder.REF, + "geo_point_value": TypeOrder.GEO_POINT, + "array_value": TypeOrder.ARRAY, + "map_value": TypeOrder.OBJECT, } if v not in lut: - raise ValueError( - "Could not detect value type for " + v) + raise ValueError("Could not detect value type for " + v) return lut[v] class Order(object): - ''' + """ Order implements the ordering semantics of the backend. - ''' + """ @classmethod def compare(cls, left, right): - ''' + """ Main comparison function for all Firestore types. @return -1 is left < right, 0 if left == right, otherwise 1 - ''' + """ # First compare the types. 
leftType = TypeOrder.from_value(left).value rightType = TypeOrder.from_value(right).value @@ -74,32 +73,32 @@ def compare(cls, left, right): return -1 return 1 - value_type = left.WhichOneof('value_type') + value_type = left.WhichOneof("value_type") - if value_type == 'null_value': + if value_type == "null_value": return 0 # nulls are all equal - elif value_type == 'boolean_value': + elif value_type == "boolean_value": return cls._compare_to(left.boolean_value, right.boolean_value) - elif value_type == 'integer_value': + elif value_type == "integer_value": return cls.compare_numbers(left, right) - elif value_type == 'double_value': + elif value_type == "double_value": return cls.compare_numbers(left, right) - elif value_type == 'timestamp_value': + elif value_type == "timestamp_value": return cls.compare_timestamps(left, right) - elif value_type == 'string_value': + elif value_type == "string_value": return cls._compare_to(left.string_value, right.string_value) - elif value_type == 'bytes_value': + elif value_type == "bytes_value": return cls.compare_blobs(left, right) - elif value_type == 'reference_value': + elif value_type == "reference_value": return cls.compare_resource_paths(left, right) - elif value_type == 'geo_point_value': + elif value_type == "geo_point_value": return cls.compare_geo_points(left, right) - elif value_type == 'array_value': + elif value_type == "array_value": return cls.compare_arrays(left, right) - elif value_type == 'map_value': + elif value_type == "map_value": return cls.compare_objects(left, right) else: - raise ValueError('Unknown ``value_type``', str(value_type)) + raise ValueError("Unknown ``value_type``", str(value_type)) @staticmethod def compare_blobs(left, right): @@ -123,16 +122,14 @@ def compare_timestamps(left, right): def compare_geo_points(left, right): left_value = decode_value(left, None) right_value = decode_value(right, None) - cmp = ( - (left_value.latitude > right_value.latitude) - - (left_value.latitude < right_value.latitude) + cmp = (left_value.latitude > right_value.latitude) - ( + left_value.latitude < right_value.latitude ) if cmp != 0: return cmp - return ( - (left_value.longitude > right_value.longitude) - - (left_value.longitude < right_value.longitude) + return (left_value.longitude > right_value.longitude) - ( + left_value.longitude < right_value.longitude ) @staticmethod @@ -140,14 +137,14 @@ def compare_resource_paths(left, right): left = left.reference_value right = right.reference_value - left_segments = left.split('/') - right_segments = right.split('/') + left_segments = left.split("/") + right_segments = right.split("/") shorter = min(len(left_segments), len(right_segments)) # compare segments for i in range(shorter): - if (left_segments[i] < right_segments[i]): + if left_segments[i] < right_segments[i]: return -1 - if (left_segments[i] > right_segments[i]): + if left_segments[i] > right_segments[i]: return 1 left_length = len(left) @@ -172,17 +169,16 @@ def compare_objects(left, right): left_fields = left.map_value.fields right_fields = right.map_value.fields - for left_key, right_key in zip( - sorted(left_fields), sorted(right_fields) - ): + for left_key, right_key in zip(sorted(left_fields), sorted(right_fields)): keyCompare = Order._compare_to(left_key, right_key) if keyCompare != 0: return keyCompare value_compare = Order.compare( - left_fields[left_key], right_fields[right_key]) + left_fields[left_key], right_fields[right_key] + ) if value_compare != 0: - return value_compare + return value_compare return 
Order._compare_to(len(left_fields), len(right_fields)) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py index ec4eff553cce..9bb7f6553b04 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2.py @@ -2,372 +2,561 @@ # source: google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.firestore_v1beta1.proto.admin import index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2 -from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.cloud.firestore_v1beta1.proto.admin import ( + index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto', - package='google.firestore.admin.v1beta1', - syntax='proto3', - serialized_pb=_b('\n@google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x36google/cloud/firestore_v1beta1/proto/admin/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x80\x03\n\x16IndexOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05index\x18\x03 \x01(\t\x12\\\n\x0eoperation_type\x18\x04 \x01(\x0e\x32\x44.google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType\x12\x11\n\tcancelled\x18\x05 \x01(\x08\x12\x43\n\x11\x64ocument_progress\x18\x06 \x01(\x0b\x32(.google.firestore.admin.v1beta1.Progress\"C\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x43REATING_INDEX\x10\x01\":\n\x08Progress\x12\x16\n\x0ework_completed\x18\x01 \x01(\x03\x12\x16\n\x0ework_estimated\x18\x02 \x01(\x03\"Z\n\x12\x43reateIndexRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x34\n\x05index\x18\x02 \x01(\x0b\x32%.google.firestore.admin.v1beta1.Index\"\x1f\n\x0fGetIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"[\n\x12ListIndexesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\"\"\n\x12\x44\x65leteIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"f\n\x13ListIndexesResponse\x12\x36\n\x07indexes\x18\x01 
\x03(\x0b\x32%.google.firestore.admin.v1beta1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x9c\x05\n\x0e\x46irestoreAdmin\x12\xa1\x01\n\x0b\x43reateIndex\x12\x32.google.firestore.admin.v1beta1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation\"?\x82\xd3\xe4\x93\x02\x39\"0/v1beta1/{parent=projects/*/databases/*}/indexes:\x05index\x12\xb0\x01\n\x0bListIndexes\x12\x32.google.firestore.admin.v1beta1.ListIndexesRequest\x1a\x33.google.firestore.admin.v1beta1.ListIndexesResponse\"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{parent=projects/*/databases/*}/indexes\x12\x9c\x01\n\x08GetIndex\x12/.google.firestore.admin.v1beta1.GetIndexRequest\x1a%.google.firestore.admin.v1beta1.Index\"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{name=projects/*/databases/*/indexes/*}\x12\x93\x01\n\x0b\x44\x65leteIndex\x12\x32.google.firestore.admin.v1beta1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty\"8\x82\xd3\xe4\x93\x02\x32*0/v1beta1/{name=projects/*/databases/*/indexes/*}B\xae\x01\n\"com.google.firestore.admin.v1beta1B\x13\x46irestoreAdminProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - + name="google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto", + package="google.firestore.admin.v1beta1", + syntax="proto3", + serialized_pb=_b( + '\n@google/cloud/firestore_v1beta1/proto/admin/firestore_admin.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x36google/cloud/firestore_v1beta1/proto/admin/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x80\x03\n\x16IndexOperationMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\r\n\x05index\x18\x03 \x01(\t\x12\\\n\x0eoperation_type\x18\x04 \x01(\x0e\x32\x44.google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType\x12\x11\n\tcancelled\x18\x05 \x01(\x08\x12\x43\n\x11\x64ocument_progress\x18\x06 \x01(\x0b\x32(.google.firestore.admin.v1beta1.Progress"C\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x43REATING_INDEX\x10\x01":\n\x08Progress\x12\x16\n\x0ework_completed\x18\x01 \x01(\x03\x12\x16\n\x0ework_estimated\x18\x02 \x01(\x03"Z\n\x12\x43reateIndexRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x34\n\x05index\x18\x02 \x01(\x0b\x32%.google.firestore.admin.v1beta1.Index"\x1f\n\x0fGetIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"[\n\x12ListIndexesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t""\n\x12\x44\x65leteIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"f\n\x13ListIndexesResponse\x12\x36\n\x07indexes\x18\x01 \x03(\x0b\x32%.google.firestore.admin.v1beta1.Index\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\x9c\x05\n\x0e\x46irestoreAdmin\x12\xa1\x01\n\x0b\x43reateIndex\x12\x32.google.firestore.admin.v1beta1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation"?\x82\xd3\xe4\x93\x02\x39"0/v1beta1/{parent=projects/*/databases/*}/indexes:\x05index\x12\xb0\x01\n\x0bListIndexes\x12\x32.google.firestore.admin.v1beta1.ListIndexesRequest\x1a\x33.google.firestore.admin.v1beta1.ListIndexesResponse"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{parent=projects/*/databases/*}/indexes\x12\x9c\x01\n\x08GetIndex\x12/.google.firestore.admin.v1beta1.GetIndexRequest\x1a%.google.firestore.admin.v1beta1.Index"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1beta1/{name=projects/*/databases/*/indexes/*}\x12\x93\x01\n\x0b\x44\x65leteIndex\x12\x32.google.firestore.admin.v1beta1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\x32*0/v1beta1/{name=projects/*/databases/*/indexes/*}B\xae\x01\n"com.google.firestore.admin.v1beta1B\x13\x46irestoreAdminProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _INDEXOPERATIONMETADATA_OPERATIONTYPE = _descriptor.EnumDescriptor( - name='OperationType', - full_name='google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATION_TYPE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='CREATING_INDEX', index=1, number=1, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=603, - serialized_end=670, + name="OperationType", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.OperationType", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATION_TYPE_UNSPECIFIED", + index=0, + number=0, + options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="CREATING_INDEX", index=1, number=1, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=603, + serialized_end=670, ) _sym_db.RegisterEnumDescriptor(_INDEXOPERATIONMETADATA_OPERATIONTYPE) _INDEXOPERATIONMETADATA = _descriptor.Descriptor( - name='IndexOperationMetadata', - full_name='google.firestore.admin.v1beta1.IndexOperationMetadata', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='start_time', full_name='google.firestore.admin.v1beta1.IndexOperationMetadata.start_time', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='end_time', full_name='google.firestore.admin.v1beta1.IndexOperationMetadata.end_time', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='index', 
full_name='google.firestore.admin.v1beta1.IndexOperationMetadata.index', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='operation_type', full_name='google.firestore.admin.v1beta1.IndexOperationMetadata.operation_type', index=3, - number=4, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='cancelled', full_name='google.firestore.admin.v1beta1.IndexOperationMetadata.cancelled', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='document_progress', full_name='google.firestore.admin.v1beta1.IndexOperationMetadata.document_progress', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _INDEXOPERATIONMETADATA_OPERATIONTYPE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=286, - serialized_end=670, + name="IndexOperationMetadata", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="start_time", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.start_time", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.end_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="index", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.index", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="operation_type", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.operation_type", + index=3, + number=4, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="cancelled", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.cancelled", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="document_progress", + full_name="google.firestore.admin.v1beta1.IndexOperationMetadata.document_progress", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_INDEXOPERATIONMETADATA_OPERATIONTYPE], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=286, + serialized_end=670, ) _PROGRESS = _descriptor.Descriptor( - name='Progress', - full_name='google.firestore.admin.v1beta1.Progress', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='work_completed', full_name='google.firestore.admin.v1beta1.Progress.work_completed', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='work_estimated', full_name='google.firestore.admin.v1beta1.Progress.work_estimated', index=1, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=672, - serialized_end=730, + name="Progress", + full_name="google.firestore.admin.v1beta1.Progress", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="work_completed", + full_name="google.firestore.admin.v1beta1.Progress.work_completed", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="work_estimated", + full_name="google.firestore.admin.v1beta1.Progress.work_estimated", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=672, + serialized_end=730, ) _CREATEINDEXREQUEST = _descriptor.Descriptor( - name='CreateIndexRequest', - full_name='google.firestore.admin.v1beta1.CreateIndexRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.firestore.admin.v1beta1.CreateIndexRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='index', full_name='google.firestore.admin.v1beta1.CreateIndexRequest.index', index=1, - 
number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=732, - serialized_end=822, + name="CreateIndexRequest", + full_name="google.firestore.admin.v1beta1.CreateIndexRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.firestore.admin.v1beta1.CreateIndexRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="index", + full_name="google.firestore.admin.v1beta1.CreateIndexRequest.index", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=732, + serialized_end=822, ) _GETINDEXREQUEST = _descriptor.Descriptor( - name='GetIndexRequest', - full_name='google.firestore.admin.v1beta1.GetIndexRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.firestore.admin.v1beta1.GetIndexRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=824, - serialized_end=855, + name="GetIndexRequest", + full_name="google.firestore.admin.v1beta1.GetIndexRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.firestore.admin.v1beta1.GetIndexRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=824, + serialized_end=855, ) _LISTINDEXESREQUEST = _descriptor.Descriptor( - name='ListIndexesRequest', - full_name='google.firestore.admin.v1beta1.ListIndexesRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.firestore.admin.v1beta1.ListIndexesRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
options=None), - _descriptor.FieldDescriptor( - name='filter', full_name='google.firestore.admin.v1beta1.ListIndexesRequest.filter', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.firestore.admin.v1beta1.ListIndexesRequest.page_size', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.firestore.admin.v1beta1.ListIndexesRequest.page_token', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=857, - serialized_end=948, + name="ListIndexesRequest", + full_name="google.firestore.admin.v1beta1.ListIndexesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.firestore.admin.v1beta1.ListIndexesRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.firestore.admin.v1beta1.ListIndexesRequest.filter", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.firestore.admin.v1beta1.ListIndexesRequest.page_size", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.firestore.admin.v1beta1.ListIndexesRequest.page_token", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=857, + serialized_end=948, ) _DELETEINDEXREQUEST = _descriptor.Descriptor( - name='DeleteIndexRequest', - full_name='google.firestore.admin.v1beta1.DeleteIndexRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.firestore.admin.v1beta1.DeleteIndexRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, 
default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=950, - serialized_end=984, + name="DeleteIndexRequest", + full_name="google.firestore.admin.v1beta1.DeleteIndexRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.firestore.admin.v1beta1.DeleteIndexRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=950, + serialized_end=984, ) _LISTINDEXESRESPONSE = _descriptor.Descriptor( - name='ListIndexesResponse', - full_name='google.firestore.admin.v1beta1.ListIndexesResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='indexes', full_name='google.firestore.admin.v1beta1.ListIndexesResponse.indexes', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.firestore.admin.v1beta1.ListIndexesResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=986, - serialized_end=1088, + name="ListIndexesResponse", + full_name="google.firestore.admin.v1beta1.ListIndexesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="indexes", + full_name="google.firestore.admin.v1beta1.ListIndexesResponse.indexes", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.firestore.admin.v1beta1.ListIndexesResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=986, + serialized_end=1088, ) -_INDEXOPERATIONMETADATA.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_INDEXOPERATIONMETADATA.fields_by_name['end_time'].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_INDEXOPERATIONMETADATA.fields_by_name['operation_type'].enum_type = _INDEXOPERATIONMETADATA_OPERATIONTYPE -_INDEXOPERATIONMETADATA.fields_by_name['document_progress'].message_type = _PROGRESS +_INDEXOPERATIONMETADATA.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_INDEXOPERATIONMETADATA.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_INDEXOPERATIONMETADATA.fields_by_name[ + "operation_type" +].enum_type = _INDEXOPERATIONMETADATA_OPERATIONTYPE +_INDEXOPERATIONMETADATA.fields_by_name["document_progress"].message_type = _PROGRESS _INDEXOPERATIONMETADATA_OPERATIONTYPE.containing_type = _INDEXOPERATIONMETADATA -_CREATEINDEXREQUEST.fields_by_name['index'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX -_LISTINDEXESRESPONSE.fields_by_name['indexes'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX -DESCRIPTOR.message_types_by_name['IndexOperationMetadata'] = _INDEXOPERATIONMETADATA -DESCRIPTOR.message_types_by_name['Progress'] = _PROGRESS -DESCRIPTOR.message_types_by_name['CreateIndexRequest'] = _CREATEINDEXREQUEST -DESCRIPTOR.message_types_by_name['GetIndexRequest'] = _GETINDEXREQUEST -DESCRIPTOR.message_types_by_name['ListIndexesRequest'] = _LISTINDEXESREQUEST -DESCRIPTOR.message_types_by_name['DeleteIndexRequest'] = _DELETEINDEXREQUEST -DESCRIPTOR.message_types_by_name['ListIndexesResponse'] = _LISTINDEXESRESPONSE +_CREATEINDEXREQUEST.fields_by_name[ + "index" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX +) +_LISTINDEXESRESPONSE.fields_by_name[ + "indexes" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX +) +DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA +DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS +DESCRIPTOR.message_types_by_name["CreateIndexRequest"] = _CREATEINDEXREQUEST +DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST +DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST +DESCRIPTOR.message_types_by_name["DeleteIndexRequest"] = _DELETEINDEXREQUEST +DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -IndexOperationMetadata = _reflection.GeneratedProtocolMessageType('IndexOperationMetadata', (_message.Message,), dict( - DESCRIPTOR = _INDEXOPERATIONMETADATA, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2' - , - __doc__ = """Metadata for index operations. This metadata populates the metadata +IndexOperationMetadata = _reflection.GeneratedProtocolMessageType( + "IndexOperationMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_INDEXOPERATIONMETADATA, + __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2", + __doc__="""Metadata for index operations. This metadata populates the metadata field of [google.longrunning.Operation][google.longrunning.Operation]. @@ -392,15 +581,18 @@ Progress of the existing operation, measured in number of documents. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexOperationMetadata) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexOperationMetadata) + ), +) _sym_db.RegisterMessage(IndexOperationMetadata) -Progress = _reflection.GeneratedProtocolMessageType('Progress', (_message.Message,), dict( - DESCRIPTOR = _PROGRESS, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2' - , - __doc__ = """Measures the progress of a particular metric. +Progress = _reflection.GeneratedProtocolMessageType( + "Progress", + (_message.Message,), + dict( + DESCRIPTOR=_PROGRESS, + __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2", + __doc__="""Measures the progress of a particular metric. Attributes: @@ -412,15 +604,18 @@ the work estimate is unavailable. May change as work progresses. """, - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Progress) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Progress) + ), +) _sym_db.RegisterMessage(Progress) -CreateIndexRequest = _reflection.GeneratedProtocolMessageType('CreateIndexRequest', (_message.Message,), dict( - DESCRIPTOR = _CREATEINDEXREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2' - , - __doc__ = """The request for +CreateIndexRequest = _reflection.GeneratedProtocolMessageType( + "CreateIndexRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEINDEXREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2", + __doc__="""The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex]. @@ -433,15 +628,18 @@ specified. Certain single field indexes cannot be created or deleted. """, - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.CreateIndexRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.CreateIndexRequest) + ), +) _sym_db.RegisterMessage(CreateIndexRequest) -GetIndexRequest = _reflection.GeneratedProtocolMessageType('GetIndexRequest', (_message.Message,), dict( - DESCRIPTOR = _GETINDEXREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2' - , - __doc__ = """The request for +GetIndexRequest = _reflection.GeneratedProtocolMessageType( + "GetIndexRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETINDEXREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2", + __doc__="""The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex]. @@ -450,15 +648,18 @@ The name of the index. 
For example: ``projects/{project_id}/da tabases/{database_id}/indexes/{index_id}`` """, - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.GetIndexRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.GetIndexRequest) + ), +) _sym_db.RegisterMessage(GetIndexRequest) -ListIndexesRequest = _reflection.GeneratedProtocolMessageType('ListIndexesRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTINDEXESREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2' - , - __doc__ = """The request for +ListIndexesRequest = _reflection.GeneratedProtocolMessageType( + "ListIndexesRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTINDEXESREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2", + __doc__="""The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes]. @@ -471,15 +672,18 @@ page_token: The standard List page token. """, - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesRequest) + ), +) _sym_db.RegisterMessage(ListIndexesRequest) -DeleteIndexRequest = _reflection.GeneratedProtocolMessageType('DeleteIndexRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETEINDEXREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2' - , - __doc__ = """The request for +DeleteIndexRequest = _reflection.GeneratedProtocolMessageType( + "DeleteIndexRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEINDEXREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2", + __doc__="""The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex]. @@ -488,15 +692,18 @@ The index name. For example: ``projects/{project_id}/databases /{database_id}/indexes/{index_id}`` """, - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.DeleteIndexRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.DeleteIndexRequest) + ), +) _sym_db.RegisterMessage(DeleteIndexRequest) -ListIndexesResponse = _reflection.GeneratedProtocolMessageType('ListIndexesResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTINDEXESRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2' - , - __doc__ = """The response for +ListIndexesResponse = _reflection.GeneratedProtocolMessageType( + "ListIndexesResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTINDEXESRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.admin.firestore_admin_pb2", + __doc__="""The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes]. @@ -506,76 +713,102 @@ next_page_token: The standard List next-page token. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.ListIndexesResponse) + ), +) _sym_db.RegisterMessage(ListIndexesResponse) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\"com.google.firestore.admin.v1beta1B\023FirestoreAdminProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + '\n"com.google.firestore.admin.v1beta1B\023FirestoreAdminProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1' + ), +) _FIRESTOREADMIN = _descriptor.ServiceDescriptor( - name='FirestoreAdmin', - full_name='google.firestore.admin.v1beta1.FirestoreAdmin', - file=DESCRIPTOR, - index=0, - options=None, - serialized_start=1091, - serialized_end=1759, - methods=[ - _descriptor.MethodDescriptor( - name='CreateIndex', - full_name='google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex', + name="FirestoreAdmin", + full_name="google.firestore.admin.v1beta1.FirestoreAdmin", + file=DESCRIPTOR, index=0, - containing_service=None, - input_type=_CREATEINDEXREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0029\"0/v1beta1/{parent=projects/*/databases/*}/indexes:\005index')), - ), - _descriptor.MethodDescriptor( - name='ListIndexes', - full_name='google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes', - index=1, - containing_service=None, - input_type=_LISTINDEXESREQUEST, - output_type=_LISTINDEXESRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0022\0220/v1beta1/{parent=projects/*/databases/*}/indexes')), - ), - _descriptor.MethodDescriptor( - name='GetIndex', - full_name='google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex', - index=2, - containing_service=None, - input_type=_GETINDEXREQUEST, - output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0022\0220/v1beta1/{name=projects/*/databases/*/indexes/*}')), - ), - _descriptor.MethodDescriptor( - name='DeleteIndex', - full_name='google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex', - index=3, - containing_service=None, - input_type=_DELETEINDEXREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0022*0/v1beta1/{name=projects/*/databases/*/indexes/*}')), - ), -]) + options=None, + serialized_start=1091, + serialized_end=1759, + methods=[ + _descriptor.MethodDescriptor( + name="CreateIndex", + full_name="google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex", + index=0, + containing_service=None, + input_type=_CREATEINDEXREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\0029"0/v1beta1/{parent=projects/*/databases/*}/indexes:\005index' + ), + ), + ), + _descriptor.MethodDescriptor( + name="ListIndexes", + full_name="google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes", + index=1, + 
containing_service=None, + input_type=_LISTINDEXESREQUEST, + output_type=_LISTINDEXESRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\0022\0220/v1beta1/{parent=projects/*/databases/*}/indexes" + ), + ), + ), + _descriptor.MethodDescriptor( + name="GetIndex", + full_name="google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex", + index=2, + containing_service=None, + input_type=_GETINDEXREQUEST, + output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2._INDEX, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\0022\0220/v1beta1/{name=projects/*/databases/*/indexes/*}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteIndex", + full_name="google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex", + index=3, + containing_service=None, + input_type=_DELETEINDEXREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\0022*0/v1beta1/{name=projects/*/databases/*/indexes/*}" + ), + ), + ), + ], +) _sym_db.RegisterServiceDescriptor(_FIRESTOREADMIN) -DESCRIPTOR.services_by_name['FirestoreAdmin'] = _FIRESTOREADMIN +DESCRIPTOR.services_by_name["FirestoreAdmin"] = _FIRESTOREADMIN try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. - import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities - - class FirestoreAdminStub(object): - """The Cloud Firestore Admin API. + class FirestoreAdminStub(object): + """The Cloud Firestore Admin API. This API provides several administrative services for Cloud Firestore. @@ -620,36 +853,35 @@ class FirestoreAdminStub(object): service `google.longrunning.Operations`. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. 
""" - self.CreateIndex = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex', - request_serializer=CreateIndexRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.ListIndexes = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes', - request_serializer=ListIndexesRequest.SerializeToString, - response_deserializer=ListIndexesResponse.FromString, - ) - self.GetIndex = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex', - request_serializer=GetIndexRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString, - ) - self.DeleteIndex = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex', - request_serializer=DeleteIndexRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - - class FirestoreAdminServicer(object): - """The Cloud Firestore Admin API. + self.CreateIndex = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex", + request_serializer=CreateIndexRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ListIndexes = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes", + request_serializer=ListIndexesRequest.SerializeToString, + response_deserializer=ListIndexesResponse.FromString, + ) + self.GetIndex = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex", + request_serializer=GetIndexRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString, + ) + self.DeleteIndex = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex", + request_serializer=DeleteIndexRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + class FirestoreAdminServicer(object): + """The Cloud Firestore Admin API. This API provides several administrative services for Cloud Firestore. @@ -694,8 +926,8 @@ class FirestoreAdminServicer(object): service `google.longrunning.Operations`. """ - def CreateIndex(self, request, context): - """Creates the specified index. + def CreateIndex(self, request, context): + """Creates the specified index. A newly created index's initial state is `CREATING`. On completion of the returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`. If the index already exists, the call will return an `ALREADY_EXISTS` @@ -709,67 +941,67 @@ def CreateIndex(self, request, context): Indexes with a single field cannot be created. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListIndexes(self, request, context): - """Lists the indexes that match the specified filters. + def ListIndexes(self, request, context): + """Lists the indexes that match the specified filters. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetIndex(self, request, context): - """Gets an index. + def GetIndex(self, request, context): + """Gets an index. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteIndex(self, request, context): - """Deletes an index. + def DeleteIndex(self, request, context): + """Deletes an index. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - - def add_FirestoreAdminServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateIndex': grpc.unary_unary_rpc_method_handler( - servicer.CreateIndex, - request_deserializer=CreateIndexRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'ListIndexes': grpc.unary_unary_rpc_method_handler( - servicer.ListIndexes, - request_deserializer=ListIndexesRequest.FromString, - response_serializer=ListIndexesResponse.SerializeToString, - ), - 'GetIndex': grpc.unary_unary_rpc_method_handler( - servicer.GetIndex, - request_deserializer=GetIndexRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString, - ), - 'DeleteIndex': grpc.unary_unary_rpc_method_handler( - servicer.DeleteIndex, - request_deserializer=DeleteIndexRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.firestore.admin.v1beta1.FirestoreAdmin', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - class BetaFirestoreAdminServicer(object): - """The Beta API is deprecated for 0.15.0 and later. 
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def add_FirestoreAdminServicer_to_server(servicer, server): + rpc_method_handlers = { + "CreateIndex": grpc.unary_unary_rpc_method_handler( + servicer.CreateIndex, + request_deserializer=CreateIndexRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ListIndexes": grpc.unary_unary_rpc_method_handler( + servicer.ListIndexes, + request_deserializer=ListIndexesRequest.FromString, + response_serializer=ListIndexesResponse.SerializeToString, + ), + "GetIndex": grpc.unary_unary_rpc_method_handler( + servicer.GetIndex, + request_deserializer=GetIndexRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString, + ), + "DeleteIndex": grpc.unary_unary_rpc_method_handler( + servicer.DeleteIndex, + request_deserializer=DeleteIndexRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.firestore.admin.v1beta1.FirestoreAdmin", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + + class BetaFirestoreAdminServicer(object): + """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This class was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """The Cloud Firestore Admin API. + + """The Cloud Firestore Admin API. This API provides several administrative services for Cloud Firestore. @@ -813,8 +1045,9 @@ class BetaFirestoreAdminServicer(object): Operations are created by service `FirestoreAdmin`, but are accessed via service `google.longrunning.Operations`. """ - def CreateIndex(self, request, context): - """Creates the specified index. + + def CreateIndex(self, request, context): + """Creates the specified index. A newly created index's initial state is `CREATING`. On completion of the returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`. If the index already exists, the call will return an `ALREADY_EXISTS` @@ -828,28 +1061,31 @@ def CreateIndex(self, request, context): Indexes with a single field cannot be created. """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListIndexes(self, request, context): - """Lists the indexes that match the specified filters. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def GetIndex(self, request, context): - """Gets an index. + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + def ListIndexes(self, request, context): + """Lists the indexes that match the specified filters. """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def DeleteIndex(self, request, context): - """Deletes an index. + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + def GetIndex(self, request, context): + """Gets an index. """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteIndex(self, request, context): + """Deletes an index. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - class BetaFirestoreAdminStub(object): - """The Beta API is deprecated for 0.15.0 and later. 
+ class BetaFirestoreAdminStub(object): + """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This class was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """The Cloud Firestore Admin API. + + """The Cloud Firestore Admin API. This API provides several administrative services for Cloud Firestore. @@ -893,8 +1129,16 @@ class BetaFirestoreAdminStub(object): Operations are created by service `FirestoreAdmin`, but are accessed via service `google.longrunning.Operations`. """ - def CreateIndex(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates the specified index. + + def CreateIndex( + self, + request, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Creates the specified index. A newly created index's initial state is `CREATING`. On completion of the returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`. If the index already exists, the call will return an `ALREADY_EXISTS` @@ -908,79 +1152,192 @@ def CreateIndex(self, request, timeout, metadata=None, with_call=False, protocol Indexes with a single field cannot be created. """ - raise NotImplementedError() - CreateIndex.future = None - def ListIndexes(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists the indexes that match the specified filters. + raise NotImplementedError() + + CreateIndex.future = None + + def ListIndexes( + self, + request, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Lists the indexes that match the specified filters. """ - raise NotImplementedError() - ListIndexes.future = None - def GetIndex(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Gets an index. + raise NotImplementedError() + + ListIndexes.future = None + + def GetIndex( + self, + request, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Gets an index. """ - raise NotImplementedError() - GetIndex.future = None - def DeleteIndex(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Deletes an index. + raise NotImplementedError() + + GetIndex.future = None + + def DeleteIndex( + self, + request, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Deletes an index. """ - raise NotImplementedError() - DeleteIndex.future = None + raise NotImplementedError() + DeleteIndex.future = None - def beta_create_FirestoreAdmin_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. + def beta_create_FirestoreAdmin_server( + servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None + ): + """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. 
This function was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'CreateIndex'): CreateIndexRequest.FromString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'DeleteIndex'): DeleteIndexRequest.FromString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'GetIndex'): GetIndexRequest.FromString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'ListIndexes'): ListIndexesRequest.FromString, - } - response_serializers = { - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'CreateIndex'): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'DeleteIndex'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'GetIndex'): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'ListIndexes'): ListIndexesResponse.SerializeToString, - } - method_implementations = { - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'CreateIndex'): face_utilities.unary_unary_inline(servicer.CreateIndex), - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'DeleteIndex'): face_utilities.unary_unary_inline(servicer.DeleteIndex), - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'GetIndex'): face_utilities.unary_unary_inline(servicer.GetIndex), - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'ListIndexes'): face_utilities.unary_unary_inline(servicer.ListIndexes), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_FirestoreAdmin_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. 
+ request_deserializers = { + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "CreateIndex", + ): CreateIndexRequest.FromString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "DeleteIndex", + ): DeleteIndexRequest.FromString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "GetIndex", + ): GetIndexRequest.FromString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "ListIndexes", + ): ListIndexesRequest.FromString, + } + response_serializers = { + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "CreateIndex", + ): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "DeleteIndex", + ): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "GetIndex", + ): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "ListIndexes", + ): ListIndexesResponse.SerializeToString, + } + method_implementations = { + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "CreateIndex", + ): face_utilities.unary_unary_inline(servicer.CreateIndex), + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "DeleteIndex", + ): face_utilities.unary_unary_inline(servicer.DeleteIndex), + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "GetIndex", + ): face_utilities.unary_unary_inline(servicer.GetIndex), + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "ListIndexes", + ): face_utilities.unary_unary_inline(servicer.ListIndexes), + } + server_options = beta_implementations.server_options( + request_deserializers=request_deserializers, + response_serializers=response_serializers, + thread_pool=pool, + thread_pool_size=pool_size, + default_timeout=default_timeout, + maximum_timeout=maximum_timeout, + ) + return beta_implementations.server( + method_implementations, options=server_options + ) + + def beta_create_FirestoreAdmin_stub( + channel, host=None, metadata_transformer=None, pool=None, pool_size=None + ): + """The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. 
This function was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'CreateIndex'): CreateIndexRequest.SerializeToString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'DeleteIndex'): DeleteIndexRequest.SerializeToString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'GetIndex'): GetIndexRequest.SerializeToString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'ListIndexes'): ListIndexesRequest.SerializeToString, - } - response_deserializers = { - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'CreateIndex'): google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'DeleteIndex'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'GetIndex'): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString, - ('google.firestore.admin.v1beta1.FirestoreAdmin', 'ListIndexes'): ListIndexesResponse.FromString, - } - cardinalities = { - 'CreateIndex': cardinality.Cardinality.UNARY_UNARY, - 'DeleteIndex': cardinality.Cardinality.UNARY_UNARY, - 'GetIndex': cardinality.Cardinality.UNARY_UNARY, - 'ListIndexes': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.firestore.admin.v1beta1.FirestoreAdmin', cardinalities, options=stub_options) + request_serializers = { + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "CreateIndex", + ): CreateIndexRequest.SerializeToString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "DeleteIndex", + ): DeleteIndexRequest.SerializeToString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "GetIndex", + ): GetIndexRequest.SerializeToString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "ListIndexes", + ): ListIndexesRequest.SerializeToString, + } + response_deserializers = { + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "CreateIndex", + ): google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "DeleteIndex", + ): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "GetIndex", + ): google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString, + ( + "google.firestore.admin.v1beta1.FirestoreAdmin", + "ListIndexes", + ): ListIndexesResponse.FromString, + } + cardinalities = { + "CreateIndex": cardinality.Cardinality.UNARY_UNARY, + "DeleteIndex": cardinality.Cardinality.UNARY_UNARY, + "GetIndex": cardinality.Cardinality.UNARY_UNARY, + "ListIndexes": cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options( + host=host, + metadata_transformer=metadata_transformer, + request_serializers=request_serializers, + response_deserializers=response_deserializers, + thread_pool=pool, + thread_pool_size=pool_size, + ) + return beta_implementations.dynamic_stub( + channel, + "google.firestore.admin.v1beta1.FirestoreAdmin", + cardinalities, + options=stub_options, + ) + + except ImportError: - pass + pass # @@protoc_insertion_point(module_scope) diff --git 
a/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py index d6cf901121b6..81eaad7ad17e 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/admin/firestore_admin_pb2_grpc.py @@ -1,14 +1,20 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from google.cloud.firestore_v1beta1.proto.admin import firestore_admin_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2 -from google.cloud.firestore_v1beta1.proto.admin import index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2 -from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.cloud.firestore_v1beta1.proto.admin import ( + firestore_admin_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2, +) +from google.cloud.firestore_v1beta1.proto.admin import ( + index_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class FirestoreAdminStub(object): - """The Cloud Firestore Admin API. + """The Cloud Firestore Admin API. This API provides several administrative services for Cloud Firestore. @@ -53,36 +59,36 @@ class FirestoreAdminStub(object): service `google.longrunning.Operations`. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. 
""" - self.CreateIndex = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + self.CreateIndex = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, ) - self.ListIndexes = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.FromString, + self.ListIndexes = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.FromString, ) - self.GetIndex = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString, + self.GetIndex = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.FromString, ) - self.DeleteIndex = channel.unary_unary( - '/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteIndex = channel.unary_unary( + "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) class FirestoreAdminServicer(object): - """The Cloud Firestore Admin API. + """The Cloud Firestore Admin API. This API provides several administrative services for Cloud Firestore. @@ -127,8 +133,8 @@ class FirestoreAdminServicer(object): service `google.longrunning.Operations`. """ - def CreateIndex(self, request, context): - """Creates the specified index. + def CreateIndex(self, request, context): + """Creates the specified index. A newly created index's initial state is `CREATING`. On completion of the returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`. 
If the index already exists, the call will return an `ALREADY_EXISTS` @@ -142,55 +148,56 @@ def CreateIndex(self, request, context): Indexes with a single field cannot be created. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListIndexes(self, request, context): - """Lists the indexes that match the specified filters. + def ListIndexes(self, request, context): + """Lists the indexes that match the specified filters. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetIndex(self, request, context): - """Gets an index. + def GetIndex(self, request, context): + """Gets an index. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteIndex(self, request, context): - """Deletes an index. + def DeleteIndex(self, request, context): + """Deletes an index. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_FirestoreAdminServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateIndex': grpc.unary_unary_rpc_method_handler( - servicer.CreateIndex, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - 'ListIndexes': grpc.unary_unary_rpc_method_handler( - servicer.ListIndexes, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.SerializeToString, - ), - 'GetIndex': grpc.unary_unary_rpc_method_handler( - servicer.GetIndex, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString, - ), - 'DeleteIndex': grpc.unary_unary_rpc_method_handler( - servicer.DeleteIndex, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.firestore.admin.v1beta1.FirestoreAdmin', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) + rpc_method_handlers = { + "CreateIndex": 
grpc.unary_unary_rpc_method_handler( + servicer.CreateIndex, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.CreateIndexRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ListIndexes": grpc.unary_unary_rpc_method_handler( + servicer.ListIndexes, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.ListIndexesResponse.SerializeToString, + ), + "GetIndex": grpc.unary_unary_rpc_method_handler( + servicer.GetIndex, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.GetIndexRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_index__pb2.Index.SerializeToString, + ), + "DeleteIndex": grpc.unary_unary_rpc_method_handler( + servicer.DeleteIndex, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_admin_dot_firestore__admin__pb2.DeleteIndexRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.firestore.admin.v1beta1.FirestoreAdmin", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py index 98e7bd717dd0..de43ee88e44c 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2.py @@ -2,12 +2,14 @@ # source: google/cloud/firestore_v1beta1/proto/admin/index.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -17,177 +19,225 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/firestore_v1beta1/proto/admin/index.proto', - package='google.firestore.admin.v1beta1', - syntax='proto3', - serialized_pb=_b('\n6google/cloud/firestore_v1beta1/proto/admin/index.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto\"\x9c\x01\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12=\n\x04mode\x18\x02 \x01(\x0e\x32/.google.firestore.admin.v1beta1.IndexField.Mode\";\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x02\x12\x0e\n\nDESCENDING\x10\x03\"\xe8\x01\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12:\n\x06\x66ields\x18\x03 \x03(\x0b\x32*.google.firestore.admin.v1beta1.IndexField\x12:\n\x05state\x18\x06 
\x01(\x0e\x32+.google.firestore.admin.v1beta1.Index.State\"B\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x03\x12\t\n\x05READY\x10\x02\x12\t\n\x05\x45RROR\x10\x05\x42\xa5\x01\n\"com.google.firestore.admin.v1beta1B\nIndexProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,]) - + name="google/cloud/firestore_v1beta1/proto/admin/index.proto", + package="google.firestore.admin.v1beta1", + syntax="proto3", + serialized_pb=_b( + '\n6google/cloud/firestore_v1beta1/proto/admin/index.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto"\x9c\x01\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12=\n\x04mode\x18\x02 \x01(\x0e\x32/.google.firestore.admin.v1beta1.IndexField.Mode";\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x02\x12\x0e\n\nDESCENDING\x10\x03"\xe8\x01\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12:\n\x06\x66ields\x18\x03 \x03(\x0b\x32*.google.firestore.admin.v1beta1.IndexField\x12:\n\x05state\x18\x06 \x01(\x0e\x32+.google.firestore.admin.v1beta1.Index.State"B\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x03\x12\t\n\x05READY\x10\x02\x12\t\n\x05\x45RROR\x10\x05\x42\xa5\x01\n"com.google.firestore.admin.v1beta1B\nIndexProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3' + ), + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], +) _INDEXFIELD_MODE = _descriptor.EnumDescriptor( - name='Mode', - full_name='google.firestore.admin.v1beta1.IndexField.Mode', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='MODE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ASCENDING', index=1, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DESCENDING', index=2, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=218, - serialized_end=277, + name="Mode", + full_name="google.firestore.admin.v1beta1.IndexField.Mode", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="MODE_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ASCENDING", index=1, number=2, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DESCENDING", index=2, number=3, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=218, + serialized_end=277, ) _sym_db.RegisterEnumDescriptor(_INDEXFIELD_MODE) _INDEX_STATE = _descriptor.EnumDescriptor( - name='State', - full_name='google.firestore.admin.v1beta1.Index.State', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='STATE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='CREATING', index=1, number=3, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='READY', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ERROR', index=3, number=5, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=446, - serialized_end=512, + name="State", + 
full_name="google.firestore.admin.v1beta1.Index.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CREATING", index=1, number=3, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="READY", index=2, number=2, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ERROR", index=3, number=5, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=446, + serialized_end=512, ) _sym_db.RegisterEnumDescriptor(_INDEX_STATE) _INDEXFIELD = _descriptor.Descriptor( - name='IndexField', - full_name='google.firestore.admin.v1beta1.IndexField', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field_path', full_name='google.firestore.admin.v1beta1.IndexField.field_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='mode', full_name='google.firestore.admin.v1beta1.IndexField.mode', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _INDEXFIELD_MODE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=121, - serialized_end=277, + name="IndexField", + full_name="google.firestore.admin.v1beta1.IndexField", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field_path", + full_name="google.firestore.admin.v1beta1.IndexField.field_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="mode", + full_name="google.firestore.admin.v1beta1.IndexField.mode", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_INDEXFIELD_MODE], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=121, + serialized_end=277, ) _INDEX = _descriptor.Descriptor( - name='Index', - full_name='google.firestore.admin.v1beta1.Index', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.firestore.admin.v1beta1.Index.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='collection_id', full_name='google.firestore.admin.v1beta1.Index.collection_id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, 
default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='fields', full_name='google.firestore.admin.v1beta1.Index.fields', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='state', full_name='google.firestore.admin.v1beta1.Index.state', index=3, - number=6, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _INDEX_STATE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=280, - serialized_end=512, + name="Index", + full_name="google.firestore.admin.v1beta1.Index", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.firestore.admin.v1beta1.Index.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="collection_id", + full_name="google.firestore.admin.v1beta1.Index.collection_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="fields", + full_name="google.firestore.admin.v1beta1.Index.fields", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.firestore.admin.v1beta1.Index.state", + index=3, + number=6, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_INDEX_STATE], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=280, + serialized_end=512, ) -_INDEXFIELD.fields_by_name['mode'].enum_type = _INDEXFIELD_MODE +_INDEXFIELD.fields_by_name["mode"].enum_type = _INDEXFIELD_MODE _INDEXFIELD_MODE.containing_type = _INDEXFIELD -_INDEX.fields_by_name['fields'].message_type = _INDEXFIELD -_INDEX.fields_by_name['state'].enum_type = _INDEX_STATE +_INDEX.fields_by_name["fields"].message_type = _INDEXFIELD +_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE _INDEX_STATE.containing_type = _INDEX -DESCRIPTOR.message_types_by_name['IndexField'] = _INDEXFIELD -DESCRIPTOR.message_types_by_name['Index'] = _INDEX +DESCRIPTOR.message_types_by_name["IndexField"] = _INDEXFIELD +DESCRIPTOR.message_types_by_name["Index"] = _INDEX _sym_db.RegisterFileDescriptor(DESCRIPTOR) -IndexField = 
_reflection.GeneratedProtocolMessageType('IndexField', (_message.Message,), dict( - DESCRIPTOR = _INDEXFIELD, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.index_pb2' - , - __doc__ = """A field of an index. +IndexField = _reflection.GeneratedProtocolMessageType( + "IndexField", + (_message.Message,), + dict( + DESCRIPTOR=_INDEXFIELD, + __module__="google.cloud.firestore_v1beta1.proto.admin.index_pb2", + __doc__="""A field of an index. Attributes: @@ -200,15 +250,18 @@ mode: The field's mode. """, - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexField) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexField) + ), +) _sym_db.RegisterMessage(IndexField) -Index = _reflection.GeneratedProtocolMessageType('Index', (_message.Message,), dict( - DESCRIPTOR = _INDEX, - __module__ = 'google.cloud.firestore_v1beta1.proto.admin.index_pb2' - , - __doc__ = """An index definition. +Index = _reflection.GeneratedProtocolMessageType( + "Index", + (_message.Message,), + dict( + DESCRIPTOR=_INDEX, + __module__="google.cloud.firestore_v1beta1.proto.admin.index_pb2", + __doc__="""An index definition. Attributes: @@ -221,21 +274,27 @@ state: The state of the index. The state is read-only. @OutputOnly """, - # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Index) - )) + # @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Index) + ), +) _sym_db.RegisterMessage(Index) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\"com.google.firestore.admin.v1beta1B\nIndexProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + '\n"com.google.firestore.admin.v1beta1B\nIndexProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1' + ), +) try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. - import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities except ImportError: - pass + pass # @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py index a89435267cb2..07cb78fe03a9 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/admin/index_pb2_grpc.py @@ -1,3 +1,2 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
import grpc - diff --git a/firestore/google/cloud/firestore_v1beta1/proto/common_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/common_pb2.py index d0246836a662..35aed16c1fbb 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/common_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/common_pb2.py @@ -2,12 +2,14 @@ # source: google/cloud/firestore_v1beta1/proto/common.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -18,222 +20,327 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/firestore_v1beta1/proto/common.proto', - package='google.firestore.v1beta1', - syntax='proto3', - serialized_pb=_b('\n1google/cloud/firestore_v1beta1/proto/common.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"#\n\x0c\x44ocumentMask\x12\x13\n\x0b\x66ield_paths\x18\x01 \x03(\t\"e\n\x0cPrecondition\x12\x10\n\x06\x65xists\x18\x01 \x01(\x08H\x00\x12\x31\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x10\n\x0e\x63ondition_type\"\xb3\x02\n\x12TransactionOptions\x12J\n\tread_only\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.TransactionOptions.ReadOnlyH\x00\x12L\n\nread_write\x18\x03 \x01(\x0b\x32\x36.google.firestore.v1beta1.TransactionOptions.ReadWriteH\x00\x1a&\n\tReadWrite\x12\x19\n\x11retry_transaction\x18\x01 \x01(\x0c\x1aS\n\x08ReadOnly\x12/\n\tread_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selectorB\x06\n\x04modeB\xb9\x01\n\x1c\x63om.google.firestore.v1beta1B\x0b\x43ommonProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - + name="google/cloud/firestore_v1beta1/proto/common.proto", + package="google.firestore.v1beta1", + syntax="proto3", + serialized_pb=_b( + '\n1google/cloud/firestore_v1beta1/proto/common.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"#\n\x0c\x44ocumentMask\x12\x13\n\x0b\x66ield_paths\x18\x01 \x03(\t"e\n\x0cPrecondition\x12\x10\n\x06\x65xists\x18\x01 \x01(\x08H\x00\x12\x31\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x10\n\x0e\x63ondition_type"\xb3\x02\n\x12TransactionOptions\x12J\n\tread_only\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.TransactionOptions.ReadOnlyH\x00\x12L\n\nread_write\x18\x03 \x01(\x0b\x32\x36.google.firestore.v1beta1.TransactionOptions.ReadWriteH\x00\x1a&\n\tReadWrite\x12\x19\n\x11retry_transaction\x18\x01 \x01(\x0c\x1aS\n\x08ReadOnly\x12/\n\tread_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selectorB\x06\n\x04modeB\xb9\x01\n\x1c\x63om.google.firestore.v1beta1B\x0b\x43ommonProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _DOCUMENTMASK = _descriptor.Descriptor( - name='DocumentMask', - full_name='google.firestore.v1beta1.DocumentMask', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field_paths', full_name='google.firestore.v1beta1.DocumentMask.field_paths', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=142, - serialized_end=177, + name="DocumentMask", + full_name="google.firestore.v1beta1.DocumentMask", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field_paths", + full_name="google.firestore.v1beta1.DocumentMask.field_paths", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=142, + serialized_end=177, ) _PRECONDITION = _descriptor.Descriptor( - name='Precondition', - full_name='google.firestore.v1beta1.Precondition', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='exists', full_name='google.firestore.v1beta1.Precondition.exists', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_time', full_name='google.firestore.v1beta1.Precondition.update_time', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='condition_type', full_name='google.firestore.v1beta1.Precondition.condition_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=179, - serialized_end=280, + name="Precondition", + full_name="google.firestore.v1beta1.Precondition", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="exists", + full_name="google.firestore.v1beta1.Precondition.exists", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + 
default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.firestore.v1beta1.Precondition.update_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="condition_type", + full_name="google.firestore.v1beta1.Precondition.condition_type", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=179, + serialized_end=280, ) _TRANSACTIONOPTIONS_READWRITE = _descriptor.Descriptor( - name='ReadWrite', - full_name='google.firestore.v1beta1.TransactionOptions.ReadWrite', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='retry_transaction', full_name='google.firestore.v1beta1.TransactionOptions.ReadWrite.retry_transaction', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=459, - serialized_end=497, + name="ReadWrite", + full_name="google.firestore.v1beta1.TransactionOptions.ReadWrite", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="retry_transaction", + full_name="google.firestore.v1beta1.TransactionOptions.ReadWrite.retry_transaction", + index=0, + number=1, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=459, + serialized_end=497, ) _TRANSACTIONOPTIONS_READONLY = _descriptor.Descriptor( - name='ReadOnly', - full_name='google.firestore.v1beta1.TransactionOptions.ReadOnly', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.TransactionOptions.ReadOnly.read_time', index=0, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='consistency_selector', full_name='google.firestore.v1beta1.TransactionOptions.ReadOnly.consistency_selector', - index=0, containing_type=None, fields=[]), - ], - serialized_start=499, - serialized_end=582, + name="ReadOnly", + 
full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly.read_time", + index=0, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="consistency_selector", + full_name="google.firestore.v1beta1.TransactionOptions.ReadOnly.consistency_selector", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=499, + serialized_end=582, ) _TRANSACTIONOPTIONS = _descriptor.Descriptor( - name='TransactionOptions', - full_name='google.firestore.v1beta1.TransactionOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='read_only', full_name='google.firestore.v1beta1.TransactionOptions.read_only', index=0, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_write', full_name='google.firestore.v1beta1.TransactionOptions.read_write', index=1, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_TRANSACTIONOPTIONS_READWRITE, _TRANSACTIONOPTIONS_READONLY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='mode', full_name='google.firestore.v1beta1.TransactionOptions.mode', - index=0, containing_type=None, fields=[]), - ], - serialized_start=283, - serialized_end=590, + name="TransactionOptions", + full_name="google.firestore.v1beta1.TransactionOptions", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="read_only", + full_name="google.firestore.v1beta1.TransactionOptions.read_only", + index=0, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_write", + full_name="google.firestore.v1beta1.TransactionOptions.read_write", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_TRANSACTIONOPTIONS_READWRITE, _TRANSACTIONOPTIONS_READONLY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="mode", + full_name="google.firestore.v1beta1.TransactionOptions.mode", + index=0, + containing_type=None, + fields=[], + ) + 
], + serialized_start=283, + serialized_end=590, ) -_PRECONDITION.fields_by_name['update_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_PRECONDITION.oneofs_by_name['condition_type'].fields.append( - _PRECONDITION.fields_by_name['exists']) -_PRECONDITION.fields_by_name['exists'].containing_oneof = _PRECONDITION.oneofs_by_name['condition_type'] -_PRECONDITION.oneofs_by_name['condition_type'].fields.append( - _PRECONDITION.fields_by_name['update_time']) -_PRECONDITION.fields_by_name['update_time'].containing_oneof = _PRECONDITION.oneofs_by_name['condition_type'] +_PRECONDITION.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_PRECONDITION.oneofs_by_name["condition_type"].fields.append( + _PRECONDITION.fields_by_name["exists"] +) +_PRECONDITION.fields_by_name["exists"].containing_oneof = _PRECONDITION.oneofs_by_name[ + "condition_type" +] +_PRECONDITION.oneofs_by_name["condition_type"].fields.append( + _PRECONDITION.fields_by_name["update_time"] +) +_PRECONDITION.fields_by_name[ + "update_time" +].containing_oneof = _PRECONDITION.oneofs_by_name["condition_type"] _TRANSACTIONOPTIONS_READWRITE.containing_type = _TRANSACTIONOPTIONS -_TRANSACTIONOPTIONS_READONLY.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TRANSACTIONOPTIONS_READONLY.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _TRANSACTIONOPTIONS_READONLY.containing_type = _TRANSACTIONOPTIONS -_TRANSACTIONOPTIONS_READONLY.oneofs_by_name['consistency_selector'].fields.append( - _TRANSACTIONOPTIONS_READONLY.fields_by_name['read_time']) -_TRANSACTIONOPTIONS_READONLY.fields_by_name['read_time'].containing_oneof = _TRANSACTIONOPTIONS_READONLY.oneofs_by_name['consistency_selector'] -_TRANSACTIONOPTIONS.fields_by_name['read_only'].message_type = _TRANSACTIONOPTIONS_READONLY -_TRANSACTIONOPTIONS.fields_by_name['read_write'].message_type = _TRANSACTIONOPTIONS_READWRITE -_TRANSACTIONOPTIONS.oneofs_by_name['mode'].fields.append( - _TRANSACTIONOPTIONS.fields_by_name['read_only']) -_TRANSACTIONOPTIONS.fields_by_name['read_only'].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name['mode'] -_TRANSACTIONOPTIONS.oneofs_by_name['mode'].fields.append( - _TRANSACTIONOPTIONS.fields_by_name['read_write']) -_TRANSACTIONOPTIONS.fields_by_name['read_write'].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name['mode'] -DESCRIPTOR.message_types_by_name['DocumentMask'] = _DOCUMENTMASK -DESCRIPTOR.message_types_by_name['Precondition'] = _PRECONDITION -DESCRIPTOR.message_types_by_name['TransactionOptions'] = _TRANSACTIONOPTIONS +_TRANSACTIONOPTIONS_READONLY.oneofs_by_name["consistency_selector"].fields.append( + _TRANSACTIONOPTIONS_READONLY.fields_by_name["read_time"] +) +_TRANSACTIONOPTIONS_READONLY.fields_by_name[ + "read_time" +].containing_oneof = _TRANSACTIONOPTIONS_READONLY.oneofs_by_name["consistency_selector"] +_TRANSACTIONOPTIONS.fields_by_name[ + "read_only" +].message_type = _TRANSACTIONOPTIONS_READONLY +_TRANSACTIONOPTIONS.fields_by_name[ + "read_write" +].message_type = _TRANSACTIONOPTIONS_READWRITE +_TRANSACTIONOPTIONS.oneofs_by_name["mode"].fields.append( + _TRANSACTIONOPTIONS.fields_by_name["read_only"] +) +_TRANSACTIONOPTIONS.fields_by_name[ + "read_only" +].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name["mode"] +_TRANSACTIONOPTIONS.oneofs_by_name["mode"].fields.append( + _TRANSACTIONOPTIONS.fields_by_name["read_write"] +) +_TRANSACTIONOPTIONS.fields_by_name[ + 
"read_write" +].containing_oneof = _TRANSACTIONOPTIONS.oneofs_by_name["mode"] +DESCRIPTOR.message_types_by_name["DocumentMask"] = _DOCUMENTMASK +DESCRIPTOR.message_types_by_name["Precondition"] = _PRECONDITION +DESCRIPTOR.message_types_by_name["TransactionOptions"] = _TRANSACTIONOPTIONS _sym_db.RegisterFileDescriptor(DESCRIPTOR) -DocumentMask = _reflection.GeneratedProtocolMessageType('DocumentMask', (_message.Message,), dict( - DESCRIPTOR = _DOCUMENTMASK, - __module__ = 'google.cloud.firestore_v1beta1.proto.common_pb2' - , - __doc__ = """A set of field paths on a document. Used to restrict a get or update +DocumentMask = _reflection.GeneratedProtocolMessageType( + "DocumentMask", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENTMASK, + __module__="google.cloud.firestore_v1beta1.proto.common_pb2", + __doc__="""A set of field paths on a document. Used to restrict a get or update operation on a document to a subset of its fields. This is different from standard field masks, as this is always scoped to a [Document][google.firestore.v1beta1.Document], and takes in account the @@ -246,15 +353,18 @@ [Document.fields][google.firestore.v1beta1.Document.fields] for a field path syntax reference. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentMask) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentMask) + ), +) _sym_db.RegisterMessage(DocumentMask) -Precondition = _reflection.GeneratedProtocolMessageType('Precondition', (_message.Message,), dict( - DESCRIPTOR = _PRECONDITION, - __module__ = 'google.cloud.firestore_v1beta1.proto.common_pb2' - , - __doc__ = """A precondition on a document, used for conditional operations. +Precondition = _reflection.GeneratedProtocolMessageType( + "Precondition", + (_message.Message,), + dict( + DESCRIPTOR=_PRECONDITION, + __module__="google.cloud.firestore_v1beta1.proto.common_pb2", + __doc__="""A precondition on a document, used for conditional operations. Attributes: @@ -267,32 +377,38 @@ When set, the target document must exist and have been last updated at that time. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Precondition) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Precondition) + ), +) _sym_db.RegisterMessage(Precondition) -TransactionOptions = _reflection.GeneratedProtocolMessageType('TransactionOptions', (_message.Message,), dict( - - ReadWrite = _reflection.GeneratedProtocolMessageType('ReadWrite', (_message.Message,), dict( - DESCRIPTOR = _TRANSACTIONOPTIONS_READWRITE, - __module__ = 'google.cloud.firestore_v1beta1.proto.common_pb2' - , - __doc__ = """Options for a transaction that can be used to read and write documents. +TransactionOptions = _reflection.GeneratedProtocolMessageType( + "TransactionOptions", + (_message.Message,), + dict( + ReadWrite=_reflection.GeneratedProtocolMessageType( + "ReadWrite", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSACTIONOPTIONS_READWRITE, + __module__="google.cloud.firestore_v1beta1.proto.common_pb2", + __doc__="""Options for a transaction that can be used to read and write documents. Attributes: retry_transaction: An optional transaction to retry. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadWrite) - )) - , - - ReadOnly = _reflection.GeneratedProtocolMessageType('ReadOnly', (_message.Message,), dict( - DESCRIPTOR = _TRANSACTIONOPTIONS_READONLY, - __module__ = 'google.cloud.firestore_v1beta1.proto.common_pb2' - , - __doc__ = """Options for a transaction that can only be used to read documents. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadWrite) + ), + ), + ReadOnly=_reflection.GeneratedProtocolMessageType( + "ReadOnly", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSACTIONOPTIONS_READONLY, + __module__="google.cloud.firestore_v1beta1.proto.common_pb2", + __doc__="""Options for a transaction that can only be used to read documents. Attributes: @@ -303,13 +419,12 @@ Reads documents at the given time. This may not be older than 60 seconds. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadOnly) - )) - , - DESCRIPTOR = _TRANSACTIONOPTIONS, - __module__ = 'google.cloud.firestore_v1beta1.proto.common_pb2' - , - __doc__ = """Options for creating a new transaction. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions.ReadOnly) + ), + ), + DESCRIPTOR=_TRANSACTIONOPTIONS, + __module__="google.cloud.firestore_v1beta1.proto.common_pb2", + __doc__="""Options for creating a new transaction. Attributes: @@ -321,13 +436,19 @@ The transaction can be used for both read and write operations. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TransactionOptions) + ), +) _sym_db.RegisterMessage(TransactionOptions) _sym_db.RegisterMessage(TransactionOptions.ReadWrite) _sym_db.RegisterMessage(TransactionOptions.ReadOnly) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.firestore.v1beta1B\013CommonProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\034com.google.firestore.v1beta1B\013CommonProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" + ), +) # @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py index a89435267cb2..07cb78fe03a9 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/common_pb2_grpc.py @@ -1,3 +1,2 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
import grpc - diff --git a/firestore/google/cloud/firestore_v1beta1/proto/document_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/document_pb2.py index 992e88ee4103..ded32d644e5a 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/document_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/document_pb2.py @@ -2,12 +2,14 @@ # source: google/cloud/firestore_v1beta1/proto/document.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -20,372 +22,630 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/firestore_v1beta1/proto/document.proto', - package='google.firestore.v1beta1', - syntax='proto3', - serialized_pb=_b('\n3google/cloud/firestore_v1beta1/proto/document.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"\x8a\x02\n\x08\x44ocument\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x06\x66ields\x18\x02 \x03(\x0b\x32..google.firestore.v1beta1.Document.FieldsEntry\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01\"\xb8\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x12 \x01(\x0cH\x00\x12\x19\n\x0freference_value\x18\x05 \x01(\tH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x37\n\tmap_value\x18\x06 \x01(\x0b\x32\".google.firestore.v1beta1.MapValueH\x00\x42\x0c\n\nvalue_type\"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\"\x9a\x01\n\x08MapValue\x12>\n\x06\x66ields\x18\x01 \x03(\x0b\x32..google.firestore.v1beta1.MapValue.FieldsEntry\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01\x42\xbb\x01\n\x1c\x63om.google.firestore.v1beta1B\rDocumentProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,]) - - + name="google/cloud/firestore_v1beta1/proto/document.proto", + package="google.firestore.v1beta1", + syntax="proto3", + serialized_pb=_b( + 
'\n3google/cloud/firestore_v1beta1/proto/document.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto"\x8a\x02\n\x08\x44ocument\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x06\x66ields\x18\x02 \x03(\x0b\x32..google.firestore.v1beta1.Document.FieldsEntry\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01"\xb8\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x12 \x01(\x0cH\x00\x12\x19\n\x0freference_value\x18\x05 \x01(\tH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x37\n\tmap_value\x18\x06 \x01(\x0b\x32".google.firestore.v1beta1.MapValueH\x00\x42\x0c\n\nvalue_type"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x9a\x01\n\x08MapValue\x12>\n\x06\x66ields\x18\x01 \x03(\x0b\x32..google.firestore.v1beta1.MapValue.FieldsEntry\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01\x42\xbb\x01\n\x1c\x63om.google.firestore.v1beta1B\rDocumentProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_type_dot_latlng__pb2.DESCRIPTOR, + ], +) _DOCUMENT_FIELDSENTRY = _descriptor.Descriptor( - name='FieldsEntry', - full_name='google.firestore.v1beta1.Document.FieldsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.firestore.v1beta1.Document.FieldsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.firestore.v1beta1.Document.FieldsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=389, - serialized_end=467, + name="FieldsEntry", + full_name="google.firestore.v1beta1.Document.FieldsEntry", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.firestore.v1beta1.Document.FieldsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.firestore.v1beta1.Document.FieldsEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=389, + serialized_end=467, ) _DOCUMENT = _descriptor.Descriptor( - name='Document', - full_name='google.firestore.v1beta1.Document', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.firestore.v1beta1.Document.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='fields', full_name='google.firestore.v1beta1.Document.fields', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='create_time', full_name='google.firestore.v1beta1.Document.create_time', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_time', full_name='google.firestore.v1beta1.Document.update_time', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_DOCUMENT_FIELDSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=201, - serialized_end=467, + name="Document", + full_name="google.firestore.v1beta1.Document", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.firestore.v1beta1.Document.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="fields", + full_name="google.firestore.v1beta1.Document.fields", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + 
default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.firestore.v1beta1.Document.create_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.firestore.v1beta1.Document.update_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_DOCUMENT_FIELDSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=201, + serialized_end=467, ) _VALUE = _descriptor.Descriptor( - name='Value', - full_name='google.firestore.v1beta1.Value', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='null_value', full_name='google.firestore.v1beta1.Value.null_value', index=0, - number=11, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='boolean_value', full_name='google.firestore.v1beta1.Value.boolean_value', index=1, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='integer_value', full_name='google.firestore.v1beta1.Value.integer_value', index=2, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='google.firestore.v1beta1.Value.double_value', index=3, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestamp_value', full_name='google.firestore.v1beta1.Value.timestamp_value', index=4, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string_value', full_name='google.firestore.v1beta1.Value.string_value', index=5, - number=17, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bytes_value', full_name='google.firestore.v1beta1.Value.bytes_value', index=6, - number=18, type=12, cpp_type=9, 
label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='reference_value', full_name='google.firestore.v1beta1.Value.reference_value', index=7, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='geo_point_value', full_name='google.firestore.v1beta1.Value.geo_point_value', index=8, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='array_value', full_name='google.firestore.v1beta1.Value.array_value', index=9, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='map_value', full_name='google.firestore.v1beta1.Value.map_value', index=10, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value_type', full_name='google.firestore.v1beta1.Value.value_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=470, - serialized_end=910, + name="Value", + full_name="google.firestore.v1beta1.Value", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="null_value", + full_name="google.firestore.v1beta1.Value.null_value", + index=0, + number=11, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="boolean_value", + full_name="google.firestore.v1beta1.Value.boolean_value", + index=1, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="integer_value", + full_name="google.firestore.v1beta1.Value.integer_value", + index=2, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="double_value", + full_name="google.firestore.v1beta1.Value.double_value", + index=3, + number=3, + type=1, + cpp_type=5, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + 
file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timestamp_value", + full_name="google.firestore.v1beta1.Value.timestamp_value", + index=4, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="string_value", + full_name="google.firestore.v1beta1.Value.string_value", + index=5, + number=17, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="bytes_value", + full_name="google.firestore.v1beta1.Value.bytes_value", + index=6, + number=18, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="reference_value", + full_name="google.firestore.v1beta1.Value.reference_value", + index=7, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="geo_point_value", + full_name="google.firestore.v1beta1.Value.geo_point_value", + index=8, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="array_value", + full_name="google.firestore.v1beta1.Value.array_value", + index=9, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="map_value", + full_name="google.firestore.v1beta1.Value.map_value", + index=10, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="value_type", + full_name="google.firestore.v1beta1.Value.value_type", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=470, + serialized_end=910, ) _ARRAYVALUE = _descriptor.Descriptor( - name='ArrayValue', - full_name='google.firestore.v1beta1.ArrayValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='values', full_name='google.firestore.v1beta1.ArrayValue.values', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - 
extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=912, - serialized_end=973, + name="ArrayValue", + full_name="google.firestore.v1beta1.ArrayValue", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="values", + full_name="google.firestore.v1beta1.ArrayValue.values", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=912, + serialized_end=973, ) _MAPVALUE_FIELDSENTRY = _descriptor.Descriptor( - name='FieldsEntry', - full_name='google.firestore.v1beta1.MapValue.FieldsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.firestore.v1beta1.MapValue.FieldsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.firestore.v1beta1.MapValue.FieldsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=389, - serialized_end=467, + name="FieldsEntry", + full_name="google.firestore.v1beta1.MapValue.FieldsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.firestore.v1beta1.MapValue.FieldsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.firestore.v1beta1.MapValue.FieldsEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=389, + serialized_end=467, ) _MAPVALUE = _descriptor.Descriptor( - name='MapValue', - full_name='google.firestore.v1beta1.MapValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='fields', full_name='google.firestore.v1beta1.MapValue.fields', index=0, - 
number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_MAPVALUE_FIELDSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=976, - serialized_end=1130, + name="MapValue", + full_name="google.firestore.v1beta1.MapValue", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="fields", + full_name="google.firestore.v1beta1.MapValue.fields", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[_MAPVALUE_FIELDSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=976, + serialized_end=1130, ) -_DOCUMENT_FIELDSENTRY.fields_by_name['value'].message_type = _VALUE +_DOCUMENT_FIELDSENTRY.fields_by_name["value"].message_type = _VALUE _DOCUMENT_FIELDSENTRY.containing_type = _DOCUMENT -_DOCUMENT.fields_by_name['fields'].message_type = _DOCUMENT_FIELDSENTRY -_DOCUMENT.fields_by_name['create_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DOCUMENT.fields_by_name['update_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE -_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_VALUE.fields_by_name['geo_point_value'].message_type = google_dot_type_dot_latlng__pb2._LATLNG -_VALUE.fields_by_name['array_value'].message_type = _ARRAYVALUE -_VALUE.fields_by_name['map_value'].message_type = _MAPVALUE -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['null_value']) -_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['boolean_value']) -_VALUE.fields_by_name['boolean_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['integer_value']) -_VALUE.fields_by_name['integer_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['double_value']) -_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['timestamp_value']) -_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['string_value']) -_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['bytes_value']) -_VALUE.fields_by_name['bytes_value'].containing_oneof = _VALUE.oneofs_by_name['value_type'] -_VALUE.oneofs_by_name['value_type'].fields.append( - _VALUE.fields_by_name['reference_value']) -_VALUE.fields_by_name['reference_value'].containing_oneof = 
_VALUE.oneofs_by_name['value_type']
-_VALUE.oneofs_by_name['value_type'].fields.append(
-  _VALUE.fields_by_name['geo_point_value'])
-_VALUE.fields_by_name['geo_point_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
-_VALUE.oneofs_by_name['value_type'].fields.append(
-  _VALUE.fields_by_name['array_value'])
-_VALUE.fields_by_name['array_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
-_VALUE.oneofs_by_name['value_type'].fields.append(
-  _VALUE.fields_by_name['map_value'])
-_VALUE.fields_by_name['map_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
-_ARRAYVALUE.fields_by_name['values'].message_type = _VALUE
-_MAPVALUE_FIELDSENTRY.fields_by_name['value'].message_type = _VALUE
+_DOCUMENT.fields_by_name["fields"].message_type = _DOCUMENT_FIELDSENTRY
+_DOCUMENT.fields_by_name[
+    "create_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_DOCUMENT.fields_by_name[
+    "update_time"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_VALUE.fields_by_name[
+    "null_value"
+].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE
+_VALUE.fields_by_name[
+    "timestamp_value"
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_VALUE.fields_by_name[
+    "geo_point_value"
+].message_type = google_dot_type_dot_latlng__pb2._LATLNG
+_VALUE.fields_by_name["array_value"].message_type = _ARRAYVALUE
+_VALUE.fields_by_name["map_value"].message_type = _MAPVALUE
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["null_value"])
+_VALUE.fields_by_name["null_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+    _VALUE.fields_by_name["boolean_value"]
+)
+_VALUE.fields_by_name["boolean_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+    _VALUE.fields_by_name["integer_value"]
+)
+_VALUE.fields_by_name["integer_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["double_value"])
+_VALUE.fields_by_name["double_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+    _VALUE.fields_by_name["timestamp_value"]
+)
+_VALUE.fields_by_name["timestamp_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["string_value"])
+_VALUE.fields_by_name["string_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["bytes_value"])
+_VALUE.fields_by_name["bytes_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+    _VALUE.fields_by_name["reference_value"]
+)
+_VALUE.fields_by_name["reference_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(
+    _VALUE.fields_by_name["geo_point_value"]
+)
+_VALUE.fields_by_name["geo_point_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["array_value"])
+_VALUE.fields_by_name["array_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_VALUE.oneofs_by_name["value_type"].fields.append(_VALUE.fields_by_name["map_value"])
+_VALUE.fields_by_name["map_value"].containing_oneof = _VALUE.oneofs_by_name[
+    "value_type"
+]
+_ARRAYVALUE.fields_by_name["values"].message_type = _VALUE
+_MAPVALUE_FIELDSENTRY.fields_by_name["value"].message_type = _VALUE
 _MAPVALUE_FIELDSENTRY.containing_type = _MAPVALUE
-_MAPVALUE.fields_by_name['fields'].message_type = _MAPVALUE_FIELDSENTRY
-DESCRIPTOR.message_types_by_name['Document'] = _DOCUMENT
-DESCRIPTOR.message_types_by_name['Value'] = _VALUE
-DESCRIPTOR.message_types_by_name['ArrayValue'] = _ARRAYVALUE
-DESCRIPTOR.message_types_by_name['MapValue'] = _MAPVALUE
+_MAPVALUE.fields_by_name["fields"].message_type = _MAPVALUE_FIELDSENTRY
+DESCRIPTOR.message_types_by_name["Document"] = _DOCUMENT
+DESCRIPTOR.message_types_by_name["Value"] = _VALUE
+DESCRIPTOR.message_types_by_name["ArrayValue"] = _ARRAYVALUE
+DESCRIPTOR.message_types_by_name["MapValue"] = _MAPVALUE
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
-Document = _reflection.GeneratedProtocolMessageType('Document', (_message.Message,), dict(
-
-  FieldsEntry = _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), dict(
-    DESCRIPTOR = _DOCUMENT_FIELDSENTRY,
-    __module__ = 'google.cloud.firestore_v1beta1.proto.document_pb2'
-    # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document.FieldsEntry)
-    ))
-  ,
-  DESCRIPTOR = _DOCUMENT,
-  __module__ = 'google.cloud.firestore_v1beta1.proto.document_pb2'
-  ,
-  __doc__ = """A Firestore document.
+Document = _reflection.GeneratedProtocolMessageType(
+    "Document",
+    (_message.Message,),
+    dict(
+        FieldsEntry=_reflection.GeneratedProtocolMessageType(
+            "FieldsEntry",
+            (_message.Message,),
+            dict(
+                DESCRIPTOR=_DOCUMENT_FIELDSENTRY,
+                __module__="google.cloud.firestore_v1beta1.proto.document_pb2"
+                # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document.FieldsEntry)
+            ),
+        ),
+        DESCRIPTOR=_DOCUMENT,
+        __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
+        __doc__="""A Firestore document.
 Must not exceed 1 MiB - 4 bytes.
@@ -425,16 +685,19 @@
 can also be compared to values from other documents and the
 ``read_time`` of a query.
 """,
-  # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document)
-  ))
+        # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Document)
+    ),
+)
 _sym_db.RegisterMessage(Document)
 _sym_db.RegisterMessage(Document.FieldsEntry)
-Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict(
-  DESCRIPTOR = _VALUE,
-  __module__ = 'google.cloud.firestore_v1beta1.proto.document_pb2'
-  ,
-  __doc__ = """A message that can hold any of the supported value types.
+Value = _reflection.GeneratedProtocolMessageType(
+    "Value",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_VALUE,
+        __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
+        __doc__="""A message that can hold any of the supported value types.
 Attributes:
@@ -470,37 +733,45 @@
 map_value:
 A map value.
 """,
-  # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Value)
-  ))
+        # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Value)
+    ),
+)
 _sym_db.RegisterMessage(Value)
-ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), dict(
-  DESCRIPTOR = _ARRAYVALUE,
-  __module__ = 'google.cloud.firestore_v1beta1.proto.document_pb2'
-  ,
-  __doc__ = """An array value.
+ArrayValue = _reflection.GeneratedProtocolMessageType(
+    "ArrayValue",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_ARRAYVALUE,
+        __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
+        __doc__="""An array value.
 Attributes:
 values:
 Values in the array.
 """,
-  # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ArrayValue)
-  ))
+        # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ArrayValue)
+    ),
+)
 _sym_db.RegisterMessage(ArrayValue)
-MapValue = _reflection.GeneratedProtocolMessageType('MapValue', (_message.Message,), dict(
-
-  FieldsEntry = _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), dict(
-    DESCRIPTOR = _MAPVALUE_FIELDSENTRY,
-    __module__ = 'google.cloud.firestore_v1beta1.proto.document_pb2'
-    # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue.FieldsEntry)
-    ))
-  ,
-  DESCRIPTOR = _MAPVALUE,
-  __module__ = 'google.cloud.firestore_v1beta1.proto.document_pb2'
-  ,
-  __doc__ = """A map value.
+MapValue = _reflection.GeneratedProtocolMessageType(
+    "MapValue",
+    (_message.Message,),
+    dict(
+        FieldsEntry=_reflection.GeneratedProtocolMessageType(
+            "FieldsEntry",
+            (_message.Message,),
+            dict(
+                DESCRIPTOR=_MAPVALUE_FIELDSENTRY,
+                __module__="google.cloud.firestore_v1beta1.proto.document_pb2"
+                # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue.FieldsEntry)
+            ),
+        ),
+        DESCRIPTOR=_MAPVALUE,
+        __module__="google.cloud.firestore_v1beta1.proto.document_pb2",
+        __doc__="""A map value.
 Attributes:
@@ -511,16 +782,26 @@
 documented contexts. The map keys, represented as UTF-8, must
 not exceed 1,500 bytes and cannot be empty.
 """,
-  # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue)
-  ))
+        # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.MapValue)
+    ),
+)
 _sym_db.RegisterMessage(MapValue)
 _sym_db.RegisterMessage(MapValue.FieldsEntry)
 DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.firestore.v1beta1B\rDocumentProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1'))
+DESCRIPTOR._options = _descriptor._ParseOptions(
+    descriptor_pb2.FileOptions(),
+    _b(
+        "\n\034com.google.firestore.v1beta1B\rDocumentProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1"
+    ),
+)
 _DOCUMENT_FIELDSENTRY.has_options = True
-_DOCUMENT_FIELDSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+_DOCUMENT_FIELDSENTRY._options = _descriptor._ParseOptions(
+    descriptor_pb2.MessageOptions(), _b("8\001")
+)
 _MAPVALUE_FIELDSENTRY.has_options = True
-_MAPVALUE_FIELDSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+_MAPVALUE_FIELDSENTRY._options = _descriptor._ParseOptions(
+    descriptor_pb2.MessageOptions(), _b("8\001")
+)
 # @@protoc_insertion_point(module_scope)
diff --git a/firestore/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py
index a89435267cb2..07cb78fe03a9 100644
--- a/firestore/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py
+++ b/firestore/google/cloud/firestore_v1beta1/proto/document_pb2_grpc.py
@@ -1,3 +1,2 @@
 # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
 import grpc
-
diff --git a/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py
index 3d26d9c8f8e4..957acef2695c 100644
--- a/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py
+++ b/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2.py
@@ -2,45 +2,61 @@
 # source: google/cloud/firestore_v1beta1/proto/event_flow_document_change.proto
 import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
 from google.protobuf import descriptor_pb2
+
 # @@protoc_insertion_point(imports)
 _sym_db = _symbol_database.Default()
 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.cloud.firestore_v1beta1.proto import common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2
-from google.cloud.firestore_v1beta1.proto import document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2
+from google.cloud.firestore_v1beta1.proto import (
+    common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+    document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+)
 DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/cloud/firestore_v1beta1/proto/event_flow_document_change.proto',
-  package='google.firestore.v1beta1',
-  syntax='proto3',
-  serialized_pb=_b('\nEgoogle/cloud/firestore_v1beta1/proto/event_flow_document_change.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.protoB\xa2\x01\n\x1c\x63om.google.firestore.v1beta1B\x1c\x45ventFlowDocumentChangeProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1b\x06proto3')
-  ,
-  dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,])
-
+    name="google/cloud/firestore_v1beta1/proto/event_flow_document_change.proto",
+    package="google.firestore.v1beta1",
+    syntax="proto3",
+    serialized_pb=_b(
+        "\nEgoogle/cloud/firestore_v1beta1/proto/event_flow_document_change.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.protoB\xa2\x01\n\x1c\x63om.google.firestore.v1beta1B\x1c\x45ventFlowDocumentChangeProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1b\x06proto3"
+    ),
+    dependencies=[
+        google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+        google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,
+        google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,
+    ],
+)
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.firestore.v1beta1B\034EventFlowDocumentChangeProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\252\002\036Google.Cloud.Firestore.V1Beta1'))
+DESCRIPTOR._options = _descriptor._ParseOptions(
+    descriptor_pb2.FileOptions(),
+    _b(
+        "\n\034com.google.firestore.v1beta1B\034EventFlowDocumentChangeProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\252\002\036Google.Cloud.Firestore.V1Beta1"
+    ),
+)
 try:
-  # THESE ELEMENTS WILL BE DEPRECATED.
-  # Please use the generated *_pb2_grpc.py files instead.
-  import grpc
-  from grpc.beta import implementations as beta_implementations
-  from grpc.beta import interfaces as beta_interfaces
-  from grpc.framework.common import cardinality
-  from grpc.framework.interfaces.face import utilities as face_utilities
+    # THESE ELEMENTS WILL BE DEPRECATED.
+    # Please use the generated *_pb2_grpc.py files instead.
+    import grpc
+    from grpc.beta import implementations as beta_implementations
+    from grpc.beta import interfaces as beta_interfaces
+    from grpc.framework.common import cardinality
+    from grpc.framework.interfaces.face import utilities as face_utilities
 except ImportError:
-  pass
+    pass
 # @@protoc_insertion_point(module_scope)
diff --git a/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py
index a89435267cb2..07cb78fe03a9 100644
--- a/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py
+++ b/firestore/google/cloud/firestore_v1beta1/proto/event_flow_document_change_pb2_grpc.py
@@ -1,3 +1,2 @@
 # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
 import grpc
-
diff --git a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py
index 80f82785c417..8ebeb4edebbc 100644
--- a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py
+++ b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py
@@ -2,1575 +2,2814 @@
 # source: google/cloud/firestore_v1beta1/proto/firestore.proto
 import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
 from google.protobuf import symbol_database as _symbol_database
 from google.protobuf import descriptor_pb2
+
 # @@protoc_insertion_point(imports)
 _sym_db = _symbol_database.Default()
 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
-from google.cloud.firestore_v1beta1.proto import common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2
-from google.cloud.firestore_v1beta1.proto import document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2
-from google.cloud.firestore_v1beta1.proto import query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2
-from google.cloud.firestore_v1beta1.proto import write_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2
+from google.cloud.firestore_v1beta1.proto import (
+    common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2,
+)
+from google.cloud.firestore_v1beta1.proto import (
+    document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2,
+) +from google.cloud.firestore_v1beta1.proto import ( + query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2, +) +from google.cloud.firestore_v1beta1.proto import ( + write_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/firestore_v1beta1/proto/firestore.proto', - package='google.firestore.v1beta1', - syntax='proto3', - serialized_pb=_b('\n4google/cloud/firestore_v1beta1/proto/firestore.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x30google/cloud/firestore_v1beta1/proto/write.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xb8\x01\n\x12GetDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x04mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x03 \x01(\x0cH\x00\x12/\n\tread_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector\"\xa2\x02\n\x14ListDocumentsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x34\n\x04mask\x18\x07 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x08 \x01(\x0cH\x00\x12/\n\tread_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x14\n\x0cshow_missing\x18\x0c \x01(\x08\x42\x16\n\x14\x63onsistency_selector\"g\n\x15ListDocumentsResponse\x12\x35\n\tdocuments\x18\x01 \x03(\x0b\x32\".google.firestore.v1beta1.Document\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\xbf\x01\n\x15\x43reateDocumentRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x13\n\x0b\x64ocument_id\x18\x03 \x01(\t\x12\x34\n\x08\x64ocument\x18\x04 \x01(\x0b\x32\".google.firestore.v1beta1.Document\x12\x34\n\x04mask\x18\x05 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\"\x82\x02\n\x15UpdateDocumentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.firestore.v1beta1.Document\x12;\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\"g\n\x15\x44\x65leteDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x10\x63urrent_document\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\"\x9e\x02\n\x18\x42\x61tchGetDocumentsRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tdocuments\x18\x02 \x03(\t\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x04 \x01(\x0cH\x00\x12G\n\x0fnew_transaction\x18\x05 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x00\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector\"\xb1\x01\n\x19\x42\x61tchGetDocumentsResponse\x12\x33\n\x05\x66ound\x18\x01 
\x01(\x0b\x32\".google.firestore.v1beta1.DocumentH\x00\x12\x11\n\x07missing\x18\x02 \x01(\tH\x00\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\n\x06result\"j\n\x17\x42\x65ginTransactionRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12=\n\x07options\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptions\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"g\n\rCommitRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12/\n\x06writes\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c\"\x7f\n\x0e\x43ommitResponse\x12<\n\rwrite_results\x18\x01 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"8\n\x0fRollbackRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\"\x9f\x02\n\x0fRunQueryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x12\x15\n\x0btransaction\x18\x05 \x01(\x0cH\x01\x12G\n\x0fnew_transaction\x18\x06 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x01\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x42\x0c\n\nquery_typeB\x16\n\x14\x63onsistency_selector\"\xa5\x01\n\x10RunQueryResponse\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.firestore.v1beta1.Document\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0fskipped_results\x18\x04 \x01(\x05\"\xed\x01\n\x0cWriteRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12/\n\x06writes\x18\x03 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x14\n\x0cstream_token\x18\x04 \x01(\x0c\x12\x42\n\x06labels\x18\x05 \x03(\x0b\x32\x32.google.firestore.v1beta1.WriteRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa7\x01\n\rWriteResponse\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x14\n\x0cstream_token\x18\x02 \x01(\x0c\x12<\n\rwrite_results\x18\x03 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xf7\x01\n\rListenRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x36\n\nadd_target\x18\x02 \x01(\x0b\x32 .google.firestore.v1beta1.TargetH\x00\x12\x17\n\rremove_target\x18\x03 \x01(\x05H\x00\x12\x43\n\x06labels\x18\x04 \x03(\x0b\x32\x33.google.firestore.v1beta1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change\"\xee\x02\n\x0eListenResponse\x12?\n\rtarget_change\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.TargetChangeH\x00\x12\x43\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentChangeH\x00\x12\x43\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentDeleteH\x00\x12\x43\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentRemoveH\x00\x12;\n\x06\x66ilter\x18\x05 \x01(\x0b\x32).google.firestore.v1beta1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type\"\xb0\x03\n\x06Target\x12=\n\x05query\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.Target.QueryTargetH\x00\x12\x45\n\tdocuments\x18\x03 
\x01(\x0b\x32\x30.google.firestore.v1beta1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1ar\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type\"\xaf\x02\n\x0cTargetChange\x12S\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x37.google.firestore.v1beta1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04\"Q\n\x18ListCollectionIdsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xc8\x13\n\tFirestore\x12\x9e\x01\n\x0bGetDocument\x12,.google.firestore.v1beta1.GetDocumentRequest\x1a\".google.firestore.v1beta1.Document\"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc1\x01\n\rListDocuments\x12..google.firestore.v1beta1.ListDocumentsRequest\x1a/.google.firestore.v1beta1.ListDocumentsResponse\"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xbe\x01\n\x0e\x43reateDocument\x12/.google.firestore.v1beta1.CreateDocumentRequest\x1a\".google.firestore.v1beta1.Document\"W\x82\xd3\xe4\x93\x02Q\"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xb7\x01\n\x0eUpdateDocument\x12/.google.firestore.v1beta1.UpdateDocumentRequest\x1a\".google.firestore.v1beta1.Document\"P\x82\xd3\xe4\x93\x02J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\x12\x98\x01\n\x0e\x44\x65leteDocument\x12/.google.firestore.v1beta1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty\"=\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc8\x01\n\x11\x42\x61tchGetDocuments\x12\x32.google.firestore.v1beta1.BatchGetDocumentsRequest\x1a\x33.google.firestore.v1beta1.BatchGetDocumentsResponse\"H\x82\xd3\xe4\x93\x02\x42\"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xcb\x01\n\x10\x42\x65ginTransaction\x12\x31.google.firestore.v1beta1.BeginTransactionRequest\x1a\x32.google.firestore.v1beta1.BeginTransactionResponse\"P\x82\xd3\xe4\x93\x02J\"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\x12\xa3\x01\n\x06\x43ommit\x12\'.google.firestore.v1beta1.CommitRequest\x1a(.google.firestore.v1beta1.CommitResponse\"F\x82\xd3\xe4\x93\x02@\";/v1beta1/{database=projects/*/databases/*}/documents:commit:\x01*\x12\x97\x01\n\x08Rollback\x12).google.firestore.v1beta1.RollbackRequest\x1a\x16.google.protobuf.Empty\"H\x82\xd3\xe4\x93\x02\x42\"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\x01*\x12\xf4\x01\n\x08RunQuery\x12).google.firestore.v1beta1.RunQueryRequest\x1a*.google.firestore.v1beta1.RunQueryResponse\"\x8e\x01\x82\xd3\xe4\x93\x0
2\x87\x01\";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\x01*ZE\"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\xa3\x01\n\x05Write\x12&.google.firestore.v1beta1.WriteRequest\x1a\'.google.firestore.v1beta1.WriteResponse\"E\x82\xd3\xe4\x93\x02?\":/v1beta1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\xa7\x01\n\x06Listen\x12\'.google.firestore.v1beta1.ListenRequest\x1a(.google.firestore.v1beta1.ListenResponse\"F\x82\xd3\xe4\x93\x02@\";/v1beta1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x9f\x02\n\x11ListCollectionIds\x12\x32.google.firestore.v1beta1.ListCollectionIdsRequest\x1a\x33.google.firestore.v1beta1.ListCollectionIdsResponse\"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01\"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZN\"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*B\xbc\x01\n\x1c\x63om.google.firestore.v1beta1B\x0e\x46irestoreProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) - + name="google/cloud/firestore_v1beta1/proto/firestore.proto", + package="google.firestore.v1beta1", + syntax="proto3", + serialized_pb=_b( + '\n4google/cloud/firestore_v1beta1/proto/firestore.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x30google/cloud/firestore_v1beta1/proto/write.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xb8\x01\n\x12GetDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x04mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x03 \x01(\x0cH\x00\x12/\n\tread_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xa2\x02\n\x14ListDocumentsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x34\n\x04mask\x18\x07 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x08 \x01(\x0cH\x00\x12/\n\tread_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x14\n\x0cshow_missing\x18\x0c \x01(\x08\x42\x16\n\x14\x63onsistency_selector"g\n\x15ListDocumentsResponse\x12\x35\n\tdocuments\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xbf\x01\n\x15\x43reateDocumentRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x13\n\x0b\x64ocument_id\x18\x03 \x01(\t\x12\x34\n\x08\x64ocument\x18\x04 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x34\n\x04mask\x18\x05 
\x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask"\x82\x02\n\x15UpdateDocumentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12;\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"g\n\x15\x44\x65leteDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x10\x63urrent_document\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"\x9e\x02\n\x18\x42\x61tchGetDocumentsRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tdocuments\x18\x02 \x03(\t\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x04 \x01(\x0cH\x00\x12G\n\x0fnew_transaction\x18\x05 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x00\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xb1\x01\n\x19\x42\x61tchGetDocumentsResponse\x12\x33\n\x05\x66ound\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x11\n\x07missing\x18\x02 \x01(\tH\x00\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\n\x06result"j\n\x17\x42\x65ginTransactionRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12=\n\x07options\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptions"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c"g\n\rCommitRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12/\n\x06writes\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c"\x7f\n\x0e\x43ommitResponse\x12<\n\rwrite_results\x18\x01 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x0fRollbackRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c"\x9f\x02\n\x0fRunQueryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x12\x15\n\x0btransaction\x18\x05 \x01(\x0cH\x01\x12G\n\x0fnew_transaction\x18\x06 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x01\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x42\x0c\n\nquery_typeB\x16\n\x14\x63onsistency_selector"\xa5\x01\n\x10RunQueryResponse\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0fskipped_results\x18\x04 \x01(\x05"\xed\x01\n\x0cWriteRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12/\n\x06writes\x18\x03 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x14\n\x0cstream_token\x18\x04 \x01(\x0c\x12\x42\n\x06labels\x18\x05 \x03(\x0b\x32\x32.google.firestore.v1beta1.WriteRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa7\x01\n\rWriteResponse\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x14\n\x0cstream_token\x18\x02 \x01(\x0c\x12<\n\rwrite_results\x18\x03 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xf7\x01\n\rListenRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 
\x01(\t\x12\x36\n\nadd_target\x18\x02 \x01(\x0b\x32 .google.firestore.v1beta1.TargetH\x00\x12\x17\n\rremove_target\x18\x03 \x01(\x05H\x00\x12\x43\n\x06labels\x18\x04 \x03(\x0b\x32\x33.google.firestore.v1beta1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xee\x02\n\x0eListenResponse\x12?\n\rtarget_change\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.TargetChangeH\x00\x12\x43\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentChangeH\x00\x12\x43\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentDeleteH\x00\x12\x43\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentRemoveH\x00\x12;\n\x06\x66ilter\x18\x05 \x01(\x0b\x32).google.firestore.v1beta1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xb0\x03\n\x06Target\x12=\n\x05query\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.Target.QueryTargetH\x00\x12\x45\n\tdocuments\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1ar\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaf\x02\n\x0cTargetChange\x12S\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x37.google.firestore.v1beta1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"Q\n\x18ListCollectionIdsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xc8\x13\n\tFirestore\x12\x9e\x01\n\x0bGetDocument\x12,.google.firestore.v1beta1.GetDocumentRequest\x1a".google.firestore.v1beta1.Document"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc1\x01\n\rListDocuments\x12..google.firestore.v1beta1.ListDocumentsRequest\x1a/.google.firestore.v1beta1.ListDocumentsResponse"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xbe\x01\n\x0e\x43reateDocument\x12/.google.firestore.v1beta1.CreateDocumentRequest\x1a".google.firestore.v1beta1.Document"W\x82\xd3\xe4\x93\x02Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xb7\x01\n\x0eUpdateDocument\x12/.google.firestore.v1beta1.UpdateDocumentRequest\x1a".google.firestore.v1beta1.Document"P\x82\xd3\xe4\x93\x02J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\x12\x98\x01\n\x0e\x44\x65leteDocument\x12/.google.firestore.v1beta1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"=\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc8\x01\n\x11\x42\x61tchGetDocuments\x12\x32.google.firestore.v1beta1.BatchGetDocumentsRequest\x1a\x33.google.firestore.v1beta1.BatchGetDocumentsResponse"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xcb\x01\n\x10\x42\x65ginTransaction\x12\x31.google.firestore.v1beta1.BeginTransactionRequest\x1a\x32.google.firestore.v1beta1.BeginTransactionResponse"P\x82\xd3\xe4\x93\x02J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\x12\xa3\x01\n\x06\x43ommit\x12\'.google.firestore.v1beta1.CommitRequest\x1a(.google.firestore.v1beta1.CommitResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:commit:\x01*\x12\x97\x01\n\x08Rollback\x12).google.firestore.v1beta1.RollbackRequest\x1a\x16.google.protobuf.Empty"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\x01*\x12\xf4\x01\n\x08RunQuery\x12).google.firestore.v1beta1.RunQueryRequest\x1a*.google.firestore.v1beta1.RunQueryResponse"\x8e\x01\x82\xd3\xe4\x93\x02\x87\x01";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\x01*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\xa3\x01\n\x05Write\x12&.google.firestore.v1beta1.WriteRequest\x1a\'.google.firestore.v1beta1.WriteResponse"E\x82\xd3\xe4\x93\x02?":/v1beta1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\xa7\x01\n\x06Listen\x12\'.google.firestore.v1beta1.ListenRequest\x1a(.google.firestore.v1beta1.ListenResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x9f\x02\n\x11ListCollectionIds\x12\x32.google.firestore.v1beta1.ListCollectionIdsRequest\x1a\x33.google.firestore.v1beta1.ListCollectionIdsResponse"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*B\xbc\x01\n\x1c\x63om.google.firestore.v1beta1B\x0e\x46irestoreProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, + 
google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + ], +) _TARGETCHANGE_TARGETCHANGETYPE = _descriptor.EnumDescriptor( - name='TargetChangeType', - full_name='google.firestore.v1beta1.TargetChange.TargetChangeType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='NO_CHANGE', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ADD', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='REMOVE', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='CURRENT', index=3, number=3, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='RESET', index=4, number=4, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=4614, - serialized_end=4692, + name="TargetChangeType", + full_name="google.firestore.v1beta1.TargetChange.TargetChangeType", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="NO_CHANGE", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ADD", index=1, number=1, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="REMOVE", index=2, number=2, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CURRENT", index=3, number=3, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="RESET", index=4, number=4, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=4614, + serialized_end=4692, ) _sym_db.RegisterEnumDescriptor(_TARGETCHANGE_TARGETCHANGETYPE) _GETDOCUMENTREQUEST = _descriptor.Descriptor( - name='GetDocumentRequest', - full_name='google.firestore.v1beta1.GetDocumentRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.firestore.v1beta1.GetDocumentRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='mask', full_name='google.firestore.v1beta1.GetDocumentRequest.mask', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.GetDocumentRequest.transaction', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.GetDocumentRequest.read_time', index=3, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='consistency_selector', full_name='google.firestore.v1beta1.GetDocumentRequest.consistency_selector', - index=0, containing_type=None, fields=[]), - ], - serialized_start=404, - serialized_end=588, + name="GetDocumentRequest", + full_name="google.firestore.v1beta1.GetDocumentRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.firestore.v1beta1.GetDocumentRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mask", + full_name="google.firestore.v1beta1.GetDocumentRequest.mask", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.GetDocumentRequest.transaction", + index=2, + number=3, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.GetDocumentRequest.read_time", + index=3, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="consistency_selector", + full_name="google.firestore.v1beta1.GetDocumentRequest.consistency_selector", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=404, + serialized_end=588, ) _LISTDOCUMENTSREQUEST = _descriptor.Descriptor( - name='ListDocumentsRequest', - full_name='google.firestore.v1beta1.ListDocumentsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.firestore.v1beta1.ListDocumentsRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='collection_id', full_name='google.firestore.v1beta1.ListDocumentsRequest.collection_id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', 
full_name='google.firestore.v1beta1.ListDocumentsRequest.page_size', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.firestore.v1beta1.ListDocumentsRequest.page_token', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='order_by', full_name='google.firestore.v1beta1.ListDocumentsRequest.order_by', index=4, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='mask', full_name='google.firestore.v1beta1.ListDocumentsRequest.mask', index=5, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.ListDocumentsRequest.transaction', index=6, - number=8, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.ListDocumentsRequest.read_time', index=7, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='show_missing', full_name='google.firestore.v1beta1.ListDocumentsRequest.show_missing', index=8, - number=12, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='consistency_selector', full_name='google.firestore.v1beta1.ListDocumentsRequest.consistency_selector', - index=0, containing_type=None, fields=[]), - ], - serialized_start=591, - serialized_end=881, + name="ListDocumentsRequest", + full_name="google.firestore.v1beta1.ListDocumentsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.firestore.v1beta1.ListDocumentsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="collection_id", + full_name="google.firestore.v1beta1.ListDocumentsRequest.collection_id", + 
index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.firestore.v1beta1.ListDocumentsRequest.page_size", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.firestore.v1beta1.ListDocumentsRequest.page_token", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="order_by", + full_name="google.firestore.v1beta1.ListDocumentsRequest.order_by", + index=4, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mask", + full_name="google.firestore.v1beta1.ListDocumentsRequest.mask", + index=5, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.ListDocumentsRequest.transaction", + index=6, + number=8, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.ListDocumentsRequest.read_time", + index=7, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="show_missing", + full_name="google.firestore.v1beta1.ListDocumentsRequest.show_missing", + index=8, + number=12, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="consistency_selector", + full_name="google.firestore.v1beta1.ListDocumentsRequest.consistency_selector", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=591, + serialized_end=881, ) _LISTDOCUMENTSRESPONSE = _descriptor.Descriptor( - name='ListDocumentsResponse', - full_name='google.firestore.v1beta1.ListDocumentsResponse', - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='documents', full_name='google.firestore.v1beta1.ListDocumentsResponse.documents', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.firestore.v1beta1.ListDocumentsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=883, - serialized_end=986, + name="ListDocumentsResponse", + full_name="google.firestore.v1beta1.ListDocumentsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="documents", + full_name="google.firestore.v1beta1.ListDocumentsResponse.documents", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.firestore.v1beta1.ListDocumentsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=883, + serialized_end=986, ) _CREATEDOCUMENTREQUEST = _descriptor.Descriptor( - name='CreateDocumentRequest', - full_name='google.firestore.v1beta1.CreateDocumentRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.firestore.v1beta1.CreateDocumentRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='collection_id', full_name='google.firestore.v1beta1.CreateDocumentRequest.collection_id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='document_id', full_name='google.firestore.v1beta1.CreateDocumentRequest.document_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='document', 
full_name='google.firestore.v1beta1.CreateDocumentRequest.document', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='mask', full_name='google.firestore.v1beta1.CreateDocumentRequest.mask', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=989, - serialized_end=1180, + name="CreateDocumentRequest", + full_name="google.firestore.v1beta1.CreateDocumentRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.firestore.v1beta1.CreateDocumentRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="collection_id", + full_name="google.firestore.v1beta1.CreateDocumentRequest.collection_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="document_id", + full_name="google.firestore.v1beta1.CreateDocumentRequest.document_id", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="document", + full_name="google.firestore.v1beta1.CreateDocumentRequest.document", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mask", + full_name="google.firestore.v1beta1.CreateDocumentRequest.mask", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=989, + serialized_end=1180, ) _UPDATEDOCUMENTREQUEST = _descriptor.Descriptor( - name='UpdateDocumentRequest', - full_name='google.firestore.v1beta1.UpdateDocumentRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='document', full_name='google.firestore.v1beta1.UpdateDocumentRequest.document', index=0, - number=1, type=11, cpp_type=10, 
label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_mask', full_name='google.firestore.v1beta1.UpdateDocumentRequest.update_mask', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='mask', full_name='google.firestore.v1beta1.UpdateDocumentRequest.mask', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='current_document', full_name='google.firestore.v1beta1.UpdateDocumentRequest.current_document', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1183, - serialized_end=1441, + name="UpdateDocumentRequest", + full_name="google.firestore.v1beta1.UpdateDocumentRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="document", + full_name="google.firestore.v1beta1.UpdateDocumentRequest.document", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.firestore.v1beta1.UpdateDocumentRequest.update_mask", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mask", + full_name="google.firestore.v1beta1.UpdateDocumentRequest.mask", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="current_document", + full_name="google.firestore.v1beta1.UpdateDocumentRequest.current_document", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1183, + serialized_end=1441, ) _DELETEDOCUMENTREQUEST = _descriptor.Descriptor( - name='DeleteDocumentRequest', - full_name='google.firestore.v1beta1.DeleteDocumentRequest', - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.firestore.v1beta1.DeleteDocumentRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='current_document', full_name='google.firestore.v1beta1.DeleteDocumentRequest.current_document', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1443, - serialized_end=1546, + name="DeleteDocumentRequest", + full_name="google.firestore.v1beta1.DeleteDocumentRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.firestore.v1beta1.DeleteDocumentRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="current_document", + full_name="google.firestore.v1beta1.DeleteDocumentRequest.current_document", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1443, + serialized_end=1546, ) _BATCHGETDOCUMENTSREQUEST = _descriptor.Descriptor( - name='BatchGetDocumentsRequest', - full_name='google.firestore.v1beta1.BatchGetDocumentsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='database', full_name='google.firestore.v1beta1.BatchGetDocumentsRequest.database', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='documents', full_name='google.firestore.v1beta1.BatchGetDocumentsRequest.documents', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='mask', full_name='google.firestore.v1beta1.BatchGetDocumentsRequest.mask', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transaction', 
full_name='google.firestore.v1beta1.BatchGetDocumentsRequest.transaction', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='new_transaction', full_name='google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.BatchGetDocumentsRequest.read_time', index=5, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='consistency_selector', full_name='google.firestore.v1beta1.BatchGetDocumentsRequest.consistency_selector', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1549, - serialized_end=1835, + name="BatchGetDocumentsRequest", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="database", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.database", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="documents", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.documents", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mask", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.mask", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.transaction", + index=3, + number=4, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="new_transaction", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.read_time", + index=5, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="consistency_selector", + full_name="google.firestore.v1beta1.BatchGetDocumentsRequest.consistency_selector", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1549, + serialized_end=1835, ) _BATCHGETDOCUMENTSRESPONSE = _descriptor.Descriptor( - name='BatchGetDocumentsResponse', - full_name='google.firestore.v1beta1.BatchGetDocumentsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='found', full_name='google.firestore.v1beta1.BatchGetDocumentsResponse.found', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='missing', full_name='google.firestore.v1beta1.BatchGetDocumentsResponse.missing', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.BatchGetDocumentsResponse.transaction', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.BatchGetDocumentsResponse.read_time', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='result', full_name='google.firestore.v1beta1.BatchGetDocumentsResponse.result', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1838, - serialized_end=2015, + name="BatchGetDocumentsResponse", + full_name="google.firestore.v1beta1.BatchGetDocumentsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="found", + full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.found", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="missing", + full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.missing", + index=1, + number=2, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.transaction", + index=2, + number=3, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.read_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="result", + full_name="google.firestore.v1beta1.BatchGetDocumentsResponse.result", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1838, + serialized_end=2015, ) _BEGINTRANSACTIONREQUEST = _descriptor.Descriptor( - name='BeginTransactionRequest', - full_name='google.firestore.v1beta1.BeginTransactionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='database', full_name='google.firestore.v1beta1.BeginTransactionRequest.database', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='options', full_name='google.firestore.v1beta1.BeginTransactionRequest.options', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2017, - serialized_end=2123, + name="BeginTransactionRequest", + full_name="google.firestore.v1beta1.BeginTransactionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="database", + full_name="google.firestore.v1beta1.BeginTransactionRequest.database", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="options", + full_name="google.firestore.v1beta1.BeginTransactionRequest.options", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + 
options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2017, + serialized_end=2123, ) _BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor( - name='BeginTransactionResponse', - full_name='google.firestore.v1beta1.BeginTransactionResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.BeginTransactionResponse.transaction', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2125, - serialized_end=2172, + name="BeginTransactionResponse", + full_name="google.firestore.v1beta1.BeginTransactionResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.BeginTransactionResponse.transaction", + index=0, + number=1, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2125, + serialized_end=2172, ) _COMMITREQUEST = _descriptor.Descriptor( - name='CommitRequest', - full_name='google.firestore.v1beta1.CommitRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='database', full_name='google.firestore.v1beta1.CommitRequest.database', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='writes', full_name='google.firestore.v1beta1.CommitRequest.writes', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.CommitRequest.transaction', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2174, - serialized_end=2277, + name="CommitRequest", + full_name="google.firestore.v1beta1.CommitRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="database", + full_name="google.firestore.v1beta1.CommitRequest.database", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="writes", + full_name="google.firestore.v1beta1.CommitRequest.writes", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.CommitRequest.transaction", + index=2, + number=3, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2174, + serialized_end=2277, ) _COMMITRESPONSE = _descriptor.Descriptor( - name='CommitResponse', - full_name='google.firestore.v1beta1.CommitResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='write_results', full_name='google.firestore.v1beta1.CommitResponse.write_results', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='commit_time', full_name='google.firestore.v1beta1.CommitResponse.commit_time', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2279, - serialized_end=2406, + name="CommitResponse", + full_name="google.firestore.v1beta1.CommitResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="write_results", + full_name="google.firestore.v1beta1.CommitResponse.write_results", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="commit_time", + full_name="google.firestore.v1beta1.CommitResponse.commit_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2279, + serialized_end=2406, ) _ROLLBACKREQUEST = _descriptor.Descriptor( - name='RollbackRequest', - full_name='google.firestore.v1beta1.RollbackRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name='database', full_name='google.firestore.v1beta1.RollbackRequest.database', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.RollbackRequest.transaction', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2408, - serialized_end=2464, + name="RollbackRequest", + full_name="google.firestore.v1beta1.RollbackRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="database", + full_name="google.firestore.v1beta1.RollbackRequest.database", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.RollbackRequest.transaction", + index=1, + number=2, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2408, + serialized_end=2464, ) _RUNQUERYREQUEST = _descriptor.Descriptor( - name='RunQueryRequest', - full_name='google.firestore.v1beta1.RunQueryRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.firestore.v1beta1.RunQueryRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='structured_query', full_name='google.firestore.v1beta1.RunQueryRequest.structured_query', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.RunQueryRequest.transaction', index=2, - number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='new_transaction', full_name='google.firestore.v1beta1.RunQueryRequest.new_transaction', index=3, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.RunQueryRequest.read_time', index=4, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='query_type', full_name='google.firestore.v1beta1.RunQueryRequest.query_type', - index=0, containing_type=None, fields=[]), - _descriptor.OneofDescriptor( - name='consistency_selector', full_name='google.firestore.v1beta1.RunQueryRequest.consistency_selector', - index=1, containing_type=None, fields=[]), - ], - serialized_start=2467, - serialized_end=2754, + name="RunQueryRequest", + full_name="google.firestore.v1beta1.RunQueryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.firestore.v1beta1.RunQueryRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="structured_query", + full_name="google.firestore.v1beta1.RunQueryRequest.structured_query", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.RunQueryRequest.transaction", + index=2, + number=5, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="new_transaction", + full_name="google.firestore.v1beta1.RunQueryRequest.new_transaction", + index=3, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.RunQueryRequest.read_time", + index=4, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="query_type", + full_name="google.firestore.v1beta1.RunQueryRequest.query_type", + index=0, + containing_type=None, + fields=[], + ), + _descriptor.OneofDescriptor( + name="consistency_selector", + full_name="google.firestore.v1beta1.RunQueryRequest.consistency_selector", + 
index=1, + containing_type=None, + fields=[], + ), + ], + serialized_start=2467, + serialized_end=2754, ) _RUNQUERYRESPONSE = _descriptor.Descriptor( - name='RunQueryResponse', - full_name='google.firestore.v1beta1.RunQueryResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='transaction', full_name='google.firestore.v1beta1.RunQueryResponse.transaction', index=0, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='document', full_name='google.firestore.v1beta1.RunQueryResponse.document', index=1, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.RunQueryResponse.read_time', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='skipped_results', full_name='google.firestore.v1beta1.RunQueryResponse.skipped_results', index=3, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2757, - serialized_end=2922, + name="RunQueryResponse", + full_name="google.firestore.v1beta1.RunQueryResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="transaction", + full_name="google.firestore.v1beta1.RunQueryResponse.transaction", + index=0, + number=2, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="document", + full_name="google.firestore.v1beta1.RunQueryResponse.document", + index=1, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.RunQueryResponse.read_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="skipped_results", + full_name="google.firestore.v1beta1.RunQueryResponse.skipped_results", + index=3, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2757, + serialized_end=2922, ) _WRITEREQUEST_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.firestore.v1beta1.WriteRequest.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.firestore.v1beta1.WriteRequest.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.firestore.v1beta1.WriteRequest.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3117, - serialized_end=3162, + name="LabelsEntry", + full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.firestore.v1beta1.WriteRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3117, + serialized_end=3162, ) _WRITEREQUEST = _descriptor.Descriptor( - name='WriteRequest', - full_name='google.firestore.v1beta1.WriteRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='database', full_name='google.firestore.v1beta1.WriteRequest.database', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='stream_id', full_name='google.firestore.v1beta1.WriteRequest.stream_id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='writes', full_name='google.firestore.v1beta1.WriteRequest.writes', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='stream_token', full_name='google.firestore.v1beta1.WriteRequest.stream_token', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.firestore.v1beta1.WriteRequest.labels', index=4, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_WRITEREQUEST_LABELSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2925, - serialized_end=3162, + name="WriteRequest", + full_name="google.firestore.v1beta1.WriteRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="database", + full_name="google.firestore.v1beta1.WriteRequest.database", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="stream_id", + full_name="google.firestore.v1beta1.WriteRequest.stream_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="writes", + full_name="google.firestore.v1beta1.WriteRequest.writes", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="stream_token", + full_name="google.firestore.v1beta1.WriteRequest.stream_token", + index=3, + number=4, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.firestore.v1beta1.WriteRequest.labels", + index=4, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_WRITEREQUEST_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=2925, + serialized_end=3162, ) _WRITERESPONSE = _descriptor.Descriptor( - name='WriteResponse', - full_name='google.firestore.v1beta1.WriteResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='stream_id', full_name='google.firestore.v1beta1.WriteResponse.stream_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='stream_token', full_name='google.firestore.v1beta1.WriteResponse.stream_token', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='write_results', full_name='google.firestore.v1beta1.WriteResponse.write_results', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='commit_time', full_name='google.firestore.v1beta1.WriteResponse.commit_time', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3165, - serialized_end=3332, + name="WriteResponse", + full_name="google.firestore.v1beta1.WriteResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="stream_id", + full_name="google.firestore.v1beta1.WriteResponse.stream_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="stream_token", + full_name="google.firestore.v1beta1.WriteResponse.stream_token", + index=1, + number=2, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="write_results", + full_name="google.firestore.v1beta1.WriteResponse.write_results", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="commit_time", + full_name="google.firestore.v1beta1.WriteResponse.commit_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], 
+ extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3165, + serialized_end=3332, ) _LISTENREQUEST_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.firestore.v1beta1.ListenRequest.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.firestore.v1beta1.ListenRequest.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.firestore.v1beta1.ListenRequest.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3117, - serialized_end=3162, + name="LabelsEntry", + full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.firestore.v1beta1.ListenRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3117, + serialized_end=3162, ) _LISTENREQUEST = _descriptor.Descriptor( - name='ListenRequest', - full_name='google.firestore.v1beta1.ListenRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='database', full_name='google.firestore.v1beta1.ListenRequest.database', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='add_target', full_name='google.firestore.v1beta1.ListenRequest.add_target', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - 
_descriptor.FieldDescriptor( - name='remove_target', full_name='google.firestore.v1beta1.ListenRequest.remove_target', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.firestore.v1beta1.ListenRequest.labels', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_LISTENREQUEST_LABELSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='target_change', full_name='google.firestore.v1beta1.ListenRequest.target_change', - index=0, containing_type=None, fields=[]), - ], - serialized_start=3335, - serialized_end=3582, + name="ListenRequest", + full_name="google.firestore.v1beta1.ListenRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="database", + full_name="google.firestore.v1beta1.ListenRequest.database", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="add_target", + full_name="google.firestore.v1beta1.ListenRequest.add_target", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="remove_target", + full_name="google.firestore.v1beta1.ListenRequest.remove_target", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.firestore.v1beta1.ListenRequest.labels", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_LISTENREQUEST_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="target_change", + full_name="google.firestore.v1beta1.ListenRequest.target_change", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=3335, + serialized_end=3582, ) _LISTENRESPONSE = _descriptor.Descriptor( - name='ListenResponse', - full_name='google.firestore.v1beta1.ListenResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='target_change', full_name='google.firestore.v1beta1.ListenResponse.target_change', index=0, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, 
default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='document_change', full_name='google.firestore.v1beta1.ListenResponse.document_change', index=1, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='document_delete', full_name='google.firestore.v1beta1.ListenResponse.document_delete', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='document_remove', full_name='google.firestore.v1beta1.ListenResponse.document_remove', index=3, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='filter', full_name='google.firestore.v1beta1.ListenResponse.filter', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='response_type', full_name='google.firestore.v1beta1.ListenResponse.response_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=3585, - serialized_end=3951, + name="ListenResponse", + full_name="google.firestore.v1beta1.ListenResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="target_change", + full_name="google.firestore.v1beta1.ListenResponse.target_change", + index=0, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="document_change", + full_name="google.firestore.v1beta1.ListenResponse.document_change", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="document_delete", + full_name="google.firestore.v1beta1.ListenResponse.document_delete", + index=2, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="document_remove", + full_name="google.firestore.v1beta1.ListenResponse.document_remove", + index=3, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.firestore.v1beta1.ListenResponse.filter", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="response_type", + full_name="google.firestore.v1beta1.ListenResponse.response_type", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=3585, + serialized_end=3951, ) _TARGET_DOCUMENTSTARGET = _descriptor.Descriptor( - name='DocumentsTarget', - full_name='google.firestore.v1beta1.Target.DocumentsTarget', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='documents', full_name='google.firestore.v1beta1.Target.DocumentsTarget.documents', index=0, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4204, - serialized_end=4240, + name="DocumentsTarget", + full_name="google.firestore.v1beta1.Target.DocumentsTarget", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="documents", + full_name="google.firestore.v1beta1.Target.DocumentsTarget.documents", + index=0, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4204, + serialized_end=4240, ) _TARGET_QUERYTARGET = _descriptor.Descriptor( - name='QueryTarget', - full_name='google.firestore.v1beta1.Target.QueryTarget', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.firestore.v1beta1.Target.QueryTarget.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='structured_query', full_name='google.firestore.v1beta1.Target.QueryTarget.structured_query', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='query_type', 
full_name='google.firestore.v1beta1.Target.QueryTarget.query_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=4242, - serialized_end=4356, + name="QueryTarget", + full_name="google.firestore.v1beta1.Target.QueryTarget", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.firestore.v1beta1.Target.QueryTarget.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="structured_query", + full_name="google.firestore.v1beta1.Target.QueryTarget.structured_query", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="query_type", + full_name="google.firestore.v1beta1.Target.QueryTarget.query_type", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=4242, + serialized_end=4356, ) _TARGET = _descriptor.Descriptor( - name='Target', - full_name='google.firestore.v1beta1.Target', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='query', full_name='google.firestore.v1beta1.Target.query', index=0, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='documents', full_name='google.firestore.v1beta1.Target.documents', index=1, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='resume_token', full_name='google.firestore.v1beta1.Target.resume_token', index=2, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.Target.read_time', index=3, - number=11, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='target_id', full_name='google.firestore.v1beta1.Target.target_id', index=4, - number=5, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='once', full_name='google.firestore.v1beta1.Target.once', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_TARGET_DOCUMENTSTARGET, _TARGET_QUERYTARGET, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='target_type', full_name='google.firestore.v1beta1.Target.target_type', - index=0, containing_type=None, fields=[]), - _descriptor.OneofDescriptor( - name='resume_type', full_name='google.firestore.v1beta1.Target.resume_type', - index=1, containing_type=None, fields=[]), - ], - serialized_start=3954, - serialized_end=4386, + name="Target", + full_name="google.firestore.v1beta1.Target", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="query", + full_name="google.firestore.v1beta1.Target.query", + index=0, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="documents", + full_name="google.firestore.v1beta1.Target.documents", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resume_token", + full_name="google.firestore.v1beta1.Target.resume_token", + index=2, + number=4, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.Target.read_time", + index=3, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_id", + full_name="google.firestore.v1beta1.Target.target_id", + index=4, + number=5, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="once", + full_name="google.firestore.v1beta1.Target.once", + index=5, + number=6, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_TARGET_DOCUMENTSTARGET, _TARGET_QUERYTARGET], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="target_type", + full_name="google.firestore.v1beta1.Target.target_type", + index=0, + containing_type=None, + fields=[], + ), + _descriptor.OneofDescriptor( + name="resume_type", + full_name="google.firestore.v1beta1.Target.resume_type", + index=1, + containing_type=None, + fields=[], + ), + ], + 
serialized_start=3954, + serialized_end=4386, ) _TARGETCHANGE = _descriptor.Descriptor( - name='TargetChange', - full_name='google.firestore.v1beta1.TargetChange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='target_change_type', full_name='google.firestore.v1beta1.TargetChange.target_change_type', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='target_ids', full_name='google.firestore.v1beta1.TargetChange.target_ids', index=1, - number=2, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='cause', full_name='google.firestore.v1beta1.TargetChange.cause', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='resume_token', full_name='google.firestore.v1beta1.TargetChange.resume_token', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.TargetChange.read_time', index=4, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _TARGETCHANGE_TARGETCHANGETYPE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4389, - serialized_end=4692, + name="TargetChange", + full_name="google.firestore.v1beta1.TargetChange", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="target_change_type", + full_name="google.firestore.v1beta1.TargetChange.target_change_type", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_ids", + full_name="google.firestore.v1beta1.TargetChange.target_ids", + index=1, + number=2, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cause", + full_name="google.firestore.v1beta1.TargetChange.cause", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resume_token", + 
full_name="google.firestore.v1beta1.TargetChange.resume_token", + index=3, + number=4, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.TargetChange.read_time", + index=4, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_TARGETCHANGE_TARGETCHANGETYPE], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4389, + serialized_end=4692, ) _LISTCOLLECTIONIDSREQUEST = _descriptor.Descriptor( - name='ListCollectionIdsRequest', - full_name='google.firestore.v1beta1.ListCollectionIdsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='parent', full_name='google.firestore.v1beta1.ListCollectionIdsRequest.parent', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.firestore.v1beta1.ListCollectionIdsRequest.page_size', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.firestore.v1beta1.ListCollectionIdsRequest.page_token', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4694, - serialized_end=4775, + name="ListCollectionIdsRequest", + full_name="google.firestore.v1beta1.ListCollectionIdsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.firestore.v1beta1.ListCollectionIdsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.firestore.v1beta1.ListCollectionIdsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.firestore.v1beta1.ListCollectionIdsRequest.page_token", + index=2, + 
number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4694, + serialized_end=4775, ) _LISTCOLLECTIONIDSRESPONSE = _descriptor.Descriptor( - name='ListCollectionIdsResponse', - full_name='google.firestore.v1beta1.ListCollectionIdsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='collection_ids', full_name='google.firestore.v1beta1.ListCollectionIdsResponse.collection_ids', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.firestore.v1beta1.ListCollectionIdsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4777, - serialized_end=4853, -) - -_GETDOCUMENTREQUEST.fields_by_name['mask'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK -_GETDOCUMENTREQUEST.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_GETDOCUMENTREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _GETDOCUMENTREQUEST.fields_by_name['transaction']) -_GETDOCUMENTREQUEST.fields_by_name['transaction'].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name['consistency_selector'] -_GETDOCUMENTREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _GETDOCUMENTREQUEST.fields_by_name['read_time']) -_GETDOCUMENTREQUEST.fields_by_name['read_time'].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name['consistency_selector'] -_LISTDOCUMENTSREQUEST.fields_by_name['mask'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK -_LISTDOCUMENTSREQUEST.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LISTDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _LISTDOCUMENTSREQUEST.fields_by_name['transaction']) -_LISTDOCUMENTSREQUEST.fields_by_name['transaction'].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'] -_LISTDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _LISTDOCUMENTSREQUEST.fields_by_name['read_time']) -_LISTDOCUMENTSREQUEST.fields_by_name['read_time'].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'] -_LISTDOCUMENTSRESPONSE.fields_by_name['documents'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_CREATEDOCUMENTREQUEST.fields_by_name['document'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_CREATEDOCUMENTREQUEST.fields_by_name['mask'].message_type 
= google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK -_UPDATEDOCUMENTREQUEST.fields_by_name['document'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_UPDATEDOCUMENTREQUEST.fields_by_name['update_mask'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK -_UPDATEDOCUMENTREQUEST.fields_by_name['mask'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK -_UPDATEDOCUMENTREQUEST.fields_by_name['current_document'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION -_DELETEDOCUMENTREQUEST.fields_by_name['current_document'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION -_BATCHGETDOCUMENTSREQUEST.fields_by_name['mask'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK -_BATCHGETDOCUMENTSREQUEST.fields_by_name['new_transaction'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS -_BATCHGETDOCUMENTSREQUEST.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_BATCHGETDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _BATCHGETDOCUMENTSREQUEST.fields_by_name['transaction']) -_BATCHGETDOCUMENTSREQUEST.fields_by_name['transaction'].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'] -_BATCHGETDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _BATCHGETDOCUMENTSREQUEST.fields_by_name['new_transaction']) -_BATCHGETDOCUMENTSREQUEST.fields_by_name['new_transaction'].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'] -_BATCHGETDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _BATCHGETDOCUMENTSREQUEST.fields_by_name['read_time']) -_BATCHGETDOCUMENTSREQUEST.fields_by_name['read_time'].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name['consistency_selector'] -_BATCHGETDOCUMENTSRESPONSE.fields_by_name['found'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_BATCHGETDOCUMENTSRESPONSE.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name['result'].fields.append( - _BATCHGETDOCUMENTSRESPONSE.fields_by_name['found']) -_BATCHGETDOCUMENTSRESPONSE.fields_by_name['found'].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name['result'] -_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name['result'].fields.append( - _BATCHGETDOCUMENTSRESPONSE.fields_by_name['missing']) -_BATCHGETDOCUMENTSRESPONSE.fields_by_name['missing'].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name['result'] -_BEGINTRANSACTIONREQUEST.fields_by_name['options'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS -_COMMITREQUEST.fields_by_name['writes'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE -_COMMITRESPONSE.fields_by_name['write_results'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT -_COMMITRESPONSE.fields_by_name['commit_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_RUNQUERYREQUEST.fields_by_name['structured_query'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY 
-_RUNQUERYREQUEST.fields_by_name['new_transaction'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS -_RUNQUERYREQUEST.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append( - _RUNQUERYREQUEST.fields_by_name['structured_query']) -_RUNQUERYREQUEST.fields_by_name['structured_query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type'] -_RUNQUERYREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _RUNQUERYREQUEST.fields_by_name['transaction']) -_RUNQUERYREQUEST.fields_by_name['transaction'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['consistency_selector'] -_RUNQUERYREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _RUNQUERYREQUEST.fields_by_name['new_transaction']) -_RUNQUERYREQUEST.fields_by_name['new_transaction'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['consistency_selector'] -_RUNQUERYREQUEST.oneofs_by_name['consistency_selector'].fields.append( - _RUNQUERYREQUEST.fields_by_name['read_time']) -_RUNQUERYREQUEST.fields_by_name['read_time'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['consistency_selector'] -_RUNQUERYRESPONSE.fields_by_name['document'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_RUNQUERYRESPONSE.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP + name="ListCollectionIdsResponse", + full_name="google.firestore.v1beta1.ListCollectionIdsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="collection_ids", + full_name="google.firestore.v1beta1.ListCollectionIdsResponse.collection_ids", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.firestore.v1beta1.ListCollectionIdsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4777, + serialized_end=4853, +) + +_GETDOCUMENTREQUEST.fields_by_name[ + "mask" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK +) +_GETDOCUMENTREQUEST.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _GETDOCUMENTREQUEST.fields_by_name["transaction"] +) +_GETDOCUMENTREQUEST.fields_by_name[ + "transaction" +].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"] +_GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _GETDOCUMENTREQUEST.fields_by_name["read_time"] +) +_GETDOCUMENTREQUEST.fields_by_name[ + "read_time" +].containing_oneof = _GETDOCUMENTREQUEST.oneofs_by_name["consistency_selector"] +_LISTDOCUMENTSREQUEST.fields_by_name[ + "mask" +].message_type = ( + 
google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK +) +_LISTDOCUMENTSREQUEST.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _LISTDOCUMENTSREQUEST.fields_by_name["transaction"] +) +_LISTDOCUMENTSREQUEST.fields_by_name[ + "transaction" +].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"] +_LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _LISTDOCUMENTSREQUEST.fields_by_name["read_time"] +) +_LISTDOCUMENTSREQUEST.fields_by_name[ + "read_time" +].containing_oneof = _LISTDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"] +_LISTDOCUMENTSRESPONSE.fields_by_name[ + "documents" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_CREATEDOCUMENTREQUEST.fields_by_name[ + "document" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_CREATEDOCUMENTREQUEST.fields_by_name[ + "mask" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK +) +_UPDATEDOCUMENTREQUEST.fields_by_name[ + "document" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_UPDATEDOCUMENTREQUEST.fields_by_name[ + "update_mask" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK +) +_UPDATEDOCUMENTREQUEST.fields_by_name[ + "mask" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK +) +_UPDATEDOCUMENTREQUEST.fields_by_name[ + "current_document" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION +) +_DELETEDOCUMENTREQUEST.fields_by_name[ + "current_document" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION +) +_BATCHGETDOCUMENTSREQUEST.fields_by_name[ + "mask" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK +) +_BATCHGETDOCUMENTSREQUEST.fields_by_name[ + "new_transaction" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS +) +_BATCHGETDOCUMENTSREQUEST.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _BATCHGETDOCUMENTSREQUEST.fields_by_name["transaction"] +) +_BATCHGETDOCUMENTSREQUEST.fields_by_name[ + "transaction" +].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"] +_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _BATCHGETDOCUMENTSREQUEST.fields_by_name["new_transaction"] +) +_BATCHGETDOCUMENTSREQUEST.fields_by_name[ + "new_transaction" +].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"] +_BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _BATCHGETDOCUMENTSREQUEST.fields_by_name["read_time"] +) +_BATCHGETDOCUMENTSREQUEST.fields_by_name[ + "read_time" +].containing_oneof = _BATCHGETDOCUMENTSREQUEST.oneofs_by_name["consistency_selector"] +_BATCHGETDOCUMENTSRESPONSE.fields_by_name[ + "found" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_BATCHGETDOCUMENTSRESPONSE.fields_by_name[ + "read_time" 
+].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"].fields.append( + _BATCHGETDOCUMENTSRESPONSE.fields_by_name["found"] +) +_BATCHGETDOCUMENTSRESPONSE.fields_by_name[ + "found" +].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"] +_BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"].fields.append( + _BATCHGETDOCUMENTSRESPONSE.fields_by_name["missing"] +) +_BATCHGETDOCUMENTSRESPONSE.fields_by_name[ + "missing" +].containing_oneof = _BATCHGETDOCUMENTSRESPONSE.oneofs_by_name["result"] +_BEGINTRANSACTIONREQUEST.fields_by_name[ + "options" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS +) +_COMMITREQUEST.fields_by_name[ + "writes" +].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE +_COMMITRESPONSE.fields_by_name[ + "write_results" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT +) +_COMMITRESPONSE.fields_by_name[ + "commit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_RUNQUERYREQUEST.fields_by_name[ + "structured_query" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY +) +_RUNQUERYREQUEST.fields_by_name[ + "new_transaction" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._TRANSACTIONOPTIONS +) +_RUNQUERYREQUEST.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_RUNQUERYREQUEST.oneofs_by_name["query_type"].fields.append( + _RUNQUERYREQUEST.fields_by_name["structured_query"] +) +_RUNQUERYREQUEST.fields_by_name[ + "structured_query" +].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["query_type"] +_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _RUNQUERYREQUEST.fields_by_name["transaction"] +) +_RUNQUERYREQUEST.fields_by_name[ + "transaction" +].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"] +_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _RUNQUERYREQUEST.fields_by_name["new_transaction"] +) +_RUNQUERYREQUEST.fields_by_name[ + "new_transaction" +].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"] +_RUNQUERYREQUEST.oneofs_by_name["consistency_selector"].fields.append( + _RUNQUERYREQUEST.fields_by_name["read_time"] +) +_RUNQUERYREQUEST.fields_by_name[ + "read_time" +].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name["consistency_selector"] +_RUNQUERYRESPONSE.fields_by_name[ + "document" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_RUNQUERYRESPONSE.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _WRITEREQUEST_LABELSENTRY.containing_type = _WRITEREQUEST -_WRITEREQUEST.fields_by_name['writes'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE -_WRITEREQUEST.fields_by_name['labels'].message_type = _WRITEREQUEST_LABELSENTRY -_WRITERESPONSE.fields_by_name['write_results'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT -_WRITERESPONSE.fields_by_name['commit_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_WRITEREQUEST.fields_by_name[ + "writes" +].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITE 
+_WRITEREQUEST.fields_by_name["labels"].message_type = _WRITEREQUEST_LABELSENTRY +_WRITERESPONSE.fields_by_name[ + "write_results" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._WRITERESULT +) +_WRITERESPONSE.fields_by_name[ + "commit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LISTENREQUEST_LABELSENTRY.containing_type = _LISTENREQUEST -_LISTENREQUEST.fields_by_name['add_target'].message_type = _TARGET -_LISTENREQUEST.fields_by_name['labels'].message_type = _LISTENREQUEST_LABELSENTRY -_LISTENREQUEST.oneofs_by_name['target_change'].fields.append( - _LISTENREQUEST.fields_by_name['add_target']) -_LISTENREQUEST.fields_by_name['add_target'].containing_oneof = _LISTENREQUEST.oneofs_by_name['target_change'] -_LISTENREQUEST.oneofs_by_name['target_change'].fields.append( - _LISTENREQUEST.fields_by_name['remove_target']) -_LISTENREQUEST.fields_by_name['remove_target'].containing_oneof = _LISTENREQUEST.oneofs_by_name['target_change'] -_LISTENRESPONSE.fields_by_name['target_change'].message_type = _TARGETCHANGE -_LISTENRESPONSE.fields_by_name['document_change'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTCHANGE -_LISTENRESPONSE.fields_by_name['document_delete'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTDELETE -_LISTENRESPONSE.fields_by_name['document_remove'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTREMOVE -_LISTENRESPONSE.fields_by_name['filter'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._EXISTENCEFILTER -_LISTENRESPONSE.oneofs_by_name['response_type'].fields.append( - _LISTENRESPONSE.fields_by_name['target_change']) -_LISTENRESPONSE.fields_by_name['target_change'].containing_oneof = _LISTENRESPONSE.oneofs_by_name['response_type'] -_LISTENRESPONSE.oneofs_by_name['response_type'].fields.append( - _LISTENRESPONSE.fields_by_name['document_change']) -_LISTENRESPONSE.fields_by_name['document_change'].containing_oneof = _LISTENRESPONSE.oneofs_by_name['response_type'] -_LISTENRESPONSE.oneofs_by_name['response_type'].fields.append( - _LISTENRESPONSE.fields_by_name['document_delete']) -_LISTENRESPONSE.fields_by_name['document_delete'].containing_oneof = _LISTENRESPONSE.oneofs_by_name['response_type'] -_LISTENRESPONSE.oneofs_by_name['response_type'].fields.append( - _LISTENRESPONSE.fields_by_name['document_remove']) -_LISTENRESPONSE.fields_by_name['document_remove'].containing_oneof = _LISTENRESPONSE.oneofs_by_name['response_type'] -_LISTENRESPONSE.oneofs_by_name['response_type'].fields.append( - _LISTENRESPONSE.fields_by_name['filter']) -_LISTENRESPONSE.fields_by_name['filter'].containing_oneof = _LISTENRESPONSE.oneofs_by_name['response_type'] +_LISTENREQUEST.fields_by_name["add_target"].message_type = _TARGET +_LISTENREQUEST.fields_by_name["labels"].message_type = _LISTENREQUEST_LABELSENTRY +_LISTENREQUEST.oneofs_by_name["target_change"].fields.append( + _LISTENREQUEST.fields_by_name["add_target"] +) +_LISTENREQUEST.fields_by_name[ + "add_target" +].containing_oneof = _LISTENREQUEST.oneofs_by_name["target_change"] +_LISTENREQUEST.oneofs_by_name["target_change"].fields.append( + _LISTENREQUEST.fields_by_name["remove_target"] +) +_LISTENREQUEST.fields_by_name[ + "remove_target" +].containing_oneof = _LISTENREQUEST.oneofs_by_name["target_change"] +_LISTENRESPONSE.fields_by_name["target_change"].message_type = _TARGETCHANGE +_LISTENRESPONSE.fields_by_name[ + 
"document_change" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTCHANGE +) +_LISTENRESPONSE.fields_by_name[ + "document_delete" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTDELETE +) +_LISTENRESPONSE.fields_by_name[ + "document_remove" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._DOCUMENTREMOVE +) +_LISTENRESPONSE.fields_by_name[ + "filter" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_write__pb2._EXISTENCEFILTER +) +_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append( + _LISTENRESPONSE.fields_by_name["target_change"] +) +_LISTENRESPONSE.fields_by_name[ + "target_change" +].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"] +_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append( + _LISTENRESPONSE.fields_by_name["document_change"] +) +_LISTENRESPONSE.fields_by_name[ + "document_change" +].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"] +_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append( + _LISTENRESPONSE.fields_by_name["document_delete"] +) +_LISTENRESPONSE.fields_by_name[ + "document_delete" +].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"] +_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append( + _LISTENRESPONSE.fields_by_name["document_remove"] +) +_LISTENRESPONSE.fields_by_name[ + "document_remove" +].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"] +_LISTENRESPONSE.oneofs_by_name["response_type"].fields.append( + _LISTENRESPONSE.fields_by_name["filter"] +) +_LISTENRESPONSE.fields_by_name[ + "filter" +].containing_oneof = _LISTENRESPONSE.oneofs_by_name["response_type"] _TARGET_DOCUMENTSTARGET.containing_type = _TARGET -_TARGET_QUERYTARGET.fields_by_name['structured_query'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY +_TARGET_QUERYTARGET.fields_by_name[ + "structured_query" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY +) _TARGET_QUERYTARGET.containing_type = _TARGET -_TARGET_QUERYTARGET.oneofs_by_name['query_type'].fields.append( - _TARGET_QUERYTARGET.fields_by_name['structured_query']) -_TARGET_QUERYTARGET.fields_by_name['structured_query'].containing_oneof = _TARGET_QUERYTARGET.oneofs_by_name['query_type'] -_TARGET.fields_by_name['query'].message_type = _TARGET_QUERYTARGET -_TARGET.fields_by_name['documents'].message_type = _TARGET_DOCUMENTSTARGET -_TARGET.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TARGET.oneofs_by_name['target_type'].fields.append( - _TARGET.fields_by_name['query']) -_TARGET.fields_by_name['query'].containing_oneof = _TARGET.oneofs_by_name['target_type'] -_TARGET.oneofs_by_name['target_type'].fields.append( - _TARGET.fields_by_name['documents']) -_TARGET.fields_by_name['documents'].containing_oneof = _TARGET.oneofs_by_name['target_type'] -_TARGET.oneofs_by_name['resume_type'].fields.append( - _TARGET.fields_by_name['resume_token']) -_TARGET.fields_by_name['resume_token'].containing_oneof = _TARGET.oneofs_by_name['resume_type'] -_TARGET.oneofs_by_name['resume_type'].fields.append( - _TARGET.fields_by_name['read_time']) -_TARGET.fields_by_name['read_time'].containing_oneof = _TARGET.oneofs_by_name['resume_type'] -_TARGETCHANGE.fields_by_name['target_change_type'].enum_type = _TARGETCHANGE_TARGETCHANGETYPE 
-_TARGETCHANGE.fields_by_name['cause'].message_type = google_dot_rpc_dot_status__pb2._STATUS -_TARGETCHANGE.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TARGET_QUERYTARGET.oneofs_by_name["query_type"].fields.append( + _TARGET_QUERYTARGET.fields_by_name["structured_query"] +) +_TARGET_QUERYTARGET.fields_by_name[ + "structured_query" +].containing_oneof = _TARGET_QUERYTARGET.oneofs_by_name["query_type"] +_TARGET.fields_by_name["query"].message_type = _TARGET_QUERYTARGET +_TARGET.fields_by_name["documents"].message_type = _TARGET_DOCUMENTSTARGET +_TARGET.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TARGET.oneofs_by_name["target_type"].fields.append(_TARGET.fields_by_name["query"]) +_TARGET.fields_by_name["query"].containing_oneof = _TARGET.oneofs_by_name["target_type"] +_TARGET.oneofs_by_name["target_type"].fields.append(_TARGET.fields_by_name["documents"]) +_TARGET.fields_by_name["documents"].containing_oneof = _TARGET.oneofs_by_name[ + "target_type" +] +_TARGET.oneofs_by_name["resume_type"].fields.append( + _TARGET.fields_by_name["resume_token"] +) +_TARGET.fields_by_name["resume_token"].containing_oneof = _TARGET.oneofs_by_name[ + "resume_type" +] +_TARGET.oneofs_by_name["resume_type"].fields.append(_TARGET.fields_by_name["read_time"]) +_TARGET.fields_by_name["read_time"].containing_oneof = _TARGET.oneofs_by_name[ + "resume_type" +] +_TARGETCHANGE.fields_by_name[ + "target_change_type" +].enum_type = _TARGETCHANGE_TARGETCHANGETYPE +_TARGETCHANGE.fields_by_name[ + "cause" +].message_type = google_dot_rpc_dot_status__pb2._STATUS +_TARGETCHANGE.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _TARGETCHANGE_TARGETCHANGETYPE.containing_type = _TARGETCHANGE -DESCRIPTOR.message_types_by_name['GetDocumentRequest'] = _GETDOCUMENTREQUEST -DESCRIPTOR.message_types_by_name['ListDocumentsRequest'] = _LISTDOCUMENTSREQUEST -DESCRIPTOR.message_types_by_name['ListDocumentsResponse'] = _LISTDOCUMENTSRESPONSE -DESCRIPTOR.message_types_by_name['CreateDocumentRequest'] = _CREATEDOCUMENTREQUEST -DESCRIPTOR.message_types_by_name['UpdateDocumentRequest'] = _UPDATEDOCUMENTREQUEST -DESCRIPTOR.message_types_by_name['DeleteDocumentRequest'] = _DELETEDOCUMENTREQUEST -DESCRIPTOR.message_types_by_name['BatchGetDocumentsRequest'] = _BATCHGETDOCUMENTSREQUEST -DESCRIPTOR.message_types_by_name['BatchGetDocumentsResponse'] = _BATCHGETDOCUMENTSRESPONSE -DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST -DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = _BEGINTRANSACTIONRESPONSE -DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST -DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE -DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST -DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST -DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE -DESCRIPTOR.message_types_by_name['WriteRequest'] = _WRITEREQUEST -DESCRIPTOR.message_types_by_name['WriteResponse'] = _WRITERESPONSE -DESCRIPTOR.message_types_by_name['ListenRequest'] = _LISTENREQUEST -DESCRIPTOR.message_types_by_name['ListenResponse'] = _LISTENRESPONSE -DESCRIPTOR.message_types_by_name['Target'] = _TARGET -DESCRIPTOR.message_types_by_name['TargetChange'] = _TARGETCHANGE -DESCRIPTOR.message_types_by_name['ListCollectionIdsRequest'] = _LISTCOLLECTIONIDSREQUEST 
-DESCRIPTOR.message_types_by_name['ListCollectionIdsResponse'] = _LISTCOLLECTIONIDSRESPONSE +DESCRIPTOR.message_types_by_name["GetDocumentRequest"] = _GETDOCUMENTREQUEST +DESCRIPTOR.message_types_by_name["ListDocumentsRequest"] = _LISTDOCUMENTSREQUEST +DESCRIPTOR.message_types_by_name["ListDocumentsResponse"] = _LISTDOCUMENTSRESPONSE +DESCRIPTOR.message_types_by_name["CreateDocumentRequest"] = _CREATEDOCUMENTREQUEST +DESCRIPTOR.message_types_by_name["UpdateDocumentRequest"] = _UPDATEDOCUMENTREQUEST +DESCRIPTOR.message_types_by_name["DeleteDocumentRequest"] = _DELETEDOCUMENTREQUEST +DESCRIPTOR.message_types_by_name["BatchGetDocumentsRequest"] = _BATCHGETDOCUMENTSREQUEST +DESCRIPTOR.message_types_by_name[ + "BatchGetDocumentsResponse" +] = _BATCHGETDOCUMENTSRESPONSE +DESCRIPTOR.message_types_by_name["BeginTransactionRequest"] = _BEGINTRANSACTIONREQUEST +DESCRIPTOR.message_types_by_name["BeginTransactionResponse"] = _BEGINTRANSACTIONRESPONSE +DESCRIPTOR.message_types_by_name["CommitRequest"] = _COMMITREQUEST +DESCRIPTOR.message_types_by_name["CommitResponse"] = _COMMITRESPONSE +DESCRIPTOR.message_types_by_name["RollbackRequest"] = _ROLLBACKREQUEST +DESCRIPTOR.message_types_by_name["RunQueryRequest"] = _RUNQUERYREQUEST +DESCRIPTOR.message_types_by_name["RunQueryResponse"] = _RUNQUERYRESPONSE +DESCRIPTOR.message_types_by_name["WriteRequest"] = _WRITEREQUEST +DESCRIPTOR.message_types_by_name["WriteResponse"] = _WRITERESPONSE +DESCRIPTOR.message_types_by_name["ListenRequest"] = _LISTENREQUEST +DESCRIPTOR.message_types_by_name["ListenResponse"] = _LISTENRESPONSE +DESCRIPTOR.message_types_by_name["Target"] = _TARGET +DESCRIPTOR.message_types_by_name["TargetChange"] = _TARGETCHANGE +DESCRIPTOR.message_types_by_name["ListCollectionIdsRequest"] = _LISTCOLLECTIONIDSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListCollectionIdsResponse" +] = _LISTCOLLECTIONIDSRESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -GetDocumentRequest = _reflection.GeneratedProtocolMessageType('GetDocumentRequest', (_message.Message,), dict( - DESCRIPTOR = _GETDOCUMENTREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +GetDocumentRequest = _reflection.GeneratedProtocolMessageType( + "GetDocumentRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETDOCUMENTREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument]. @@ -1592,15 +2831,18 @@ Reads the version of the document at the given time. This may not be older than 60 seconds. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.GetDocumentRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.GetDocumentRequest) + ), +) _sym_db.RegisterMessage(GetDocumentRequest) -ListDocumentsRequest = _reflection.GeneratedProtocolMessageType('ListDocumentsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTDOCUMENTSREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +ListDocumentsRequest = _reflection.GeneratedProtocolMessageType( + "ListDocumentsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTDOCUMENTSREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. @@ -1645,15 +2887,18 @@ .v1beta1.Document.update\_time] set. 
Requests with ``show_missing`` may not specify ``where`` or ``order_by``. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsRequest) + ), +) _sym_db.RegisterMessage(ListDocumentsRequest) -ListDocumentsResponse = _reflection.GeneratedProtocolMessageType('ListDocumentsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTDOCUMENTSRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The response for +ListDocumentsResponse = _reflection.GeneratedProtocolMessageType( + "ListDocumentsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTDOCUMENTSRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The response for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. @@ -1663,15 +2908,18 @@ next_page_token: The next page token. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListDocumentsResponse) + ), +) _sym_db.RegisterMessage(ListDocumentsResponse) -CreateDocumentRequest = _reflection.GeneratedProtocolMessageType('CreateDocumentRequest', (_message.Message,), dict( - DESCRIPTOR = _CREATEDOCUMENTREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +CreateDocumentRequest = _reflection.GeneratedProtocolMessageType( + "CreateDocumentRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEDOCUMENTREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument]. @@ -1695,15 +2943,18 @@ document has a field that is not present in this mask, that field will not be returned in the response. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CreateDocumentRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CreateDocumentRequest) + ), +) _sym_db.RegisterMessage(CreateDocumentRequest) -UpdateDocumentRequest = _reflection.GeneratedProtocolMessageType('UpdateDocumentRequest', (_message.Message,), dict( - DESCRIPTOR = _UPDATEDOCUMENTREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +UpdateDocumentRequest = _reflection.GeneratedProtocolMessageType( + "UpdateDocumentRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEDOCUMENTREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument]. @@ -1726,15 +2977,18 @@ An optional precondition on the document. The request will fail if this is set and not met by the target document. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.UpdateDocumentRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.UpdateDocumentRequest) + ), +) _sym_db.RegisterMessage(UpdateDocumentRequest) -DeleteDocumentRequest = _reflection.GeneratedProtocolMessageType('DeleteDocumentRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETEDOCUMENTREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +DeleteDocumentRequest = _reflection.GeneratedProtocolMessageType( + "DeleteDocumentRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEDOCUMENTREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument]. @@ -1747,15 +3001,18 @@ An optional precondition on the document. The request will fail if this is set and not met by the target document. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DeleteDocumentRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DeleteDocumentRequest) + ), +) _sym_db.RegisterMessage(DeleteDocumentRequest) -BatchGetDocumentsRequest = _reflection.GeneratedProtocolMessageType('BatchGetDocumentsRequest', (_message.Message,), dict( - DESCRIPTOR = _BATCHGETDOCUMENTSREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +BatchGetDocumentsRequest = _reflection.GeneratedProtocolMessageType( + "BatchGetDocumentsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHGETDOCUMENTSREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. @@ -1786,15 +3043,18 @@ Reads documents as they were at the given time. This may not be older than 60 seconds. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsRequest) + ), +) _sym_db.RegisterMessage(BatchGetDocumentsRequest) -BatchGetDocumentsResponse = _reflection.GeneratedProtocolMessageType('BatchGetDocumentsResponse', (_message.Message,), dict( - DESCRIPTOR = _BATCHGETDOCUMENTSRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The streamed response for +BatchGetDocumentsResponse = _reflection.GeneratedProtocolMessageType( + "BatchGetDocumentsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHGETDOCUMENTSRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The streamed response for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. @@ -1819,15 +3079,18 @@ the result stream are guaranteed not to have changed between their read\_time and this one. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BatchGetDocumentsResponse) + ), +) _sym_db.RegisterMessage(BatchGetDocumentsResponse) -BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict( - DESCRIPTOR = _BEGINTRANSACTIONREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +BeginTransactionRequest = _reflection.GeneratedProtocolMessageType( + "BeginTransactionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_BEGINTRANSACTIONREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. @@ -1839,15 +3102,18 @@ The options for the transaction. Defaults to a read-write transaction. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionRequest) + ), +) _sym_db.RegisterMessage(BeginTransactionRequest) -BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict( - DESCRIPTOR = _BEGINTRANSACTIONRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The response for +BeginTransactionResponse = _reflection.GeneratedProtocolMessageType( + "BeginTransactionResponse", + (_message.Message,), + dict( + DESCRIPTOR=_BEGINTRANSACTIONRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The response for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. @@ -1855,15 +3121,18 @@ transaction: The transaction that was started. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.BeginTransactionResponse) + ), +) _sym_db.RegisterMessage(BeginTransactionResponse) -CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict( - DESCRIPTOR = _COMMITREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +CommitRequest = _reflection.GeneratedProtocolMessageType( + "CommitRequest", + (_message.Message,), + dict( + DESCRIPTOR=_COMMITREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. @@ -1877,15 +3146,18 @@ If set, applies all writes in this transaction, and commits it. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitRequest) + ), +) _sym_db.RegisterMessage(CommitRequest) -CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict( - DESCRIPTOR = _COMMITRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The response for +CommitResponse = _reflection.GeneratedProtocolMessageType( + "CommitResponse", + (_message.Message,), + dict( + DESCRIPTOR=_COMMITRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The response for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. 
@@ -1896,15 +3168,18 @@ commit_time: The time at which the commit occurred. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.CommitResponse) + ), +) _sym_db.RegisterMessage(CommitResponse) -RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict( - DESCRIPTOR = _ROLLBACKREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +RollbackRequest = _reflection.GeneratedProtocolMessageType( + "RollbackRequest", + (_message.Message,), + dict( + DESCRIPTOR=_ROLLBACKREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback]. @@ -1915,15 +3190,18 @@ transaction: The transaction to roll back. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RollbackRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RollbackRequest) + ), +) _sym_db.RegisterMessage(RollbackRequest) -RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict( - DESCRIPTOR = _RUNQUERYREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +RunQueryRequest = _reflection.GeneratedProtocolMessageType( + "RunQueryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_RUNQUERYREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. @@ -1953,15 +3231,18 @@ Reads documents as they were at the given time. This may not be older than 60 seconds. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryRequest) + ), +) _sym_db.RegisterMessage(RunQueryRequest) -RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict( - DESCRIPTOR = _RUNQUERYRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The response for +RunQueryResponse = _reflection.GeneratedProtocolMessageType( + "RunQueryResponse", + (_message.Message,), + dict( + DESCRIPTOR=_RUNQUERYRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The response for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. @@ -1986,22 +3267,27 @@ The number of results that have been skipped due to an offset between the last response and the current response. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.RunQueryResponse) + ), +) _sym_db.RegisterMessage(RunQueryResponse) -WriteRequest = _reflection.GeneratedProtocolMessageType('WriteRequest', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _WRITEREQUEST_LABELSENTRY, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest.LabelsEntry) - )) - , - DESCRIPTOR = _WRITEREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +WriteRequest = _reflection.GeneratedProtocolMessageType( + "WriteRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_WRITEREQUEST_LABELSENTRY, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2" + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_WRITEREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. The first request creates a stream, or resumes an existing one from a @@ -2044,16 +3330,19 @@ labels: Labels associated with this write request. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteRequest) + ), +) _sym_db.RegisterMessage(WriteRequest) _sym_db.RegisterMessage(WriteRequest.LabelsEntry) -WriteResponse = _reflection.GeneratedProtocolMessageType('WriteResponse', (_message.Message,), dict( - DESCRIPTOR = _WRITERESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The response for +WriteResponse = _reflection.GeneratedProtocolMessageType( + "WriteResponse", + (_message.Message,), + dict( + DESCRIPTOR=_WRITERESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The response for [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. @@ -2071,22 +3360,27 @@ commit_time: The time at which the commit occurred. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResponse) + ), +) _sym_db.RegisterMessage(WriteResponse) -ListenRequest = _reflection.GeneratedProtocolMessageType('ListenRequest', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _LISTENREQUEST_LABELSENTRY, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest.LabelsEntry) - )) - , - DESCRIPTOR = _LISTENREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """A request for +ListenRequest = _reflection.GeneratedProtocolMessageType( + "ListenRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_LISTENREQUEST_LABELSENTRY, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2" + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_LISTENREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""A request for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen] @@ -2103,16 +3397,19 @@ labels: Labels associated with this target change. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenRequest) + ), +) _sym_db.RegisterMessage(ListenRequest) _sym_db.RegisterMessage(ListenRequest.LabelsEntry) -ListenResponse = _reflection.GeneratedProtocolMessageType('ListenResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTENRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The response for +ListenResponse = _reflection.GeneratedProtocolMessageType( + "ListenResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTENRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The response for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]. @@ -2136,17 +3433,22 @@ removed from the given target, but the exact documents are unknown. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListenResponse) + ), +) _sym_db.RegisterMessage(ListenResponse) -Target = _reflection.GeneratedProtocolMessageType('Target', (_message.Message,), dict( - - DocumentsTarget = _reflection.GeneratedProtocolMessageType('DocumentsTarget', (_message.Message,), dict( - DESCRIPTOR = _TARGET_DOCUMENTSTARGET, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """A target specified by a set of documents names. +Target = _reflection.GeneratedProtocolMessageType( + "Target", + (_message.Message,), + dict( + DocumentsTarget=_reflection.GeneratedProtocolMessageType( + "DocumentsTarget", + (_message.Message,), + dict( + DESCRIPTOR=_TARGET_DOCUMENTSTARGET, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""A target specified by a set of documents names. Attributes: @@ -2157,15 +3459,16 @@ child resource of the given ``database``. Duplicate names will be elided. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.DocumentsTarget) - )) - , - - QueryTarget = _reflection.GeneratedProtocolMessageType('QueryTarget', (_message.Message,), dict( - DESCRIPTOR = _TARGET_QUERYTARGET, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """A target specified by a query. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.DocumentsTarget) + ), + ), + QueryTarget=_reflection.GeneratedProtocolMessageType( + "QueryTarget", + (_message.Message,), + dict( + DESCRIPTOR=_TARGET_QUERYTARGET, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""A target specified by a query. Attributes: @@ -2182,13 +3485,12 @@ structured_query: A structured query. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.QueryTarget) - )) - , - DESCRIPTOR = _TARGET, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """A specification of a set of documents to listen to. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target.QueryTarget) + ), + ), + DESCRIPTOR=_TARGET, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""A specification of a set of documents to listen to. Attributes: @@ -2220,17 +3522,20 @@ If the target should be removed once it is current and consistent. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Target) + ), +) _sym_db.RegisterMessage(Target) _sym_db.RegisterMessage(Target.DocumentsTarget) _sym_db.RegisterMessage(Target.QueryTarget) -TargetChange = _reflection.GeneratedProtocolMessageType('TargetChange', (_message.Message,), dict( - DESCRIPTOR = _TARGETCHANGE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """Targets being watched have changed. +TargetChange = _reflection.GeneratedProtocolMessageType( + "TargetChange", + (_message.Message,), + dict( + DESCRIPTOR=_TARGETCHANGE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""Targets being watched have changed. Attributes: @@ -2261,15 +3566,18 @@ stream, ``read_time`` is guaranteed to be monotonically increasing. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TargetChange) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.TargetChange) + ), +) _sym_db.RegisterMessage(TargetChange) -ListCollectionIdsRequest = _reflection.GeneratedProtocolMessageType('ListCollectionIdsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTCOLLECTIONIDSREQUEST, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The request for +ListCollectionIdsRequest = _reflection.GeneratedProtocolMessageType( + "ListCollectionIdsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTCOLLECTIONIDSREQUEST, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The request for [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. @@ -2285,15 +3593,18 @@ A page token. Must be a value from [ListCollectionIdsResponse] [google.firestore.v1beta1.ListCollectionIdsResponse]. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsRequest) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsRequest) + ), +) _sym_db.RegisterMessage(ListCollectionIdsRequest) -ListCollectionIdsResponse = _reflection.GeneratedProtocolMessageType('ListCollectionIdsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTCOLLECTIONIDSRESPONSE, - __module__ = 'google.cloud.firestore_v1beta1.proto.firestore_pb2' - , - __doc__ = """The response from +ListCollectionIdsResponse = _reflection.GeneratedProtocolMessageType( + "ListCollectionIdsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTCOLLECTIONIDSRESPONSE, + __module__="google.cloud.firestore_v1beta1.proto.firestore_pb2", + __doc__="""The response from [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. @@ -2303,147 +3614,223 @@ next_page_token: A page token that may be used to continue the list. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsResponse) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ListCollectionIdsResponse) + ), +) _sym_db.RegisterMessage(ListCollectionIdsResponse) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.firestore.v1beta1B\016FirestoreProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\034com.google.firestore.v1beta1B\016FirestoreProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" + ), +) _WRITEREQUEST_LABELSENTRY.has_options = True -_WRITEREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_WRITEREQUEST_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _LISTENREQUEST_LABELSENTRY.has_options = True -_LISTENREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_LISTENREQUEST_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _FIRESTORE = _descriptor.ServiceDescriptor( - name='Firestore', - full_name='google.firestore.v1beta1.Firestore', - file=DESCRIPTOR, - index=0, - options=None, - serialized_start=4856, - serialized_end=7360, - methods=[ - _descriptor.MethodDescriptor( - name='GetDocument', - full_name='google.firestore.v1beta1.Firestore.GetDocument', + name="Firestore", + full_name="google.firestore.v1beta1.Firestore", + file=DESCRIPTOR, index=0, - containing_service=None, - input_type=_GETDOCUMENTREQUEST, - output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0027\0225/v1beta1/{name=projects/*/databases/*/documents/*/**}')), - ), - _descriptor.MethodDescriptor( - name='ListDocuments', - full_name='google.firestore.v1beta1.Firestore.ListDocuments', - index=1, - containing_service=None, - input_type=_LISTDOCUMENTSREQUEST, - output_type=_LISTDOCUMENTSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), 
_b('\202\323\344\223\002I\022G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}')), - ), - _descriptor.MethodDescriptor( - name='CreateDocument', - full_name='google.firestore.v1beta1.Firestore.CreateDocument', - index=2, - containing_service=None, - input_type=_CREATEDOCUMENTREQUEST, - output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002Q\"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\010document')), - ), - _descriptor.MethodDescriptor( - name='UpdateDocument', - full_name='google.firestore.v1beta1.Firestore.UpdateDocument', - index=3, - containing_service=None, - input_type=_UPDATEDOCUMENTREQUEST, - output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\010document')), - ), - _descriptor.MethodDescriptor( - name='DeleteDocument', - full_name='google.firestore.v1beta1.Firestore.DeleteDocument', - index=4, - containing_service=None, - input_type=_DELETEDOCUMENTREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0027*5/v1beta1/{name=projects/*/databases/*/documents/*/**}')), - ), - _descriptor.MethodDescriptor( - name='BatchGetDocuments', - full_name='google.firestore.v1beta1.Firestore.BatchGetDocuments', - index=5, - containing_service=None, - input_type=_BATCHGETDOCUMENTSREQUEST, - output_type=_BATCHGETDOCUMENTSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002B\"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\001*')), - ), - _descriptor.MethodDescriptor( - name='BeginTransaction', - full_name='google.firestore.v1beta1.Firestore.BeginTransaction', - index=6, - containing_service=None, - input_type=_BEGINTRANSACTIONREQUEST, - output_type=_BEGINTRANSACTIONRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002J\"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\001*')), - ), - _descriptor.MethodDescriptor( - name='Commit', - full_name='google.firestore.v1beta1.Firestore.Commit', - index=7, - containing_service=None, - input_type=_COMMITREQUEST, - output_type=_COMMITRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002@\";/v1beta1/{database=projects/*/databases/*}/documents:commit:\001*')), - ), - _descriptor.MethodDescriptor( - name='Rollback', - full_name='google.firestore.v1beta1.Firestore.Rollback', - index=8, - containing_service=None, - input_type=_ROLLBACKREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002B\"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\001*')), - ), - _descriptor.MethodDescriptor( - name='RunQuery', - full_name='google.firestore.v1beta1.Firestore.RunQuery', - index=9, - containing_service=None, - input_type=_RUNQUERYREQUEST, - output_type=_RUNQUERYRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), 
_b('\202\323\344\223\002\207\001\";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\001*ZE\"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\001*')), - ), - _descriptor.MethodDescriptor( - name='Write', - full_name='google.firestore.v1beta1.Firestore.Write', - index=10, - containing_service=None, - input_type=_WRITEREQUEST, - output_type=_WRITERESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002?\":/v1beta1/{database=projects/*/databases/*}/documents:write:\001*')), - ), - _descriptor.MethodDescriptor( - name='Listen', - full_name='google.firestore.v1beta1.Firestore.Listen', - index=11, - containing_service=None, - input_type=_LISTENREQUEST, - output_type=_LISTENRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002@\";/v1beta1/{database=projects/*/databases/*}/documents:listen:\001*')), - ), - _descriptor.MethodDescriptor( - name='ListCollectionIds', - full_name='google.firestore.v1beta1.Firestore.ListCollectionIds', - index=12, - containing_service=None, - input_type=_LISTCOLLECTIONIDSREQUEST, - output_type=_LISTCOLLECTIONIDSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\231\001\"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\001*ZN\"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\001*')), - ), -]) + options=None, + serialized_start=4856, + serialized_end=7360, + methods=[ + _descriptor.MethodDescriptor( + name="GetDocument", + full_name="google.firestore.v1beta1.Firestore.GetDocument", + index=0, + containing_service=None, + input_type=_GETDOCUMENTREQUEST, + output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\0027\0225/v1beta1/{name=projects/*/databases/*/documents/*/**}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="ListDocuments", + full_name="google.firestore.v1beta1.Firestore.ListDocuments", + index=1, + containing_service=None, + input_type=_LISTDOCUMENTSREQUEST, + output_type=_LISTDOCUMENTSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002I\022G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="CreateDocument", + full_name="google.firestore.v1beta1.Firestore.CreateDocument", + index=2, + containing_service=None, + input_type=_CREATEDOCUMENTREQUEST, + output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\010document' + ), + ), + ), + _descriptor.MethodDescriptor( + name="UpdateDocument", + full_name="google.firestore.v1beta1.Firestore.UpdateDocument", + index=3, + containing_service=None, + input_type=_UPDATEDOCUMENTREQUEST, + output_type=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\010document" + ), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteDocument", + full_name="google.firestore.v1beta1.Firestore.DeleteDocument", + index=4, + containing_service=None, + 
input_type=_DELETEDOCUMENTREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\0027*5/v1beta1/{name=projects/*/databases/*/documents/*/**}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="BatchGetDocuments", + full_name="google.firestore.v1beta1.Firestore.BatchGetDocuments", + index=5, + containing_service=None, + input_type=_BATCHGETDOCUMENTSREQUEST, + output_type=_BATCHGETDOCUMENTSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002B"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="BeginTransaction", + full_name="google.firestore.v1beta1.Firestore.BeginTransaction", + index=6, + containing_service=None, + input_type=_BEGINTRANSACTIONREQUEST, + output_type=_BEGINTRANSACTIONRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="Commit", + full_name="google.firestore.v1beta1.Firestore.Commit", + index=7, + containing_service=None, + input_type=_COMMITREQUEST, + output_type=_COMMITRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002@";/v1beta1/{database=projects/*/databases/*}/documents:commit:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="Rollback", + full_name="google.firestore.v1beta1.Firestore.Rollback", + index=8, + containing_service=None, + input_type=_ROLLBACKREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002B"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="RunQuery", + full_name="google.firestore.v1beta1.Firestore.RunQuery", + index=9, + containing_service=None, + input_type=_RUNQUERYREQUEST, + output_type=_RUNQUERYRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002\207\001";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\001*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="Write", + full_name="google.firestore.v1beta1.Firestore.Write", + index=10, + containing_service=None, + input_type=_WRITEREQUEST, + output_type=_WRITERESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002?":/v1beta1/{database=projects/*/databases/*}/documents:write:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="Listen", + full_name="google.firestore.v1beta1.Firestore.Listen", + index=11, + containing_service=None, + input_type=_LISTENREQUEST, + output_type=_LISTENRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="ListCollectionIds", + full_name="google.firestore.v1beta1.Firestore.ListCollectionIds", + index=12, + containing_service=None, + input_type=_LISTCOLLECTIONIDSREQUEST, + output_type=_LISTCOLLECTIONIDSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + 
'\202\323\344\223\002\231\001"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\001*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\001*' + ), + ), + ), + ], +) _sym_db.RegisterServiceDescriptor(_FIRESTORE) -DESCRIPTOR.services_by_name['Firestore'] = _FIRESTORE +DESCRIPTOR.services_by_name["Firestore"] = _FIRESTORE # @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py index c14b471b9d11..e3bd63b73f35 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py @@ -1,13 +1,17 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from google.cloud.firestore_v1beta1.proto import document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2 -from google.cloud.firestore_v1beta1.proto import firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2 +from google.cloud.firestore_v1beta1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2, +) +from google.cloud.firestore_v1beta1.proto import ( + firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class FirestoreStub(object): - """The Cloud Firestore service. + """The Cloud Firestore service. This service exposes several types of comparable timestamps: @@ -25,81 +29,81 @@ class FirestoreStub(object): to see the effects of the transaction. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. 
""" - self.GetDocument = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/GetDocument', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString, + self.GetDocument = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/GetDocument", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString, ) - self.ListDocuments = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/ListDocuments', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.FromString, + self.ListDocuments = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/ListDocuments", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.FromString, ) - self.CreateDocument = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/CreateDocument', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString, + self.CreateDocument = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/CreateDocument", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString, ) - self.UpdateDocument = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/UpdateDocument', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString, + self.UpdateDocument = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/UpdateDocument", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.FromString, ) - self.DeleteDocument = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/DeleteDocument', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteDocument = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/DeleteDocument", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.BatchGetDocuments = channel.unary_stream( - '/google.firestore.v1beta1.Firestore/BatchGetDocuments', - 
request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.FromString, + self.BatchGetDocuments = channel.unary_stream( + "/google.firestore.v1beta1.Firestore/BatchGetDocuments", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.FromString, ) - self.BeginTransaction = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/BeginTransaction', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.FromString, + self.BeginTransaction = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/BeginTransaction", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.FromString, ) - self.Commit = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/Commit', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.FromString, + self.Commit = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/Commit", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.FromString, ) - self.Rollback = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/Rollback', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.Rollback = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/Rollback", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.RunQuery = channel.unary_stream( - '/google.firestore.v1beta1.Firestore/RunQuery', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.FromString, + self.RunQuery = channel.unary_stream( + "/google.firestore.v1beta1.Firestore/RunQuery", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.FromString, ) - self.Write = channel.stream_stream( - '/google.firestore.v1beta1.Firestore/Write', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.FromString, + self.Write = channel.stream_stream( + "/google.firestore.v1beta1.Firestore/Write", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.FromString, ) - self.Listen = channel.stream_stream( - '/google.firestore.v1beta1.Firestore/Listen', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.FromString, + self.Listen = channel.stream_stream( + "/google.firestore.v1beta1.Firestore/Listen", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.FromString, ) - self.ListCollectionIds = channel.unary_unary( - '/google.firestore.v1beta1.Firestore/ListCollectionIds', - request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.FromString, + self.ListCollectionIds = channel.unary_unary( + "/google.firestore.v1beta1.Firestore/ListCollectionIds", + request_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.FromString, ) class FirestoreServicer(object): - """The Cloud Firestore service. + """The Cloud Firestore service. This service exposes several types of comparable timestamps: @@ -117,169 +121,170 @@ class FirestoreServicer(object): to see the effects of the transaction. """ - def GetDocument(self, request, context): - """Gets a single document. + def GetDocument(self, request, context): + """Gets a single document. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListDocuments(self, request, context): - """Lists documents. + def ListDocuments(self, request, context): + """Lists documents. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def CreateDocument(self, request, context): - """Creates a new document. + def CreateDocument(self, request, context): + """Creates a new document. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def UpdateDocument(self, request, context): - """Updates or inserts a document. + def UpdateDocument(self, request, context): + """Updates or inserts a document. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteDocument(self, request, context): - """Deletes a document. + def DeleteDocument(self, request, context): + """Deletes a document. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def BatchGetDocuments(self, request, context): - """Gets multiple documents. + def BatchGetDocuments(self, request, context): + """Gets multiple documents. Documents returned by this method are not guaranteed to be returned in the same order that they were requested. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def BeginTransaction(self, request, context): - """Starts a new transaction. + def BeginTransaction(self, request, context): + """Starts a new transaction. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Commit(self, request, context): - """Commits a transaction, while optionally updating documents. + def Commit(self, request, context): + """Commits a transaction, while optionally updating documents. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Rollback(self, request, context): - """Rolls back a transaction. + def Rollback(self, request, context): + """Rolls back a transaction. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def RunQuery(self, request, context): - """Runs a query. + def RunQuery(self, request, context): + """Runs a query. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Write(self, request_iterator, context): - """Streams batches of document updates and deletes, in order. + def Write(self, request_iterator, context): + """Streams batches of document updates and deletes, in order. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Listen(self, request_iterator, context): - """Listens to changes. + def Listen(self, request_iterator, context): + """Listens to changes. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListCollectionIds(self, request, context): - """Lists all the collection IDs underneath a document. + def ListCollectionIds(self, request, context): + """Lists all the collection IDs underneath a document. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_FirestoreServicer_to_server(servicer, server): - rpc_method_handlers = { - 'GetDocument': grpc.unary_unary_rpc_method_handler( - servicer.GetDocument, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString, - ), - 'ListDocuments': grpc.unary_unary_rpc_method_handler( - servicer.ListDocuments, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.SerializeToString, - ), - 'CreateDocument': grpc.unary_unary_rpc_method_handler( - servicer.CreateDocument, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString, - ), - 'UpdateDocument': grpc.unary_unary_rpc_method_handler( - servicer.UpdateDocument, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString, - ), - 'DeleteDocument': grpc.unary_unary_rpc_method_handler( - servicer.DeleteDocument, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 
'BatchGetDocuments': grpc.unary_stream_rpc_method_handler( - servicer.BatchGetDocuments, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.SerializeToString, - ), - 'BeginTransaction': grpc.unary_unary_rpc_method_handler( - servicer.BeginTransaction, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.SerializeToString, - ), - 'Commit': grpc.unary_unary_rpc_method_handler( - servicer.Commit, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.SerializeToString, - ), - 'Rollback': grpc.unary_unary_rpc_method_handler( - servicer.Rollback, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'RunQuery': grpc.unary_stream_rpc_method_handler( - servicer.RunQuery, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.SerializeToString, - ), - 'Write': grpc.stream_stream_rpc_method_handler( - servicer.Write, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.SerializeToString, - ), - 'Listen': grpc.stream_stream_rpc_method_handler( - servicer.Listen, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.SerializeToString, - ), - 'ListCollectionIds': grpc.unary_unary_rpc_method_handler( - servicer.ListCollectionIds, - request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.FromString, - response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.firestore.v1beta1.Firestore', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) + rpc_method_handlers = { + "GetDocument": grpc.unary_unary_rpc_method_handler( + servicer.GetDocument, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.GetDocumentRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString, + ), + "ListDocuments": grpc.unary_unary_rpc_method_handler( + servicer.ListDocuments, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListDocumentsResponse.SerializeToString, + ), + "CreateDocument": grpc.unary_unary_rpc_method_handler( 
+ servicer.CreateDocument, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CreateDocumentRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString, + ), + "UpdateDocument": grpc.unary_unary_rpc_method_handler( + servicer.UpdateDocument, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.UpdateDocumentRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.Document.SerializeToString, + ), + "DeleteDocument": grpc.unary_unary_rpc_method_handler( + servicer.DeleteDocument, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DeleteDocumentRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "BatchGetDocuments": grpc.unary_stream_rpc_method_handler( + servicer.BatchGetDocuments, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BatchGetDocumentsResponse.SerializeToString, + ), + "BeginTransaction": grpc.unary_unary_rpc_method_handler( + servicer.BeginTransaction, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.BeginTransactionResponse.SerializeToString, + ), + "Commit": grpc.unary_unary_rpc_method_handler( + servicer.Commit, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.CommitResponse.SerializeToString, + ), + "Rollback": grpc.unary_unary_rpc_method_handler( + servicer.Rollback, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RollbackRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "RunQuery": grpc.unary_stream_rpc_method_handler( + servicer.RunQuery, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.RunQueryResponse.SerializeToString, + ), + "Write": grpc.stream_stream_rpc_method_handler( + servicer.Write, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.WriteResponse.SerializeToString, + ), + "Listen": grpc.stream_stream_rpc_method_handler( + servicer.Listen, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListenResponse.SerializeToString, + ), + "ListCollectionIds": grpc.unary_unary_rpc_method_handler( + servicer.ListCollectionIds, + request_deserializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsRequest.FromString, + response_serializer=google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.ListCollectionIdsResponse.SerializeToString, + ), + } + 
generic_handler = grpc.method_handlers_generic_handler( + "google.firestore.v1beta1.Firestore", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py index 6f3c4468661a..74b0f834f21b 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py @@ -2,628 +2,947 @@ # source: google/cloud/firestore_v1beta1/proto/query.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.firestore_v1beta1.proto import document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2 +from google.cloud.firestore_v1beta1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2, +) from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/firestore_v1beta1/proto/query.proto', - package='google.firestore.v1beta1', - syntax='proto3', - serialized_pb=_b('\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xb9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\xec\x02\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 
\x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value\"\x97\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00\"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,]) - + name="google/cloud/firestore_v1beta1/proto/query.proto", + package="google.firestore.v1beta1", + syntax="proto3", + serialized_pb=_b( + '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto"\xb9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 
\x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\xec\x02\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x97\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, + google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, + ], +) _STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor( - name='Operator', - full_name='google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATOR_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='AND', index=1, number=1, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1161, - serialized_end=1206, + name="Operator", + full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATOR_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="AND", index=1, number=1, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=1161, + serialized_end=1206, ) 
_sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR) _STRUCTUREDQUERY_FIELDFILTER_OPERATOR = _descriptor.EnumDescriptor( - name='Operator', - full_name='google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATOR_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='LESS_THAN', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='LESS_THAN_OR_EQUAL', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='GREATER_THAN', index=3, number=3, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='GREATER_THAN_OR_EQUAL', index=4, number=4, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='EQUAL', index=5, number=5, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ARRAY_CONTAINS', index=6, number=7, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1422, - serialized_end=1573, + name="Operator", + full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATOR_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="LESS_THAN", index=1, number=1, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="LESS_THAN_OR_EQUAL", index=2, number=2, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="GREATER_THAN", index=3, number=3, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="GREATER_THAN_OR_EQUAL", index=4, number=4, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="EQUAL", index=5, number=5, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ARRAY_CONTAINS", index=6, number=7, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=1422, + serialized_end=1573, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_FIELDFILTER_OPERATOR) _STRUCTUREDQUERY_UNARYFILTER_OPERATOR = _descriptor.EnumDescriptor( - name='Operator', - full_name='google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='OPERATOR_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='IS_NAN', index=1, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='IS_NULL', index=2, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1742, - serialized_end=1803, + name="Operator", + full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATOR_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="IS_NAN", index=1, number=2, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="IS_NULL", index=2, number=3, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=1742, + serialized_end=1803, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_UNARYFILTER_OPERATOR) _STRUCTUREDQUERY_DIRECTION = _descriptor.EnumDescriptor( - name='Direction', - 
full_name='google.firestore.v1beta1.StructuredQuery.Direction', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='DIRECTION_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ASCENDING', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DESCENDING', index=2, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=2102, - serialized_end=2171, + name="Direction", + full_name="google.firestore.v1beta1.StructuredQuery.Direction", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="DIRECTION_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ASCENDING", index=1, number=1, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DESCENDING", index=2, number=2, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=2102, + serialized_end=2171, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_DIRECTION) _STRUCTUREDQUERY_COLLECTIONSELECTOR = _descriptor.Descriptor( - name='CollectionSelector', - full_name='google.firestore.v1beta1.StructuredQuery.CollectionSelector', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='collection_id', full_name='google.firestore.v1beta1.StructuredQuery.CollectionSelector.collection_id', index=0, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='all_descendants', full_name='google.firestore.v1beta1.StructuredQuery.CollectionSelector.all_descendants', index=1, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=653, - serialized_end=721, + name="CollectionSelector", + full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="collection_id", + full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector.collection_id", + index=0, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="all_descendants", + full_name="google.firestore.v1beta1.StructuredQuery.CollectionSelector.all_descendants", + index=1, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=653, + serialized_end=721, ) 
_STRUCTUREDQUERY_FILTER = _descriptor.Descriptor( - name='Filter', - full_name='google.firestore.v1beta1.StructuredQuery.Filter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='composite_filter', full_name='google.firestore.v1beta1.StructuredQuery.Filter.composite_filter', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='field_filter', full_name='google.firestore.v1beta1.StructuredQuery.Filter.field_filter', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='unary_filter', full_name='google.firestore.v1beta1.StructuredQuery.Filter.unary_filter', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='filter_type', full_name='google.firestore.v1beta1.StructuredQuery.Filter.filter_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=724, - serialized_end=992, + name="Filter", + full_name="google.firestore.v1beta1.StructuredQuery.Filter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="composite_filter", + full_name="google.firestore.v1beta1.StructuredQuery.Filter.composite_filter", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="field_filter", + full_name="google.firestore.v1beta1.StructuredQuery.Filter.field_filter", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="unary_filter", + full_name="google.firestore.v1beta1.StructuredQuery.Filter.unary_filter", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="filter_type", + full_name="google.firestore.v1beta1.StructuredQuery.Filter.filter_type", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=724, + serialized_end=992, ) _STRUCTUREDQUERY_COMPOSITEFILTER = _descriptor.Descriptor( - name='CompositeFilter', - full_name='google.firestore.v1beta1.StructuredQuery.CompositeFilter', - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='op', full_name='google.firestore.v1beta1.StructuredQuery.CompositeFilter.op', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='filters', full_name='google.firestore.v1beta1.StructuredQuery.CompositeFilter.filters', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=995, - serialized_end=1206, + name="CompositeFilter", + full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="op", + full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.op", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filters", + full_name="google.firestore.v1beta1.StructuredQuery.CompositeFilter.filters", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=995, + serialized_end=1206, ) _STRUCTUREDQUERY_FIELDFILTER = _descriptor.Descriptor( - name='FieldFilter', - full_name='google.firestore.v1beta1.StructuredQuery.FieldFilter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field', full_name='google.firestore.v1beta1.StructuredQuery.FieldFilter.field', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='op', full_name='google.firestore.v1beta1.StructuredQuery.FieldFilter.op', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.firestore.v1beta1.StructuredQuery.FieldFilter.value', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - 
_STRUCTUREDQUERY_FIELDFILTER_OPERATOR, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1209, - serialized_end=1573, + name="FieldFilter", + full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field", + full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.field", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="op", + full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.op", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.firestore.v1beta1.StructuredQuery.FieldFilter.value", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_STRUCTUREDQUERY_FIELDFILTER_OPERATOR], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1209, + serialized_end=1573, ) _STRUCTUREDQUERY_UNARYFILTER = _descriptor.Descriptor( - name='UnaryFilter', - full_name='google.firestore.v1beta1.StructuredQuery.UnaryFilter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='op', full_name='google.firestore.v1beta1.StructuredQuery.UnaryFilter.op', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='field', full_name='google.firestore.v1beta1.StructuredQuery.UnaryFilter.field', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _STRUCTUREDQUERY_UNARYFILTER_OPERATOR, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='operand_type', full_name='google.firestore.v1beta1.StructuredQuery.UnaryFilter.operand_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1576, - serialized_end=1819, + name="UnaryFilter", + full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="op", + full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.op", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="field", + full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.field", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_STRUCTUREDQUERY_UNARYFILTER_OPERATOR], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="operand_type", + full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter.operand_type", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1576, + serialized_end=1819, ) _STRUCTUREDQUERY_ORDER = _descriptor.Descriptor( - name='Order', - full_name='google.firestore.v1beta1.StructuredQuery.Order', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field', full_name='google.firestore.v1beta1.StructuredQuery.Order.field', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='direction', full_name='google.firestore.v1beta1.StructuredQuery.Order.direction', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1822, - serialized_end=1974, + name="Order", + full_name="google.firestore.v1beta1.StructuredQuery.Order", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field", + full_name="google.firestore.v1beta1.StructuredQuery.Order.field", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="direction", + full_name="google.firestore.v1beta1.StructuredQuery.Order.direction", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1822, + serialized_end=1974, ) _STRUCTUREDQUERY_FIELDREFERENCE = _descriptor.Descriptor( - name='FieldReference', - full_name='google.firestore.v1beta1.StructuredQuery.FieldReference', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field_path', full_name='google.firestore.v1beta1.StructuredQuery.FieldReference.field_path', index=0, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1976, - serialized_end=2012, + name="FieldReference", + full_name="google.firestore.v1beta1.StructuredQuery.FieldReference", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field_path", + full_name="google.firestore.v1beta1.StructuredQuery.FieldReference.field_path", + index=0, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1976, + serialized_end=2012, ) _STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor( - name='Projection', - full_name='google.firestore.v1beta1.StructuredQuery.Projection', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='fields', full_name='google.firestore.v1beta1.StructuredQuery.Projection.fields', index=0, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2014, - serialized_end=2100, + name="Projection", + full_name="google.firestore.v1beta1.StructuredQuery.Projection", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="fields", + full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields", + index=0, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2014, + serialized_end=2100, ) _STRUCTUREDQUERY = _descriptor.Descriptor( - name='StructuredQuery', - full_name='google.firestore.v1beta1.StructuredQuery', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='select', full_name='google.firestore.v1beta1.StructuredQuery.select', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='from', full_name='google.firestore.v1beta1.StructuredQuery.from', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='where', 
full_name='google.firestore.v1beta1.StructuredQuery.where', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='order_by', full_name='google.firestore.v1beta1.StructuredQuery.order_by', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='start_at', full_name='google.firestore.v1beta1.StructuredQuery.start_at', index=4, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='end_at', full_name='google.firestore.v1beta1.StructuredQuery.end_at', index=5, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='offset', full_name='google.firestore.v1beta1.StructuredQuery.offset', index=6, - number=6, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='limit', full_name='google.firestore.v1beta1.StructuredQuery.limit', index=7, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_STRUCTUREDQUERY_COLLECTIONSELECTOR, _STRUCTUREDQUERY_FILTER, _STRUCTUREDQUERY_COMPOSITEFILTER, _STRUCTUREDQUERY_FIELDFILTER, _STRUCTUREDQUERY_UNARYFILTER, _STRUCTUREDQUERY_ORDER, _STRUCTUREDQUERY_FIELDREFERENCE, _STRUCTUREDQUERY_PROJECTION, ], - enum_types=[ - _STRUCTUREDQUERY_DIRECTION, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=194, - serialized_end=2171, + name="StructuredQuery", + full_name="google.firestore.v1beta1.StructuredQuery", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="select", + full_name="google.firestore.v1beta1.StructuredQuery.select", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="from", + full_name="google.firestore.v1beta1.StructuredQuery.from", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="where", + full_name="google.firestore.v1beta1.StructuredQuery.where", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, 
+ has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="order_by", + full_name="google.firestore.v1beta1.StructuredQuery.order_by", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="start_at", + full_name="google.firestore.v1beta1.StructuredQuery.start_at", + index=4, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_at", + full_name="google.firestore.v1beta1.StructuredQuery.end_at", + index=5, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="offset", + full_name="google.firestore.v1beta1.StructuredQuery.offset", + index=6, + number=6, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="limit", + full_name="google.firestore.v1beta1.StructuredQuery.limit", + index=7, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[ + _STRUCTUREDQUERY_COLLECTIONSELECTOR, + _STRUCTUREDQUERY_FILTER, + _STRUCTUREDQUERY_COMPOSITEFILTER, + _STRUCTUREDQUERY_FIELDFILTER, + _STRUCTUREDQUERY_UNARYFILTER, + _STRUCTUREDQUERY_ORDER, + _STRUCTUREDQUERY_FIELDREFERENCE, + _STRUCTUREDQUERY_PROJECTION, + ], + enum_types=[_STRUCTUREDQUERY_DIRECTION], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=194, + serialized_end=2171, ) _CURSOR = _descriptor.Descriptor( - name='Cursor', - full_name='google.firestore.v1beta1.Cursor', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='values', full_name='google.firestore.v1beta1.Cursor.values', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='before', full_name='google.firestore.v1beta1.Cursor.before', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2173, 
- serialized_end=2246, + name="Cursor", + full_name="google.firestore.v1beta1.Cursor", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="values", + full_name="google.firestore.v1beta1.Cursor.values", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="before", + full_name="google.firestore.v1beta1.Cursor.before", + index=1, + number=2, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2173, + serialized_end=2246, ) _STRUCTUREDQUERY_COLLECTIONSELECTOR.containing_type = _STRUCTUREDQUERY -_STRUCTUREDQUERY_FILTER.fields_by_name['composite_filter'].message_type = _STRUCTUREDQUERY_COMPOSITEFILTER -_STRUCTUREDQUERY_FILTER.fields_by_name['field_filter'].message_type = _STRUCTUREDQUERY_FIELDFILTER -_STRUCTUREDQUERY_FILTER.fields_by_name['unary_filter'].message_type = _STRUCTUREDQUERY_UNARYFILTER +_STRUCTUREDQUERY_FILTER.fields_by_name[ + "composite_filter" +].message_type = _STRUCTUREDQUERY_COMPOSITEFILTER +_STRUCTUREDQUERY_FILTER.fields_by_name[ + "field_filter" +].message_type = _STRUCTUREDQUERY_FIELDFILTER +_STRUCTUREDQUERY_FILTER.fields_by_name[ + "unary_filter" +].message_type = _STRUCTUREDQUERY_UNARYFILTER _STRUCTUREDQUERY_FILTER.containing_type = _STRUCTUREDQUERY -_STRUCTUREDQUERY_FILTER.oneofs_by_name['filter_type'].fields.append( - _STRUCTUREDQUERY_FILTER.fields_by_name['composite_filter']) -_STRUCTUREDQUERY_FILTER.fields_by_name['composite_filter'].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name['filter_type'] -_STRUCTUREDQUERY_FILTER.oneofs_by_name['filter_type'].fields.append( - _STRUCTUREDQUERY_FILTER.fields_by_name['field_filter']) -_STRUCTUREDQUERY_FILTER.fields_by_name['field_filter'].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name['filter_type'] -_STRUCTUREDQUERY_FILTER.oneofs_by_name['filter_type'].fields.append( - _STRUCTUREDQUERY_FILTER.fields_by_name['unary_filter']) -_STRUCTUREDQUERY_FILTER.fields_by_name['unary_filter'].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name['filter_type'] -_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name['op'].enum_type = _STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR -_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name['filters'].message_type = _STRUCTUREDQUERY_FILTER +_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append( + _STRUCTUREDQUERY_FILTER.fields_by_name["composite_filter"] +) +_STRUCTUREDQUERY_FILTER.fields_by_name[ + "composite_filter" +].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"] +_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append( + _STRUCTUREDQUERY_FILTER.fields_by_name["field_filter"] +) +_STRUCTUREDQUERY_FILTER.fields_by_name[ + "field_filter" +].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"] +_STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"].fields.append( + _STRUCTUREDQUERY_FILTER.fields_by_name["unary_filter"] +) +_STRUCTUREDQUERY_FILTER.fields_by_name[ + "unary_filter" 
+].containing_oneof = _STRUCTUREDQUERY_FILTER.oneofs_by_name["filter_type"] +_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name[ + "op" +].enum_type = _STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR +_STRUCTUREDQUERY_COMPOSITEFILTER.fields_by_name[ + "filters" +].message_type = _STRUCTUREDQUERY_FILTER _STRUCTUREDQUERY_COMPOSITEFILTER.containing_type = _STRUCTUREDQUERY -_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_COMPOSITEFILTER -_STRUCTUREDQUERY_FIELDFILTER.fields_by_name['field'].message_type = _STRUCTUREDQUERY_FIELDREFERENCE -_STRUCTUREDQUERY_FIELDFILTER.fields_by_name['op'].enum_type = _STRUCTUREDQUERY_FIELDFILTER_OPERATOR -_STRUCTUREDQUERY_FIELDFILTER.fields_by_name['value'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE +_STRUCTUREDQUERY_COMPOSITEFILTER_OPERATOR.containing_type = ( + _STRUCTUREDQUERY_COMPOSITEFILTER +) +_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[ + "field" +].message_type = _STRUCTUREDQUERY_FIELDREFERENCE +_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[ + "op" +].enum_type = _STRUCTUREDQUERY_FIELDFILTER_OPERATOR +_STRUCTUREDQUERY_FIELDFILTER.fields_by_name[ + "value" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE +) _STRUCTUREDQUERY_FIELDFILTER.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_FIELDFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_FIELDFILTER -_STRUCTUREDQUERY_UNARYFILTER.fields_by_name['op'].enum_type = _STRUCTUREDQUERY_UNARYFILTER_OPERATOR -_STRUCTUREDQUERY_UNARYFILTER.fields_by_name['field'].message_type = _STRUCTUREDQUERY_FIELDREFERENCE +_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[ + "op" +].enum_type = _STRUCTUREDQUERY_UNARYFILTER_OPERATOR +_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[ + "field" +].message_type = _STRUCTUREDQUERY_FIELDREFERENCE _STRUCTUREDQUERY_UNARYFILTER.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_UNARYFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_UNARYFILTER -_STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name['operand_type'].fields.append( - _STRUCTUREDQUERY_UNARYFILTER.fields_by_name['field']) -_STRUCTUREDQUERY_UNARYFILTER.fields_by_name['field'].containing_oneof = _STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name['operand_type'] -_STRUCTUREDQUERY_ORDER.fields_by_name['field'].message_type = _STRUCTUREDQUERY_FIELDREFERENCE -_STRUCTUREDQUERY_ORDER.fields_by_name['direction'].enum_type = _STRUCTUREDQUERY_DIRECTION +_STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name["operand_type"].fields.append( + _STRUCTUREDQUERY_UNARYFILTER.fields_by_name["field"] +) +_STRUCTUREDQUERY_UNARYFILTER.fields_by_name[ + "field" +].containing_oneof = _STRUCTUREDQUERY_UNARYFILTER.oneofs_by_name["operand_type"] +_STRUCTUREDQUERY_ORDER.fields_by_name[ + "field" +].message_type = _STRUCTUREDQUERY_FIELDREFERENCE +_STRUCTUREDQUERY_ORDER.fields_by_name[ + "direction" +].enum_type = _STRUCTUREDQUERY_DIRECTION _STRUCTUREDQUERY_ORDER.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_FIELDREFERENCE.containing_type = _STRUCTUREDQUERY -_STRUCTUREDQUERY_PROJECTION.fields_by_name['fields'].message_type = _STRUCTUREDQUERY_FIELDREFERENCE +_STRUCTUREDQUERY_PROJECTION.fields_by_name[ + "fields" +].message_type = _STRUCTUREDQUERY_FIELDREFERENCE _STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY -_STRUCTUREDQUERY.fields_by_name['select'].message_type = _STRUCTUREDQUERY_PROJECTION -_STRUCTUREDQUERY.fields_by_name['from'].message_type = _STRUCTUREDQUERY_COLLECTIONSELECTOR -_STRUCTUREDQUERY.fields_by_name['where'].message_type = 
_STRUCTUREDQUERY_FILTER -_STRUCTUREDQUERY.fields_by_name['order_by'].message_type = _STRUCTUREDQUERY_ORDER -_STRUCTUREDQUERY.fields_by_name['start_at'].message_type = _CURSOR -_STRUCTUREDQUERY.fields_by_name['end_at'].message_type = _CURSOR -_STRUCTUREDQUERY.fields_by_name['limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE +_STRUCTUREDQUERY.fields_by_name["select"].message_type = _STRUCTUREDQUERY_PROJECTION +_STRUCTUREDQUERY.fields_by_name[ + "from" +].message_type = _STRUCTUREDQUERY_COLLECTIONSELECTOR +_STRUCTUREDQUERY.fields_by_name["where"].message_type = _STRUCTUREDQUERY_FILTER +_STRUCTUREDQUERY.fields_by_name["order_by"].message_type = _STRUCTUREDQUERY_ORDER +_STRUCTUREDQUERY.fields_by_name["start_at"].message_type = _CURSOR +_STRUCTUREDQUERY.fields_by_name["end_at"].message_type = _CURSOR +_STRUCTUREDQUERY.fields_by_name[ + "limit" +].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE _STRUCTUREDQUERY_DIRECTION.containing_type = _STRUCTUREDQUERY -_CURSOR.fields_by_name['values'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE -DESCRIPTOR.message_types_by_name['StructuredQuery'] = _STRUCTUREDQUERY -DESCRIPTOR.message_types_by_name['Cursor'] = _CURSOR +_CURSOR.fields_by_name[ + "values" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE +) +DESCRIPTOR.message_types_by_name["StructuredQuery"] = _STRUCTUREDQUERY +DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR _sym_db.RegisterFileDescriptor(DESCRIPTOR) -StructuredQuery = _reflection.GeneratedProtocolMessageType('StructuredQuery', (_message.Message,), dict( - - CollectionSelector = _reflection.GeneratedProtocolMessageType('CollectionSelector', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_COLLECTIONSELECTOR, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A selection of a collection, such as ``messages as m1``. +StructuredQuery = _reflection.GeneratedProtocolMessageType( + "StructuredQuery", + (_message.Message,), + dict( + CollectionSelector=_reflection.GeneratedProtocolMessageType( + "CollectionSelector", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_COLLECTIONSELECTOR, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A selection of a collection, such as ``messages as m1``. Attributes: @@ -636,15 +955,16 @@ ``RunQueryRequest``. When true, selects all descendant collections. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CollectionSelector) - )) - , - - Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_FILTER, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A filter. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CollectionSelector) + ), + ), + Filter=_reflection.GeneratedProtocolMessageType( + "Filter", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_FILTER, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A filter. Attributes: @@ -657,15 +977,16 @@ unary_filter: A filter that takes exactly one argument. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Filter) - )) - , - - CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_COMPOSITEFILTER, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A filter that merges multiple other filters using the given operator. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Filter) + ), + ), + CompositeFilter=_reflection.GeneratedProtocolMessageType( + "CompositeFilter", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_COMPOSITEFILTER, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A filter that merges multiple other filters using the given operator. Attributes: @@ -675,15 +996,16 @@ The list of filters to combine. Must contain at least one filter. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CompositeFilter) - )) - , - - FieldFilter = _reflection.GeneratedProtocolMessageType('FieldFilter', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_FIELDFILTER, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A filter on a specific field. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.CompositeFilter) + ), + ), + FieldFilter=_reflection.GeneratedProtocolMessageType( + "FieldFilter", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_FIELDFILTER, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A filter on a specific field. Attributes: @@ -694,15 +1016,16 @@ value: The value to compare to. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldFilter) - )) - , - - UnaryFilter = _reflection.GeneratedProtocolMessageType('UnaryFilter', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_UNARYFILTER, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A filter with a single operand. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldFilter) + ), + ), + UnaryFilter=_reflection.GeneratedProtocolMessageType( + "UnaryFilter", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_UNARYFILTER, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A filter with a single operand. Attributes: @@ -713,15 +1036,16 @@ field: The field to which to apply the operator. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.UnaryFilter) - )) - , - - Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_ORDER, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """An order on a field. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.UnaryFilter) + ), + ), + Order=_reflection.GeneratedProtocolMessageType( + "Order", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_ORDER, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""An order on a field. Attributes: @@ -730,25 +1054,27 @@ direction: The direction to order by. Defaults to ``ASCENDING``. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Order) - )) - , - - FieldReference = _reflection.GeneratedProtocolMessageType('FieldReference', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_FIELDREFERENCE, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A reference to a field, such as ``max(messages.time) as max_time``. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Order) + ), + ), + FieldReference=_reflection.GeneratedProtocolMessageType( + "FieldReference", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_FIELDREFERENCE, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A reference to a field, such as ``max(messages.time) as max_time``. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldReference) - )) - , - - Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( - DESCRIPTOR = _STRUCTUREDQUERY_PROJECTION, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """The projection of document's fields to return. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldReference) + ), + ), + Projection=_reflection.GeneratedProtocolMessageType( + "Projection", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_PROJECTION, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""The projection of document's fields to return. Attributes: @@ -756,13 +1082,12 @@ The fields to return. If empty, all fields are returned. To only return the name of the document, use ``['__name__']``. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection) - )) - , - DESCRIPTOR = _STRUCTUREDQUERY, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A Firestore query. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection) + ), + ), + DESCRIPTOR=_STRUCTUREDQUERY, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A Firestore query. Attributes: @@ -798,8 +1123,9 @@ The maximum number of results to return. Applies after all other constraints. Must be >= 0 if specified. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery) + ), +) _sym_db.RegisterMessage(StructuredQuery) _sym_db.RegisterMessage(StructuredQuery.CollectionSelector) _sym_db.RegisterMessage(StructuredQuery.Filter) @@ -810,11 +1136,13 @@ _sym_db.RegisterMessage(StructuredQuery.FieldReference) _sym_db.RegisterMessage(StructuredQuery.Projection) -Cursor = _reflection.GeneratedProtocolMessageType('Cursor', (_message.Message,), dict( - DESCRIPTOR = _CURSOR, - __module__ = 'google.cloud.firestore_v1beta1.proto.query_pb2' - , - __doc__ = """A position in a query result set. +Cursor = _reflection.GeneratedProtocolMessageType( + "Cursor", + (_message.Message,), + dict( + DESCRIPTOR=_CURSOR, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A position in a query result set. Attributes: @@ -826,11 +1154,17 @@ If the position is just before or just after the given values, relative to the sort order defined by the query. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Cursor) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Cursor) + ), +) _sym_db.RegisterMessage(Cursor) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.firestore.v1beta1B\nQueryProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\034com.google.firestore.v1beta1B\nQueryProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" + ), +) # @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py index a89435267cb2..07cb78fe03a9 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2_grpc.py @@ -1,3 +1,2 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc - diff --git a/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py index fb451d0031ef..bc025b0f3681 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py @@ -2,1311 +2,2189 @@ # source: test.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from google.cloud.firestore_v1beta1.proto import common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2 -from google.cloud.firestore_v1beta1.proto import document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2 -from google.cloud.firestore_v1beta1.proto import firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2 -from google.cloud.firestore_v1beta1.proto import query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2 +from google.cloud.firestore_v1beta1.proto import ( + common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2, +) +from google.cloud.firestore_v1beta1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2, +) +from google.cloud.firestore_v1beta1.proto import ( + firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2, +) +from google.cloud.firestore_v1beta1.proto import ( + query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2, +) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name='test.proto', - package='tests', - syntax='proto3', - 
serialized_pb=_b('\n\ntest.proto\x12\x05tests\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\'\n\tTestSuite\x12\x1a\n\x05tests\x18\x01 \x03(\x0b\x32\x0b.tests.Test\"\xc8\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x1d\n\x03get\x18\x02 \x01(\x0b\x32\x0e.tests.GetTestH\x00\x12#\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x11.tests.CreateTestH\x00\x12\x1d\n\x03set\x18\x04 \x01(\x0b\x32\x0e.tests.SetTestH\x00\x12#\n\x06update\x18\x05 \x01(\x0b\x32\x11.tests.UpdateTestH\x00\x12.\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x16.tests.UpdatePathsTestH\x00\x12#\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x11.tests.DeleteTestH\x00\x12!\n\x05query\x18\x08 \x01(\x0b\x32\x10.tests.QueryTestH\x00\x12#\n\x06listen\x18\t \x01(\x0b\x32\x11.tests.ListenTestH\x00\x42\x06\n\x04test\"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest\"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08\"\xa0\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12 \n\x06option\x18\x02 \x01(\x0b\x32\x10.tests.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08\"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08\"\xed\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12%\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x10.tests.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08\"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08\":\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12 \n\x06\x66ields\x18\x02 \x03(\x0b\x32\x10.tests.FieldPath\"\x8a\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12\x1e\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\r.tests.Clause\x12\x38\n\x05query\x18\x03 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08\"\xa8\x02\n\x06\x43lause\x12\x1f\n\x06select\x18\x01 \x01(\x0b\x32\r.tests.SelectH\x00\x12\x1d\n\x05where\x18\x02 \x01(\x0b\x32\x0c.tests.WhereH\x00\x12\"\n\x08order_by\x18\x03 \x01(\x0b\x32\x0e.tests.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12!\n\x08start_at\x18\x06 \x01(\x0b\x32\r.tests.CursorH\x00\x12$\n\x0bstart_after\x18\x07 \x01(\x0b\x32\r.tests.CursorH\x00\x12\x1f\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\r.tests.CursorH\x00\x12#\n\nend_before\x18\t 
\x01(\x0b\x32\r.tests.CursorH\x00\x42\x08\n\x06\x63lause\"*\n\x06Select\x12 \n\x06\x66ields\x18\x01 \x03(\x0b\x32\x10.tests.FieldPath\"G\n\x05Where\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t\"<\n\x07OrderBy\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t\"G\n\x06\x43ursor\x12(\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x12.tests.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t\".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t\"\x7f\n\nListenTest\x12;\n\tresponses\x18\x01 \x03(\x0b\x32(.google.firestore.v1beta1.ListenResponse\x12\"\n\tsnapshots\x18\x02 \x03(\x0b\x32\x0f.tests.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08\"\x8e\x01\n\x08Snapshot\x12\x30\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\".google.firestore.v1beta1.Document\x12!\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x10.tests.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xcb\x01\n\tDocChange\x12#\n\x04kind\x18\x01 \x01(\x0e\x32\x15.tests.DocChange.Kind\x12/\n\x03\x64oc\x18\x02 \x01(\x0b\x32\".google.firestore.v1beta1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05\"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02\"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3') - , - dependencies=[google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - + name="test.proto", + package="tests", + syntax="proto3", + serialized_pb=_b( + '\n\ntest.proto\x12\x05tests\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\'\n\tTestSuite\x12\x1a\n\x05tests\x18\x01 \x03(\x0b\x32\x0b.tests.Test"\xc8\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x1d\n\x03get\x18\x02 \x01(\x0b\x32\x0e.tests.GetTestH\x00\x12#\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x11.tests.CreateTestH\x00\x12\x1d\n\x03set\x18\x04 \x01(\x0b\x32\x0e.tests.SetTestH\x00\x12#\n\x06update\x18\x05 \x01(\x0b\x32\x11.tests.UpdateTestH\x00\x12.\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x16.tests.UpdatePathsTestH\x00\x12#\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x11.tests.DeleteTestH\x00\x12!\n\x05query\x18\x08 \x01(\x0b\x32\x10.tests.QueryTestH\x00\x12#\n\x06listen\x18\t \x01(\x0b\x32\x11.tests.ListenTestH\x00\x42\x06\n\x04test"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xa0\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12 
\n\x06option\x18\x02 \x01(\x0b\x32\x10.tests.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xed\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12%\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x10.tests.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08":\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12 \n\x06\x66ields\x18\x02 \x03(\x0b\x32\x10.tests.FieldPath"\x8a\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12\x1e\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\r.tests.Clause\x12\x38\n\x05query\x18\x03 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xa8\x02\n\x06\x43lause\x12\x1f\n\x06select\x18\x01 \x01(\x0b\x32\r.tests.SelectH\x00\x12\x1d\n\x05where\x18\x02 \x01(\x0b\x32\x0c.tests.WhereH\x00\x12"\n\x08order_by\x18\x03 \x01(\x0b\x32\x0e.tests.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12!\n\x08start_at\x18\x06 \x01(\x0b\x32\r.tests.CursorH\x00\x12$\n\x0bstart_after\x18\x07 \x01(\x0b\x32\r.tests.CursorH\x00\x12\x1f\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\r.tests.CursorH\x00\x12#\n\nend_before\x18\t \x01(\x0b\x32\r.tests.CursorH\x00\x42\x08\n\x06\x63lause"*\n\x06Select\x12 \n\x06\x66ields\x18\x01 \x03(\x0b\x32\x10.tests.FieldPath"G\n\x05Where\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"<\n\x07OrderBy\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"G\n\x06\x43ursor\x12(\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x12.tests.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x7f\n\nListenTest\x12;\n\tresponses\x18\x01 \x03(\x0b\x32(.google.firestore.v1beta1.ListenResponse\x12"\n\tsnapshots\x18\x02 \x03(\x0b\x32\x0f.tests.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x8e\x01\n\x08Snapshot\x12\x30\n\x04\x64ocs\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12!\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x10.tests.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xcb\x01\n\tDocChange\x12#\n\x04kind\x18\x01 \x01(\x0e\x32\x15.tests.DocChange.Kind\x12/\n\x03\x64oc\x18\x02 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 
\x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3' + ), + dependencies=[ + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _DOCCHANGE_KIND = _descriptor.EnumDescriptor( - name='Kind', - full_name='tests.DocChange.Kind', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='KIND_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='ADDED', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='REMOVED', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MODIFIED', index=3, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=2874, - serialized_end=2940, + name="Kind", + full_name="tests.DocChange.Kind", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ADDED", index=1, number=1, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="REMOVED", index=2, number=2, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="MODIFIED", index=3, number=3, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=2874, + serialized_end=2940, ) _sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND) _TESTSUITE = _descriptor.Descriptor( - name='TestSuite', - full_name='tests.TestSuite', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='tests', full_name='tests.TestSuite.tests', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=262, - serialized_end=301, + name="TestSuite", + full_name="tests.TestSuite", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="tests", + full_name="tests.TestSuite.tests", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=262, + serialized_end=301, ) _TEST = _descriptor.Descriptor( - name='Test', - full_name='tests.Test', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name='description', full_name='tests.Test.description', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='get', full_name='tests.Test.get', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='create', full_name='tests.Test.create', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='set', full_name='tests.Test.set', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update', full_name='tests.Test.update', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_paths', full_name='tests.Test.update_paths', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='delete', full_name='tests.Test.delete', index=6, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='query', full_name='tests.Test.query', index=7, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='listen', full_name='tests.Test.listen', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='test', full_name='tests.Test.test', - index=0, containing_type=None, fields=[]), - ], - serialized_start=304, - serialized_end=632, + name="Test", + full_name="tests.Test", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="description", + full_name="tests.Test.description", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="get", + full_name="tests.Test.get", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create", + full_name="tests.Test.create", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="set", + full_name="tests.Test.set", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update", + full_name="tests.Test.update", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_paths", + full_name="tests.Test.update_paths", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="delete", + full_name="tests.Test.delete", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="query", + full_name="tests.Test.query", + index=7, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="listen", + full_name="tests.Test.listen", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="test", + full_name="tests.Test.test", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=304, + serialized_end=632, ) _GETTEST = _descriptor.Descriptor( - name='GetTest', - full_name='tests.GetTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='doc_ref_path', full_name='tests.GetTest.doc_ref_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='request', full_name='tests.GetTest.request', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=634, - serialized_end=728, + name="GetTest", + full_name="tests.GetTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.GetTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.GetTest.request", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=634, + serialized_end=728, ) _CREATETEST = _descriptor.Descriptor( - name='CreateTest', - full_name='tests.CreateTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='doc_ref_path', full_name='tests.CreateTest.doc_ref_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_data', full_name='tests.CreateTest.json_data', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='request', full_name='tests.CreateTest.request', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_error', full_name='tests.CreateTest.is_error', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=731, - serialized_end=860, + name="CreateTest", + full_name="tests.CreateTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + 
full_name="tests.CreateTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.CreateTest.json_data", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.CreateTest.request", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.CreateTest.is_error", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=731, + serialized_end=860, ) _SETTEST = _descriptor.Descriptor( - name='SetTest', - full_name='tests.SetTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='doc_ref_path', full_name='tests.SetTest.doc_ref_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='option', full_name='tests.SetTest.option', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_data', full_name='tests.SetTest.json_data', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='request', full_name='tests.SetTest.request', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_error', full_name='tests.SetTest.is_error', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - 
serialized_start=863, - serialized_end=1023, + name="SetTest", + full_name="tests.SetTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.SetTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="option", + full_name="tests.SetTest.option", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.SetTest.json_data", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.SetTest.request", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.SetTest.is_error", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=863, + serialized_end=1023, ) _UPDATETEST = _descriptor.Descriptor( - name='UpdateTest', - full_name='tests.UpdateTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='doc_ref_path', full_name='tests.UpdateTest.doc_ref_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='precondition', full_name='tests.UpdateTest.precondition', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_data', full_name='tests.UpdateTest.json_data', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='request', full_name='tests.UpdateTest.request', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_error', full_name='tests.UpdateTest.is_error', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1026, - serialized_end=1217, + name="UpdateTest", + full_name="tests.UpdateTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.UpdateTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="precondition", + full_name="tests.UpdateTest.precondition", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.UpdateTest.json_data", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.UpdateTest.request", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.UpdateTest.is_error", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1026, + serialized_end=1217, ) _UPDATEPATHSTEST = _descriptor.Descriptor( - name='UpdatePathsTest', - full_name='tests.UpdatePathsTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='doc_ref_path', full_name='tests.UpdatePathsTest.doc_ref_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='precondition', full_name='tests.UpdatePathsTest.precondition', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='field_paths', full_name='tests.UpdatePathsTest.field_paths', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_values', full_name='tests.UpdatePathsTest.json_values', index=3, - number=4, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='request', full_name='tests.UpdatePathsTest.request', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_error', full_name='tests.UpdatePathsTest.is_error', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1220, - serialized_end=1457, + name="UpdatePathsTest", + full_name="tests.UpdatePathsTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.UpdatePathsTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="precondition", + full_name="tests.UpdatePathsTest.precondition", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="field_paths", + full_name="tests.UpdatePathsTest.field_paths", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_values", + full_name="tests.UpdatePathsTest.json_values", + index=3, + number=4, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.UpdatePathsTest.request", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="is_error", + full_name="tests.UpdatePathsTest.is_error", + index=5, + number=6, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1220, + serialized_end=1457, ) _DELETETEST = _descriptor.Descriptor( - name='DeleteTest', - full_name='tests.DeleteTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='doc_ref_path', full_name='tests.DeleteTest.doc_ref_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='precondition', full_name='tests.DeleteTest.precondition', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='request', full_name='tests.DeleteTest.request', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_error', full_name='tests.DeleteTest.is_error', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1460, - serialized_end=1632, + name="DeleteTest", + full_name="tests.DeleteTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.DeleteTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="precondition", + full_name="tests.DeleteTest.precondition", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.DeleteTest.request", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.DeleteTest.is_error", + 
index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1460, + serialized_end=1632, ) _SETOPTION = _descriptor.Descriptor( - name='SetOption', - full_name='tests.SetOption', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='all', full_name='tests.SetOption.all', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='fields', full_name='tests.SetOption.fields', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1634, - serialized_end=1692, + name="SetOption", + full_name="tests.SetOption", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="all", + full_name="tests.SetOption.all", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="fields", + full_name="tests.SetOption.fields", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1634, + serialized_end=1692, ) _QUERYTEST = _descriptor.Descriptor( - name='QueryTest', - full_name='tests.QueryTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='coll_path', full_name='tests.QueryTest.coll_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='clauses', full_name='tests.QueryTest.clauses', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='query', full_name='tests.QueryTest.query', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_error', full_name='tests.QueryTest.is_error', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1695, - serialized_end=1833, + name="QueryTest", + full_name="tests.QueryTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="coll_path", + full_name="tests.QueryTest.coll_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="clauses", + full_name="tests.QueryTest.clauses", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="query", + full_name="tests.QueryTest.query", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.QueryTest.is_error", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1695, + serialized_end=1833, ) _CLAUSE = _descriptor.Descriptor( - name='Clause', - full_name='tests.Clause', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='select', full_name='tests.Clause.select', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='where', full_name='tests.Clause.where', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='order_by', full_name='tests.Clause.order_by', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='offset', full_name='tests.Clause.offset', index=3, - number=4, type=5, cpp_type=1, label=1, - 
has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='limit', full_name='tests.Clause.limit', index=4, - number=5, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='start_at', full_name='tests.Clause.start_at', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='start_after', full_name='tests.Clause.start_after', index=6, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='end_at', full_name='tests.Clause.end_at', index=7, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='end_before', full_name='tests.Clause.end_before', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='clause', full_name='tests.Clause.clause', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1836, - serialized_end=2132, + name="Clause", + full_name="tests.Clause", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="select", + full_name="tests.Clause.select", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="where", + full_name="tests.Clause.where", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="order_by", + full_name="tests.Clause.order_by", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="offset", + full_name="tests.Clause.offset", + index=3, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="limit", + full_name="tests.Clause.limit", + index=4, + number=5, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="start_at", + full_name="tests.Clause.start_at", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="start_after", + full_name="tests.Clause.start_after", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_at", + full_name="tests.Clause.end_at", + index=7, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_before", + full_name="tests.Clause.end_before", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="clause", + full_name="tests.Clause.clause", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1836, + serialized_end=2132, ) _SELECT = _descriptor.Descriptor( - name='Select', - full_name='tests.Select', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='fields', full_name='tests.Select.fields', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2134, - serialized_end=2176, + name="Select", + full_name="tests.Select", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="fields", + full_name="tests.Select.fields", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2134, + serialized_end=2176, ) _WHERE = _descriptor.Descriptor( - name='Where', - full_name='tests.Where', - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='tests.Where.path', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='op', full_name='tests.Where.op', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_value', full_name='tests.Where.json_value', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2178, - serialized_end=2249, + name="Where", + full_name="tests.Where", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="path", + full_name="tests.Where.path", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="op", + full_name="tests.Where.op", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_value", + full_name="tests.Where.json_value", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2178, + serialized_end=2249, ) _ORDERBY = _descriptor.Descriptor( - name='OrderBy', - full_name='tests.OrderBy', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='tests.OrderBy.path', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='direction', full_name='tests.OrderBy.direction', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - 
serialized_start=2251, - serialized_end=2311, + name="OrderBy", + full_name="tests.OrderBy", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="path", + full_name="tests.OrderBy.path", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="direction", + full_name="tests.OrderBy.direction", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2251, + serialized_end=2311, ) _CURSOR = _descriptor.Descriptor( - name='Cursor', - full_name='tests.Cursor', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='doc_snapshot', full_name='tests.Cursor.doc_snapshot', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_values', full_name='tests.Cursor.json_values', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2313, - serialized_end=2384, + name="Cursor", + full_name="tests.Cursor", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_snapshot", + full_name="tests.Cursor.doc_snapshot", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_values", + full_name="tests.Cursor.json_values", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2313, + serialized_end=2384, ) _DOCSNAPSHOT = _descriptor.Descriptor( - name='DocSnapshot', - full_name='tests.DocSnapshot', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='tests.DocSnapshot.path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='json_data', full_name='tests.DocSnapshot.json_data', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2386, - serialized_end=2432, + name="DocSnapshot", + full_name="tests.DocSnapshot", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="path", + full_name="tests.DocSnapshot.path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.DocSnapshot.json_data", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2386, + serialized_end=2432, ) _FIELDPATH = _descriptor.Descriptor( - name='FieldPath', - full_name='tests.FieldPath', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field', full_name='tests.FieldPath.field', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2434, - serialized_end=2460, + name="FieldPath", + full_name="tests.FieldPath", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field", + full_name="tests.FieldPath.field", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2434, + serialized_end=2460, ) _LISTENTEST = _descriptor.Descriptor( - name='ListenTest', - full_name='tests.ListenTest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='responses', full_name='tests.ListenTest.responses', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='snapshots', 
full_name='tests.ListenTest.snapshots', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='is_error', full_name='tests.ListenTest.is_error', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2462, - serialized_end=2589, + name="ListenTest", + full_name="tests.ListenTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="responses", + full_name="tests.ListenTest.responses", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="snapshots", + full_name="tests.ListenTest.snapshots", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.ListenTest.is_error", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2462, + serialized_end=2589, ) _SNAPSHOT = _descriptor.Descriptor( - name='Snapshot', - full_name='tests.Snapshot', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='docs', full_name='tests.Snapshot.docs', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='changes', full_name='tests.Snapshot.changes', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='tests.Snapshot.read_time', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2592, - serialized_end=2734, + name="Snapshot", + full_name="tests.Snapshot", + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="docs", + full_name="tests.Snapshot.docs", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="changes", + full_name="tests.Snapshot.changes", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="tests.Snapshot.read_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2592, + serialized_end=2734, ) _DOCCHANGE = _descriptor.Descriptor( - name='DocChange', - full_name='tests.DocChange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='kind', full_name='tests.DocChange.kind', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='doc', full_name='tests.DocChange.doc', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='old_index', full_name='tests.DocChange.old_index', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='new_index', full_name='tests.DocChange.new_index', index=3, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _DOCCHANGE_KIND, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2737, - serialized_end=2940, + name="DocChange", + full_name="tests.DocChange", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="kind", + full_name="tests.DocChange.kind", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="doc", + full_name="tests.DocChange.doc", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="old_index", + full_name="tests.DocChange.old_index", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="new_index", + full_name="tests.DocChange.new_index", + index=3, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_DOCCHANGE_KIND], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2737, + serialized_end=2940, ) -_TESTSUITE.fields_by_name['tests'].message_type = _TEST -_TEST.fields_by_name['get'].message_type = _GETTEST -_TEST.fields_by_name['create'].message_type = _CREATETEST -_TEST.fields_by_name['set'].message_type = _SETTEST -_TEST.fields_by_name['update'].message_type = _UPDATETEST -_TEST.fields_by_name['update_paths'].message_type = _UPDATEPATHSTEST -_TEST.fields_by_name['delete'].message_type = _DELETETEST -_TEST.fields_by_name['query'].message_type = _QUERYTEST -_TEST.fields_by_name['listen'].message_type = _LISTENTEST -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['get']) -_TEST.fields_by_name['get'].containing_oneof = _TEST.oneofs_by_name['test'] -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['create']) -_TEST.fields_by_name['create'].containing_oneof = _TEST.oneofs_by_name['test'] -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['set']) -_TEST.fields_by_name['set'].containing_oneof = _TEST.oneofs_by_name['test'] -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['update']) -_TEST.fields_by_name['update'].containing_oneof = _TEST.oneofs_by_name['test'] -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['update_paths']) -_TEST.fields_by_name['update_paths'].containing_oneof = _TEST.oneofs_by_name['test'] -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['delete']) -_TEST.fields_by_name['delete'].containing_oneof = _TEST.oneofs_by_name['test'] -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['query']) -_TEST.fields_by_name['query'].containing_oneof = _TEST.oneofs_by_name['test'] -_TEST.oneofs_by_name['test'].fields.append( - _TEST.fields_by_name['listen']) -_TEST.fields_by_name['listen'].containing_oneof = _TEST.oneofs_by_name['test'] -_GETTEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST -_CREATETEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST -_SETTEST.fields_by_name['option'].message_type = _SETOPTION -_SETTEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST -_UPDATETEST.fields_by_name['precondition'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION 
-_UPDATETEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST -_UPDATEPATHSTEST.fields_by_name['precondition'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION -_UPDATEPATHSTEST.fields_by_name['field_paths'].message_type = _FIELDPATH -_UPDATEPATHSTEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST -_DELETETEST.fields_by_name['precondition'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION -_DELETETEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST -_SETOPTION.fields_by_name['fields'].message_type = _FIELDPATH -_QUERYTEST.fields_by_name['clauses'].message_type = _CLAUSE -_QUERYTEST.fields_by_name['query'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY -_CLAUSE.fields_by_name['select'].message_type = _SELECT -_CLAUSE.fields_by_name['where'].message_type = _WHERE -_CLAUSE.fields_by_name['order_by'].message_type = _ORDERBY -_CLAUSE.fields_by_name['start_at'].message_type = _CURSOR -_CLAUSE.fields_by_name['start_after'].message_type = _CURSOR -_CLAUSE.fields_by_name['end_at'].message_type = _CURSOR -_CLAUSE.fields_by_name['end_before'].message_type = _CURSOR -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['select']) -_CLAUSE.fields_by_name['select'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['where']) -_CLAUSE.fields_by_name['where'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['order_by']) -_CLAUSE.fields_by_name['order_by'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['offset']) -_CLAUSE.fields_by_name['offset'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['limit']) -_CLAUSE.fields_by_name['limit'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['start_at']) -_CLAUSE.fields_by_name['start_at'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['start_after']) -_CLAUSE.fields_by_name['start_after'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['end_at']) -_CLAUSE.fields_by_name['end_at'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_CLAUSE.oneofs_by_name['clause'].fields.append( - _CLAUSE.fields_by_name['end_before']) -_CLAUSE.fields_by_name['end_before'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] -_SELECT.fields_by_name['fields'].message_type = _FIELDPATH -_WHERE.fields_by_name['path'].message_type = _FIELDPATH -_ORDERBY.fields_by_name['path'].message_type = _FIELDPATH -_CURSOR.fields_by_name['doc_snapshot'].message_type = _DOCSNAPSHOT -_LISTENTEST.fields_by_name['responses'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._LISTENRESPONSE -_LISTENTEST.fields_by_name['snapshots'].message_type = _SNAPSHOT -_SNAPSHOT.fields_by_name['docs'].message_type = 
google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_SNAPSHOT.fields_by_name['changes'].message_type = _DOCCHANGE -_SNAPSHOT.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DOCCHANGE.fields_by_name['kind'].enum_type = _DOCCHANGE_KIND -_DOCCHANGE.fields_by_name['doc'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +_TESTSUITE.fields_by_name["tests"].message_type = _TEST +_TEST.fields_by_name["get"].message_type = _GETTEST +_TEST.fields_by_name["create"].message_type = _CREATETEST +_TEST.fields_by_name["set"].message_type = _SETTEST +_TEST.fields_by_name["update"].message_type = _UPDATETEST +_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST +_TEST.fields_by_name["delete"].message_type = _DELETETEST +_TEST.fields_by_name["query"].message_type = _QUERYTEST +_TEST.fields_by_name["listen"].message_type = _LISTENTEST +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"]) +_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"]) +_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"]) +_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"]) +_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"]) +_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"]) +_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"]) +_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"]) +_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"] +_GETTEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST +) +_CREATETEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_SETTEST.fields_by_name["option"].message_type = _SETOPTION +_SETTEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_UPDATETEST.fields_by_name[ + "precondition" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION +) +_UPDATETEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_UPDATEPATHSTEST.fields_by_name[ + "precondition" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION +) +_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH +_UPDATEPATHSTEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_DELETETEST.fields_by_name[ + "precondition" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION +) 
+_DELETETEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH +_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE +_QUERYTEST.fields_by_name[ + "query" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY +) +_CLAUSE.fields_by_name["select"].message_type = _SELECT +_CLAUSE.fields_by_name["where"].message_type = _WHERE +_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY +_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR +_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR +_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR +_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"]) +_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"]) +_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"]) +_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"]) +_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"]) +_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"]) +_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"]) +_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[ + "clause" +] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"]) +_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"]) +_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_SELECT.fields_by_name["fields"].message_type = _FIELDPATH +_WHERE.fields_by_name["path"].message_type = _FIELDPATH +_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH +_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT +_LISTENTEST.fields_by_name[ + "responses" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._LISTENRESPONSE +) +_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT +_SNAPSHOT.fields_by_name[ + "docs" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE +_SNAPSHOT.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND +_DOCCHANGE.fields_by_name[ + "doc" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) _DOCCHANGE_KIND.containing_type = _DOCCHANGE -DESCRIPTOR.message_types_by_name['TestSuite'] = _TESTSUITE -DESCRIPTOR.message_types_by_name['Test'] = _TEST -DESCRIPTOR.message_types_by_name['GetTest'] = 
_GETTEST -DESCRIPTOR.message_types_by_name['CreateTest'] = _CREATETEST -DESCRIPTOR.message_types_by_name['SetTest'] = _SETTEST -DESCRIPTOR.message_types_by_name['UpdateTest'] = _UPDATETEST -DESCRIPTOR.message_types_by_name['UpdatePathsTest'] = _UPDATEPATHSTEST -DESCRIPTOR.message_types_by_name['DeleteTest'] = _DELETETEST -DESCRIPTOR.message_types_by_name['SetOption'] = _SETOPTION -DESCRIPTOR.message_types_by_name['QueryTest'] = _QUERYTEST -DESCRIPTOR.message_types_by_name['Clause'] = _CLAUSE -DESCRIPTOR.message_types_by_name['Select'] = _SELECT -DESCRIPTOR.message_types_by_name['Where'] = _WHERE -DESCRIPTOR.message_types_by_name['OrderBy'] = _ORDERBY -DESCRIPTOR.message_types_by_name['Cursor'] = _CURSOR -DESCRIPTOR.message_types_by_name['DocSnapshot'] = _DOCSNAPSHOT -DESCRIPTOR.message_types_by_name['FieldPath'] = _FIELDPATH -DESCRIPTOR.message_types_by_name['ListenTest'] = _LISTENTEST -DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT -DESCRIPTOR.message_types_by_name['DocChange'] = _DOCCHANGE +DESCRIPTOR.message_types_by_name["TestSuite"] = _TESTSUITE +DESCRIPTOR.message_types_by_name["Test"] = _TEST +DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST +DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST +DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST +DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST +DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST +DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST +DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION +DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST +DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE +DESCRIPTOR.message_types_by_name["Select"] = _SELECT +DESCRIPTOR.message_types_by_name["Where"] = _WHERE +DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY +DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR +DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT +DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH +DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST +DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT +DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -TestSuite = _reflection.GeneratedProtocolMessageType('TestSuite', (_message.Message,), dict( - DESCRIPTOR = _TESTSUITE, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.TestSuite) - )) +TestSuite = _reflection.GeneratedProtocolMessageType( + "TestSuite", + (_message.Message,), + dict( + DESCRIPTOR=_TESTSUITE, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.TestSuite) + ), +) _sym_db.RegisterMessage(TestSuite) -Test = _reflection.GeneratedProtocolMessageType('Test', (_message.Message,), dict( - DESCRIPTOR = _TEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.Test) - )) +Test = _reflection.GeneratedProtocolMessageType( + "Test", + (_message.Message,), + dict( + DESCRIPTOR=_TEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.Test) + ), +) _sym_db.RegisterMessage(Test) -GetTest = _reflection.GeneratedProtocolMessageType('GetTest', (_message.Message,), dict( - DESCRIPTOR = _GETTEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.GetTest) - )) +GetTest = _reflection.GeneratedProtocolMessageType( + "GetTest", + (_message.Message,), + dict( + DESCRIPTOR=_GETTEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.GetTest) + ), +) _sym_db.RegisterMessage(GetTest) 
-CreateTest = _reflection.GeneratedProtocolMessageType('CreateTest', (_message.Message,), dict( - DESCRIPTOR = _CREATETEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.CreateTest) - )) +CreateTest = _reflection.GeneratedProtocolMessageType( + "CreateTest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATETEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.CreateTest) + ), +) _sym_db.RegisterMessage(CreateTest) -SetTest = _reflection.GeneratedProtocolMessageType('SetTest', (_message.Message,), dict( - DESCRIPTOR = _SETTEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.SetTest) - )) +SetTest = _reflection.GeneratedProtocolMessageType( + "SetTest", + (_message.Message,), + dict( + DESCRIPTOR=_SETTEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.SetTest) + ), +) _sym_db.RegisterMessage(SetTest) -UpdateTest = _reflection.GeneratedProtocolMessageType('UpdateTest', (_message.Message,), dict( - DESCRIPTOR = _UPDATETEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.UpdateTest) - )) +UpdateTest = _reflection.GeneratedProtocolMessageType( + "UpdateTest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATETEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.UpdateTest) + ), +) _sym_db.RegisterMessage(UpdateTest) -UpdatePathsTest = _reflection.GeneratedProtocolMessageType('UpdatePathsTest', (_message.Message,), dict( - DESCRIPTOR = _UPDATEPATHSTEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.UpdatePathsTest) - )) +UpdatePathsTest = _reflection.GeneratedProtocolMessageType( + "UpdatePathsTest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEPATHSTEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.UpdatePathsTest) + ), +) _sym_db.RegisterMessage(UpdatePathsTest) -DeleteTest = _reflection.GeneratedProtocolMessageType('DeleteTest', (_message.Message,), dict( - DESCRIPTOR = _DELETETEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.DeleteTest) - )) +DeleteTest = _reflection.GeneratedProtocolMessageType( + "DeleteTest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETETEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.DeleteTest) + ), +) _sym_db.RegisterMessage(DeleteTest) -SetOption = _reflection.GeneratedProtocolMessageType('SetOption', (_message.Message,), dict( - DESCRIPTOR = _SETOPTION, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.SetOption) - )) +SetOption = _reflection.GeneratedProtocolMessageType( + "SetOption", + (_message.Message,), + dict( + DESCRIPTOR=_SETOPTION, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.SetOption) + ), +) _sym_db.RegisterMessage(SetOption) -QueryTest = _reflection.GeneratedProtocolMessageType('QueryTest', (_message.Message,), dict( - DESCRIPTOR = _QUERYTEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.QueryTest) - )) +QueryTest = _reflection.GeneratedProtocolMessageType( + "QueryTest", + (_message.Message,), + dict( + DESCRIPTOR=_QUERYTEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.QueryTest) + ), +) _sym_db.RegisterMessage(QueryTest) -Clause = _reflection.GeneratedProtocolMessageType('Clause', (_message.Message,), dict( - DESCRIPTOR = _CLAUSE, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.Clause) - )) +Clause = _reflection.GeneratedProtocolMessageType( + "Clause", + 
(_message.Message,), + dict( + DESCRIPTOR=_CLAUSE, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.Clause) + ), +) _sym_db.RegisterMessage(Clause) -Select = _reflection.GeneratedProtocolMessageType('Select', (_message.Message,), dict( - DESCRIPTOR = _SELECT, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.Select) - )) +Select = _reflection.GeneratedProtocolMessageType( + "Select", + (_message.Message,), + dict( + DESCRIPTOR=_SELECT, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.Select) + ), +) _sym_db.RegisterMessage(Select) -Where = _reflection.GeneratedProtocolMessageType('Where', (_message.Message,), dict( - DESCRIPTOR = _WHERE, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.Where) - )) +Where = _reflection.GeneratedProtocolMessageType( + "Where", + (_message.Message,), + dict( + DESCRIPTOR=_WHERE, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.Where) + ), +) _sym_db.RegisterMessage(Where) -OrderBy = _reflection.GeneratedProtocolMessageType('OrderBy', (_message.Message,), dict( - DESCRIPTOR = _ORDERBY, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.OrderBy) - )) +OrderBy = _reflection.GeneratedProtocolMessageType( + "OrderBy", + (_message.Message,), + dict( + DESCRIPTOR=_ORDERBY, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.OrderBy) + ), +) _sym_db.RegisterMessage(OrderBy) -Cursor = _reflection.GeneratedProtocolMessageType('Cursor', (_message.Message,), dict( - DESCRIPTOR = _CURSOR, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.Cursor) - )) +Cursor = _reflection.GeneratedProtocolMessageType( + "Cursor", + (_message.Message,), + dict( + DESCRIPTOR=_CURSOR, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.Cursor) + ), +) _sym_db.RegisterMessage(Cursor) -DocSnapshot = _reflection.GeneratedProtocolMessageType('DocSnapshot', (_message.Message,), dict( - DESCRIPTOR = _DOCSNAPSHOT, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.DocSnapshot) - )) +DocSnapshot = _reflection.GeneratedProtocolMessageType( + "DocSnapshot", + (_message.Message,), + dict( + DESCRIPTOR=_DOCSNAPSHOT, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.DocSnapshot) + ), +) _sym_db.RegisterMessage(DocSnapshot) -FieldPath = _reflection.GeneratedProtocolMessageType('FieldPath', (_message.Message,), dict( - DESCRIPTOR = _FIELDPATH, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.FieldPath) - )) +FieldPath = _reflection.GeneratedProtocolMessageType( + "FieldPath", + (_message.Message,), + dict( + DESCRIPTOR=_FIELDPATH, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.FieldPath) + ), +) _sym_db.RegisterMessage(FieldPath) -ListenTest = _reflection.GeneratedProtocolMessageType('ListenTest', (_message.Message,), dict( - DESCRIPTOR = _LISTENTEST, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.ListenTest) - )) +ListenTest = _reflection.GeneratedProtocolMessageType( + "ListenTest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTENTEST, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.ListenTest) + ), +) _sym_db.RegisterMessage(ListenTest) -Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( - DESCRIPTOR = _SNAPSHOT, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.Snapshot) - )) +Snapshot = 
_reflection.GeneratedProtocolMessageType( + "Snapshot", + (_message.Message,), + dict( + DESCRIPTOR=_SNAPSHOT, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.Snapshot) + ), +) _sym_db.RegisterMessage(Snapshot) -DocChange = _reflection.GeneratedProtocolMessageType('DocChange', (_message.Message,), dict( - DESCRIPTOR = _DOCCHANGE, - __module__ = 'test_pb2' - # @@protoc_insertion_point(class_scope:tests.DocChange) - )) +DocChange = _reflection.GeneratedProtocolMessageType( + "DocChange", + (_message.Message,), + dict( + DESCRIPTOR=_DOCCHANGE, + __module__="test_pb2" + # @@protoc_insertion_point(class_scope:tests.DocChange) + ), +) _sym_db.RegisterMessage(DocChange) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n&com.google.cloud.firestore.conformance\252\002\"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + '\n&com.google.cloud.firestore.conformance\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance' + ), +) # @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py index 2f13c48d8530..d766ce29bd27 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py @@ -2,468 +2,775 @@ # source: google/cloud/firestore_v1beta1/proto/write.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.firestore_v1beta1.proto import common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2 -from google.cloud.firestore_v1beta1.proto import document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2 +from google.cloud.firestore_v1beta1.proto import ( + common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2, +) +from google.cloud.firestore_v1beta1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2, +) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/firestore_v1beta1/proto/write.proto', - package='google.firestore.v1beta1', - syntax='proto3', - serialized_pb=_b('\n0google/cloud/firestore_v1beta1/proto/write.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x9d\x02\n\x05Write\x12\x34\n\x06update\x18\x01 \x01(\x0b\x32\".google.firestore.v1beta1.DocumentH\x00\x12\x10\n\x06\x64\x65lete\x18\x02 \x01(\tH\x00\x12@\n\ttransform\x18\x06 \x01(\x0b\x32+.google.firestore.v1beta1.DocumentTransformH\x00\x12;\n\x0bupdate_mask\x18\x03 
\x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.PreconditionB\x0b\n\toperation\"\xea\x03\n\x11\x44ocumentTransform\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12T\n\x10\x66ield_transforms\x18\x02 \x03(\x0b\x32:.google.firestore.v1beta1.DocumentTransform.FieldTransform\x1a\xec\x02\n\x0e\x46ieldTransform\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x65\n\x13set_to_server_value\x18\x02 \x01(\x0e\x32\x46.google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValueH\x00\x12G\n\x17\x61ppend_missing_elements\x18\x06 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x45\n\x15remove_all_from_array\x18\x07 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\"=\n\x0bServerValue\x12\x1c\n\x18SERVER_VALUE_UNSPECIFIED\x10\x00\x12\x10\n\x0cREQUEST_TIME\x10\x01\x42\x10\n\x0etransform_type\"z\n\x0bWriteResult\x12/\n\x0bupdate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x11transform_results\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\"v\n\x0e\x44ocumentChange\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.firestore.v1beta1.Document\x12\x12\n\ntarget_ids\x18\x05 \x03(\x05\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05\"m\n\x0e\x44ocumentDelete\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"m\n\x0e\x44ocumentRemove\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x02 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"3\n\x0f\x45xistenceFilter\x12\x11\n\ttarget_id\x18\x01 \x01(\x05\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nWriteProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - + name="google/cloud/firestore_v1beta1/proto/write.proto", + package="google.firestore.v1beta1", + syntax="proto3", + serialized_pb=_b( + '\n0google/cloud/firestore_v1beta1/proto/write.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x9d\x02\n\x05Write\x12\x34\n\x06update\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x10\n\x06\x64\x65lete\x18\x02 \x01(\tH\x00\x12@\n\ttransform\x18\x06 \x01(\x0b\x32+.google.firestore.v1beta1.DocumentTransformH\x00\x12;\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.PreconditionB\x0b\n\toperation"\xea\x03\n\x11\x44ocumentTransform\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12T\n\x10\x66ield_transforms\x18\x02 \x03(\x0b\x32:.google.firestore.v1beta1.DocumentTransform.FieldTransform\x1a\xec\x02\n\x0e\x46ieldTransform\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x65\n\x13set_to_server_value\x18\x02 \x01(\x0e\x32\x46.google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValueH\x00\x12G\n\x17\x61ppend_missing_elements\x18\x06 
\x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x45\n\x15remove_all_from_array\x18\x07 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00"=\n\x0bServerValue\x12\x1c\n\x18SERVER_VALUE_UNSPECIFIED\x10\x00\x12\x10\n\x0cREQUEST_TIME\x10\x01\x42\x10\n\x0etransform_type"z\n\x0bWriteResult\x12/\n\x0bupdate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x11transform_results\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"v\n\x0e\x44ocumentChange\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x12\n\ntarget_ids\x18\x05 \x03(\x05\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05"m\n\x0e\x44ocumentDelete\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"m\n\x0e\x44ocumentRemove\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x02 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"3\n\x0f\x45xistenceFilter\x12\x11\n\ttarget_id\x18\x01 \x01(\x05\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nWriteProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE = _descriptor.EnumDescriptor( - name='ServerValue', - full_name='google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValue', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='SERVER_VALUE_UNSPECIFIED', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='REQUEST_TIME', index=1, number=1, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=945, - serialized_end=1006, + name="ServerValue", + full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValue", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="SERVER_VALUE_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="REQUEST_TIME", index=1, number=1, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=945, + serialized_end=1006, ) _sym_db.RegisterEnumDescriptor(_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE) _WRITE = _descriptor.Descriptor( - name='Write', - full_name='google.firestore.v1beta1.Write', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='update', full_name='google.firestore.v1beta1.Write.update', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='delete', full_name='google.firestore.v1beta1.Write.delete', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transform', full_name='google.firestore.v1beta1.Write.transform', index=2, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_mask', full_name='google.firestore.v1beta1.Write.update_mask', index=3, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='current_document', full_name='google.firestore.v1beta1.Write.current_document', index=4, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='operation', full_name='google.firestore.v1beta1.Write.operation', - index=0, containing_type=None, fields=[]), - ], - serialized_start=246, - serialized_end=531, + name="Write", + full_name="google.firestore.v1beta1.Write", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="update", + full_name="google.firestore.v1beta1.Write.update", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="delete", + full_name="google.firestore.v1beta1.Write.delete", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transform", + full_name="google.firestore.v1beta1.Write.transform", + index=2, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.firestore.v1beta1.Write.update_mask", + index=3, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="current_document", + full_name="google.firestore.v1beta1.Write.current_document", + index=4, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + 
syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="operation", + full_name="google.firestore.v1beta1.Write.operation", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=246, + serialized_end=531, ) _DOCUMENTTRANSFORM_FIELDTRANSFORM = _descriptor.Descriptor( - name='FieldTransform', - full_name='google.firestore.v1beta1.DocumentTransform.FieldTransform', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field_path', full_name='google.firestore.v1beta1.DocumentTransform.FieldTransform.field_path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='set_to_server_value', full_name='google.firestore.v1beta1.DocumentTransform.FieldTransform.set_to_server_value', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='append_missing_elements', full_name='google.firestore.v1beta1.DocumentTransform.FieldTransform.append_missing_elements', index=2, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='remove_all_from_array', full_name='google.firestore.v1beta1.DocumentTransform.FieldTransform.remove_all_from_array', index=3, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE, - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='transform_type', full_name='google.firestore.v1beta1.DocumentTransform.FieldTransform.transform_type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=660, - serialized_end=1024, + name="FieldTransform", + full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field_path", + full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.field_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="set_to_server_value", + full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.set_to_server_value", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="append_missing_elements", + 
full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.append_missing_elements", + index=2, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="remove_all_from_array", + full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.remove_all_from_array", + index=3, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="transform_type", + full_name="google.firestore.v1beta1.DocumentTransform.FieldTransform.transform_type", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=660, + serialized_end=1024, ) _DOCUMENTTRANSFORM = _descriptor.Descriptor( - name='DocumentTransform', - full_name='google.firestore.v1beta1.DocumentTransform', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='document', full_name='google.firestore.v1beta1.DocumentTransform.document', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='field_transforms', full_name='google.firestore.v1beta1.DocumentTransform.field_transforms', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_DOCUMENTTRANSFORM_FIELDTRANSFORM, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=534, - serialized_end=1024, + name="DocumentTransform", + full_name="google.firestore.v1beta1.DocumentTransform", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="document", + full_name="google.firestore.v1beta1.DocumentTransform.document", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="field_transforms", + full_name="google.firestore.v1beta1.DocumentTransform.field_transforms", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_DOCUMENTTRANSFORM_FIELDTRANSFORM], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=534, + 
serialized_end=1024, ) _WRITERESULT = _descriptor.Descriptor( - name='WriteResult', - full_name='google.firestore.v1beta1.WriteResult', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='update_time', full_name='google.firestore.v1beta1.WriteResult.update_time', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='transform_results', full_name='google.firestore.v1beta1.WriteResult.transform_results', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1026, - serialized_end=1148, + name="WriteResult", + full_name="google.firestore.v1beta1.WriteResult", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.firestore.v1beta1.WriteResult.update_time", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="transform_results", + full_name="google.firestore.v1beta1.WriteResult.transform_results", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1026, + serialized_end=1148, ) _DOCUMENTCHANGE = _descriptor.Descriptor( - name='DocumentChange', - full_name='google.firestore.v1beta1.DocumentChange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='document', full_name='google.firestore.v1beta1.DocumentChange.document', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='target_ids', full_name='google.firestore.v1beta1.DocumentChange.target_ids', index=1, - number=5, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='removed_target_ids', full_name='google.firestore.v1beta1.DocumentChange.removed_target_ids', index=2, - number=6, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - 
options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1150, - serialized_end=1268, + name="DocumentChange", + full_name="google.firestore.v1beta1.DocumentChange", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="document", + full_name="google.firestore.v1beta1.DocumentChange.document", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_ids", + full_name="google.firestore.v1beta1.DocumentChange.target_ids", + index=1, + number=5, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="removed_target_ids", + full_name="google.firestore.v1beta1.DocumentChange.removed_target_ids", + index=2, + number=6, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1150, + serialized_end=1268, ) _DOCUMENTDELETE = _descriptor.Descriptor( - name='DocumentDelete', - full_name='google.firestore.v1beta1.DocumentDelete', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='document', full_name='google.firestore.v1beta1.DocumentDelete.document', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='removed_target_ids', full_name='google.firestore.v1beta1.DocumentDelete.removed_target_ids', index=1, - number=6, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.DocumentDelete.read_time', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1270, - serialized_end=1379, + name="DocumentDelete", + full_name="google.firestore.v1beta1.DocumentDelete", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="document", + full_name="google.firestore.v1beta1.DocumentDelete.document", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="removed_target_ids", + full_name="google.firestore.v1beta1.DocumentDelete.removed_target_ids", + index=1, + number=6, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="google.firestore.v1beta1.DocumentDelete.read_time", + index=2, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1270, + serialized_end=1379, ) _DOCUMENTREMOVE = _descriptor.Descriptor( - name='DocumentRemove', - full_name='google.firestore.v1beta1.DocumentRemove', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='document', full_name='google.firestore.v1beta1.DocumentRemove.document', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='removed_target_ids', full_name='google.firestore.v1beta1.DocumentRemove.removed_target_ids', index=1, - number=2, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='read_time', full_name='google.firestore.v1beta1.DocumentRemove.read_time', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1381, - serialized_end=1490, + name="DocumentRemove", + full_name="google.firestore.v1beta1.DocumentRemove", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="document", + full_name="google.firestore.v1beta1.DocumentRemove.document", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="removed_target_ids", + full_name="google.firestore.v1beta1.DocumentRemove.removed_target_ids", + index=1, + number=2, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + 
full_name="google.firestore.v1beta1.DocumentRemove.read_time", + index=2, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1381, + serialized_end=1490, ) _EXISTENCEFILTER = _descriptor.Descriptor( - name='ExistenceFilter', - full_name='google.firestore.v1beta1.ExistenceFilter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='target_id', full_name='google.firestore.v1beta1.ExistenceFilter.target_id', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='count', full_name='google.firestore.v1beta1.ExistenceFilter.count', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1492, - serialized_end=1543, + name="ExistenceFilter", + full_name="google.firestore.v1beta1.ExistenceFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="target_id", + full_name="google.firestore.v1beta1.ExistenceFilter.target_id", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="count", + full_name="google.firestore.v1beta1.ExistenceFilter.count", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1492, + serialized_end=1543, ) -_WRITE.fields_by_name['update'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_WRITE.fields_by_name['transform'].message_type = _DOCUMENTTRANSFORM -_WRITE.fields_by_name['update_mask'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK -_WRITE.fields_by_name['current_document'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION -_WRITE.oneofs_by_name['operation'].fields.append( - _WRITE.fields_by_name['update']) -_WRITE.fields_by_name['update'].containing_oneof = _WRITE.oneofs_by_name['operation'] -_WRITE.oneofs_by_name['operation'].fields.append( - _WRITE.fields_by_name['delete']) -_WRITE.fields_by_name['delete'].containing_oneof = _WRITE.oneofs_by_name['operation'] 
-_WRITE.oneofs_by_name['operation'].fields.append( - _WRITE.fields_by_name['transform']) -_WRITE.fields_by_name['transform'].containing_oneof = _WRITE.oneofs_by_name['operation'] -_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['set_to_server_value'].enum_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE -_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['append_missing_elements'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE -_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['remove_all_from_array'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE +_WRITE.fields_by_name[ + "update" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_WRITE.fields_by_name["transform"].message_type = _DOCUMENTTRANSFORM +_WRITE.fields_by_name[ + "update_mask" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._DOCUMENTMASK +) +_WRITE.fields_by_name[ + "current_document" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION +) +_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["update"]) +_WRITE.fields_by_name["update"].containing_oneof = _WRITE.oneofs_by_name["operation"] +_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["delete"]) +_WRITE.fields_by_name["delete"].containing_oneof = _WRITE.oneofs_by_name["operation"] +_WRITE.oneofs_by_name["operation"].fields.append(_WRITE.fields_by_name["transform"]) +_WRITE.fields_by_name["transform"].containing_oneof = _WRITE.oneofs_by_name["operation"] +_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[ + "set_to_server_value" +].enum_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE +_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[ + "append_missing_elements" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE +) +_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[ + "remove_all_from_array" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._ARRAYVALUE +) _DOCUMENTTRANSFORM_FIELDTRANSFORM.containing_type = _DOCUMENTTRANSFORM -_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE.containing_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM -_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name['transform_type'].fields.append( - _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['set_to_server_value']) -_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['set_to_server_value'].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name['transform_type'] -_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name['transform_type'].fields.append( - _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['append_missing_elements']) -_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['append_missing_elements'].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name['transform_type'] -_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name['transform_type'].fields.append( - _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['remove_all_from_array']) -_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name['remove_all_from_array'].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name['transform_type'] -_DOCUMENTTRANSFORM.fields_by_name['field_transforms'].message_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM -_WRITERESULT.fields_by_name['update_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
-_WRITERESULT.fields_by_name['transform_results'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE -_DOCUMENTCHANGE.fields_by_name['document'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT -_DOCUMENTDELETE.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DOCUMENTREMOVE.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -DESCRIPTOR.message_types_by_name['Write'] = _WRITE -DESCRIPTOR.message_types_by_name['DocumentTransform'] = _DOCUMENTTRANSFORM -DESCRIPTOR.message_types_by_name['WriteResult'] = _WRITERESULT -DESCRIPTOR.message_types_by_name['DocumentChange'] = _DOCUMENTCHANGE -DESCRIPTOR.message_types_by_name['DocumentDelete'] = _DOCUMENTDELETE -DESCRIPTOR.message_types_by_name['DocumentRemove'] = _DOCUMENTREMOVE -DESCRIPTOR.message_types_by_name['ExistenceFilter'] = _EXISTENCEFILTER +_DOCUMENTTRANSFORM_FIELDTRANSFORM_SERVERVALUE.containing_type = ( + _DOCUMENTTRANSFORM_FIELDTRANSFORM +) +_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append( + _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["set_to_server_value"] +) +_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[ + "set_to_server_value" +].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"] +_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append( + _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["append_missing_elements"] +) +_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[ + "append_missing_elements" +].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"] +_DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"].fields.append( + _DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name["remove_all_from_array"] +) +_DOCUMENTTRANSFORM_FIELDTRANSFORM.fields_by_name[ + "remove_all_from_array" +].containing_oneof = _DOCUMENTTRANSFORM_FIELDTRANSFORM.oneofs_by_name["transform_type"] +_DOCUMENTTRANSFORM.fields_by_name[ + "field_transforms" +].message_type = _DOCUMENTTRANSFORM_FIELDTRANSFORM +_WRITERESULT.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_WRITERESULT.fields_by_name[ + "transform_results" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._VALUE +) +_DOCUMENTCHANGE.fields_by_name[ + "document" +].message_type = ( + google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +) +_DOCUMENTDELETE.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DOCUMENTREMOVE.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +DESCRIPTOR.message_types_by_name["Write"] = _WRITE +DESCRIPTOR.message_types_by_name["DocumentTransform"] = _DOCUMENTTRANSFORM +DESCRIPTOR.message_types_by_name["WriteResult"] = _WRITERESULT +DESCRIPTOR.message_types_by_name["DocumentChange"] = _DOCUMENTCHANGE +DESCRIPTOR.message_types_by_name["DocumentDelete"] = _DOCUMENTDELETE +DESCRIPTOR.message_types_by_name["DocumentRemove"] = _DOCUMENTREMOVE +DESCRIPTOR.message_types_by_name["ExistenceFilter"] = _EXISTENCEFILTER _sym_db.RegisterFileDescriptor(DESCRIPTOR) -Write = _reflection.GeneratedProtocolMessageType('Write', (_message.Message,), dict( - DESCRIPTOR = _WRITE, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """A write on a document. 
+Write = _reflection.GeneratedProtocolMessageType( + "Write", + (_message.Message,), + dict( + DESCRIPTOR=_WRITE, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""A write on a document. Attributes: @@ -493,17 +800,22 @@ An optional precondition on the document. The write will fail if this is set and not met by the target document. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Write) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.Write) + ), +) _sym_db.RegisterMessage(Write) -DocumentTransform = _reflection.GeneratedProtocolMessageType('DocumentTransform', (_message.Message,), dict( - - FieldTransform = _reflection.GeneratedProtocolMessageType('FieldTransform', (_message.Message,), dict( - DESCRIPTOR = _DOCUMENTTRANSFORM_FIELDTRANSFORM, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """A transformation of a field of the document. +DocumentTransform = _reflection.GeneratedProtocolMessageType( + "DocumentTransform", + (_message.Message,), + dict( + FieldTransform=_reflection.GeneratedProtocolMessageType( + "FieldTransform", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENTTRANSFORM_FIELDTRANSFORM, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""A transformation of a field of the document. Attributes: @@ -535,13 +847,12 @@ equivalent values if there are duplicates. The corresponding transform\_result will be the null value. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform.FieldTransform) - )) - , - DESCRIPTOR = _DOCUMENTTRANSFORM, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """A transformation of a document. + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform.FieldTransform) + ), + ), + DESCRIPTOR=_DOCUMENTTRANSFORM, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""A transformation of a document. Attributes: @@ -551,16 +862,19 @@ The list of transformations to apply to the fields of the document, in order. This must not be empty. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentTransform) + ), +) _sym_db.RegisterMessage(DocumentTransform) _sym_db.RegisterMessage(DocumentTransform.FieldTransform) -WriteResult = _reflection.GeneratedProtocolMessageType('WriteResult', (_message.Message,), dict( - DESCRIPTOR = _WRITERESULT, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """The result of applying a write. +WriteResult = _reflection.GeneratedProtocolMessageType( + "WriteResult", + (_message.Message,), + dict( + DESCRIPTOR=_WRITERESULT, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""The result of applying a write. Attributes: @@ -573,15 +887,18 @@ ][google.firestore.v1beta1.DocumentTransform.FieldTransform], in the same order. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResult) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.WriteResult) + ), +) _sym_db.RegisterMessage(WriteResult) -DocumentChange = _reflection.GeneratedProtocolMessageType('DocumentChange', (_message.Message,), dict( - DESCRIPTOR = _DOCUMENTCHANGE, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """A [Document][google.firestore.v1beta1.Document] has changed. 
+DocumentChange = _reflection.GeneratedProtocolMessageType( + "DocumentChange", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENTCHANGE, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""A [Document][google.firestore.v1beta1.Document] has changed. May be the result of multiple [writes][google.firestore.v1beta1.Write], including deletes, that ultimately resulted in a new value for the @@ -603,15 +920,18 @@ A set of target IDs for targets that no longer match this document. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentChange) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentChange) + ), +) _sym_db.RegisterMessage(DocumentChange) -DocumentDelete = _reflection.GeneratedProtocolMessageType('DocumentDelete', (_message.Message,), dict( - DESCRIPTOR = _DOCUMENTDELETE, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """A [Document][google.firestore.v1beta1.Document] has been deleted. +DocumentDelete = _reflection.GeneratedProtocolMessageType( + "DocumentDelete", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENTDELETE, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""A [Document][google.firestore.v1beta1.Document] has been deleted. May be the result of multiple [writes][google.firestore.v1beta1.Write], including updates, the last of which deleted the @@ -634,15 +954,18 @@ The read timestamp at which the delete was observed. Greater or equal to the ``commit_time`` of the delete. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentDelete) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentDelete) + ), +) _sym_db.RegisterMessage(DocumentDelete) -DocumentRemove = _reflection.GeneratedProtocolMessageType('DocumentRemove', (_message.Message,), dict( - DESCRIPTOR = _DOCUMENTREMOVE, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """A [Document][google.firestore.v1beta1.Document] has been removed from +DocumentRemove = _reflection.GeneratedProtocolMessageType( + "DocumentRemove", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENTREMOVE, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""A [Document][google.firestore.v1beta1.Document] has been removed from the view of the targets. Sent if the document is no longer relevant to a target and is out of @@ -666,15 +989,18 @@ The read timestamp at which the remove was observed. Greater or equal to the ``commit_time`` of the change/delete/remove. """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentRemove) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.DocumentRemove) + ), +) _sym_db.RegisterMessage(DocumentRemove) -ExistenceFilter = _reflection.GeneratedProtocolMessageType('ExistenceFilter', (_message.Message,), dict( - DESCRIPTOR = _EXISTENCEFILTER, - __module__ = 'google.cloud.firestore_v1beta1.proto.write_pb2' - , - __doc__ = """A digest of all the documents that match a given target. +ExistenceFilter = _reflection.GeneratedProtocolMessageType( + "ExistenceFilter", + (_message.Message,), + dict( + DESCRIPTOR=_EXISTENCEFILTER, + __module__="google.cloud.firestore_v1beta1.proto.write_pb2", + __doc__="""A digest of all the documents that match a given target. Attributes: @@ -687,11 +1013,17 @@ client must manually determine which documents no longer match the target. 
""", - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ExistenceFilter) - )) + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.ExistenceFilter) + ), +) _sym_db.RegisterMessage(ExistenceFilter) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.firestore.v1beta1B\nWriteProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\034com.google.firestore.v1beta1B\nWriteProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" + ), +) # @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py index a89435267cb2..07cb78fe03a9 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/write_pb2_grpc.py @@ -1,3 +1,2 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc - diff --git a/firestore/google/cloud/firestore_v1beta1/query.py b/firestore/google/cloud/firestore_v1beta1/query.py index e52187e0c1fb..6860f45578be 100644 --- a/firestore/google/cloud/firestore_v1beta1/query.py +++ b/firestore/google/cloud/firestore_v1beta1/query.py @@ -33,37 +33,38 @@ from google.cloud.firestore_v1beta1.order import Order from google.cloud.firestore_v1beta1.watch import Watch -_EQ_OP = '==' +_EQ_OP = "==" _operator_enum = enums.StructuredQuery.FieldFilter.Operator _COMPARISON_OPERATORS = { - '<': _operator_enum.LESS_THAN, - '<=': _operator_enum.LESS_THAN_OR_EQUAL, + "<": _operator_enum.LESS_THAN, + "<=": _operator_enum.LESS_THAN_OR_EQUAL, _EQ_OP: _operator_enum.EQUAL, - '>=': _operator_enum.GREATER_THAN_OR_EQUAL, - '>': _operator_enum.GREATER_THAN, - 'array_contains': _operator_enum.ARRAY_CONTAINS, + ">=": _operator_enum.GREATER_THAN_OR_EQUAL, + ">": _operator_enum.GREATER_THAN, + "array_contains": _operator_enum.ARRAY_CONTAINS, } -_BAD_OP_STRING = 'Operator string {!r} is invalid. Valid choices are: {}.' -_BAD_OP_NAN_NULL = ( - 'Only an equality filter ("==") can be used with None or NaN values') -_INVALID_WHERE_TRANSFORM = 'Transforms cannot be used as where values.' -_BAD_DIR_STRING = 'Invalid direction {!r}. Must be one of {!r} or {!r}.' -_INVALID_CURSOR_TRANSFORM = 'Transforms cannot be used as cursor values.' +_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}." +_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values' +_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values." +_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}." +_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values." _MISSING_ORDER_BY = ( 'The "order by" field path {!r} is not present in the cursor data {!r}. 
' - 'All fields sent to ``order_by()`` must be present in the fields ' - 'if passed to one of ``start_at()`` / ``start_after()`` / ' - '``end_before()`` / ``end_at()`` to define a cursor.') + "All fields sent to ``order_by()`` must be present in the fields " + "if passed to one of ``start_at()`` / ``start_after()`` / " + "``end_before()`` / ``end_at()`` to define a cursor." +) _NO_ORDERS_FOR_CURSOR = ( - 'Attempting to create a cursor with no fields to order on. ' - 'When defining a cursor with one of ``start_at()`` / ``start_after()`` / ' - '``end_before()`` / ``end_at()``, all fields in the cursor must ' - 'come from fields set in ``order_by()``.') -_MISMATCH_CURSOR_W_ORDER_BY = ( - 'The cursor {!r} does not match the order fields {!r}.') + "Attempting to create a cursor with no fields to order on. " + "When defining a cursor with one of ``start_at()`` / ``start_after()`` / " + "``end_before()`` / ``end_at()``, all fields in the cursor must " + "come from fields set in ``order_by()``." +) +_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}." _EMPTY_DOC_TEMPLATE = ( - 'Unexpected server response. All responses other than the first must ' - 'contain a document. The response at index {} was\n{}.') + "Unexpected server response. All responses other than the first must " + "contain a document. The response at index {} was\n{}." +) class Query(object): @@ -115,14 +116,22 @@ class Query(object): will be used in the order given by ``orders``. """ - ASCENDING = 'ASCENDING' + ASCENDING = "ASCENDING" """str: Sort query results in ascending order on a field.""" - DESCENDING = 'DESCENDING' + DESCENDING = "DESCENDING" """str: Sort query results in descending order on a field.""" def __init__( - self, parent, projection=None, field_filters=(), orders=(), - limit=None, offset=None, start_at=None, end_at=None): + self, + parent, + projection=None, + field_filters=(), + orders=(), + limit=None, + offset=None, + start_at=None, + end_at=None, + ): self._parent = parent self._projection = projection self._field_filters = field_filters @@ -172,7 +181,7 @@ def select(self, field_paths): fields=[ query_pb2.StructuredQuery.FieldReference(field_path=field_path) for field_path in field_paths - ], + ] ) return self.__class__( self._parent, @@ -221,27 +230,21 @@ def where(self, field_path, op_string, value): if op_string != _EQ_OP: raise ValueError(_BAD_OP_NAN_NULL) filter_pb = query_pb2.StructuredQuery.UnaryFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL, ) elif _isnan(value): if op_string != _EQ_OP: raise ValueError(_BAD_OP_NAN_NULL) filter_pb = query_pb2.StructuredQuery.UnaryFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), op=enums.StructuredQuery.UnaryFilter.Operator.IS_NAN, ) elif isinstance(value, (transforms.Sentinel, transforms._ValueList)): raise ValueError(_INVALID_WHERE_TRANSFORM) else: filter_pb = query_pb2.StructuredQuery.FieldFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), op=_enum_from_op_string(op_string), value=_helpers.encode_value(value), ) @@ -288,9 +291,7 @@ def order_by(self, field_path, direction=ASCENDING): _helpers.split_field_path(field_path) # raises order_pb = 
query_pb2.StructuredQuery.Order( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), direction=_enum_from_direction(direction), ) @@ -393,18 +394,18 @@ def _cursor_helper(self, document_fields, before, start): cursor_pair = document_fields, before query_kwargs = { - 'projection': self._projection, - 'field_filters': self._field_filters, - 'orders': self._orders, - 'limit': self._limit, - 'offset': self._offset, + "projection": self._projection, + "field_filters": self._field_filters, + "orders": self._orders, + "limit": self._limit, + "offset": self._offset, } if start: - query_kwargs['start_at'] = cursor_pair - query_kwargs['end_at'] = self._end_at + query_kwargs["start_at"] = cursor_pair + query_kwargs["end_at"] = self._end_at else: - query_kwargs['start_at'] = self._start_at - query_kwargs['end_at'] = cursor_pair + query_kwargs["start_at"] = self._start_at + query_kwargs["end_at"] = cursor_pair return self.__class__(self._parent, **query_kwargs) @@ -543,12 +544,9 @@ def _filters_pb(self): else: composite_filter = query_pb2.StructuredQuery.CompositeFilter( op=enums.StructuredQuery.CompositeFilter.Operator.AND, - filters=[ - _filter_pb(filter_) for filter_ in self._field_filters - ], + filters=[_filter_pb(filter_) for filter_ in self._field_filters], ) - return query_pb2.StructuredQuery.Filter( - composite_filter=composite_filter) + return query_pb2.StructuredQuery.Filter(composite_filter=composite_filter) @staticmethod def _normalize_projection(projection): @@ -559,11 +557,9 @@ def _normalize_projection(projection): if not fields: field_ref = query_pb2.StructuredQuery.FieldReference( - field_path='__name__', - ) - return query_pb2.StructuredQuery.Projection( - fields=[field_ref], + field_path="__name__" ) + return query_pb2.StructuredQuery.Projection(fields=[field_ref]) return projection @@ -593,8 +589,7 @@ def _normalize_cursor(cursor, orders): document_fields = values if len(document_fields) != len(orders): - msg = _MISMATCH_CURSOR_W_ORDER_BY.format( - document_fields, order_keys) + msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys) raise ValueError(msg) _transform_bases = (transforms.Sentinel, transforms._ValueList) @@ -617,21 +612,21 @@ def _to_protobuf(self): end_at = self._normalize_cursor(self._end_at, self._orders) query_kwargs = { - 'select': projection, - 'from': [ + "select": projection, + "from": [ query_pb2.StructuredQuery.CollectionSelector( - collection_id=self._parent.id, - ), + collection_id=self._parent.id + ) ], - 'where': self._filters_pb(), - 'order_by': self._orders, - 'start_at': _cursor_pb(start_at), - 'end_at': _cursor_pb(end_at), + "where": self._filters_pb(), + "order_by": self._orders, + "start_at": _cursor_pb(start_at), + "end_at": _cursor_pb(end_at), } if self._offset is not None: - query_kwargs['offset'] = self._offset + query_kwargs["offset"] = self._offset if self._limit is not None: - query_kwargs['limit'] = wrappers_pb2.Int32Value(value=self._limit) + query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit) return query_pb2.StructuredQuery(**query_kwargs) @@ -662,19 +657,24 @@ def get(self, transaction=None): """ parent_path, expected_prefix = self._parent._parent_info() response_iterator = self._client._firestore_api.run_query( - parent_path, self._to_protobuf(), + parent_path, + self._to_protobuf(), transaction=_helpers.get_transaction_id(transaction), - metadata=self._client._rpc_metadata) + 
metadata=self._client._rpc_metadata, + ) empty_stream = False for index, response_pb in enumerate(response_iterator): if empty_stream: raise ValueError( - 'First response in stream was empty', - 'Received second response', response_pb) + "First response in stream was empty", + "Received second response", + response_pb, + ) snapshot, skipped_results = _query_response_to_snapshot( - response_pb, self._parent, expected_prefix) + response_pb, self._parent, expected_prefix + ) if snapshot is None: if index != 0: msg = _EMPTY_DOC_TEMPLATE.format(index, response_pb) @@ -709,10 +709,9 @@ def on_snapshot(query_snapshot): # Terminate this watch query_watch.unsubscribe() """ - return Watch.for_query(self, - callback, - document.DocumentSnapshot, - document.DocumentReference) + return Watch.for_query( + self, callback, document.DocumentSnapshot, document.DocumentReference + ) def _comparator(self, doc1, doc2): _orders = self._orders @@ -729,21 +728,20 @@ def _comparator(self, doc1, doc2): orderBys = list(_orders) order_pb = query_pb2.StructuredQuery.Order( - field=query_pb2.StructuredQuery.FieldReference( - field_path='id', - ), + field=query_pb2.StructuredQuery.FieldReference(field_path="id"), direction=_enum_from_direction(lastDirection), ) orderBys.append(order_pb) for orderBy in orderBys: - if orderBy.field.field_path == 'id': + if orderBy.field.field_path == "id": # If ordering by docuent id, compare resource paths. - comp = Order()._compare_to( - doc1.reference._path, doc2.reference._path) + comp = Order()._compare_to(doc1.reference._path, doc2.reference._path) else: - if orderBy.field.field_path not in doc1._data or \ - orderBy.field.field_path not in doc2._data: + if ( + orderBy.field.field_path not in doc1._data + or orderBy.field.field_path not in doc2._data + ): raise ValueError( "Can only compare fields that exist in the " "DocumentSnapshot. Please include the fields you are " @@ -755,7 +753,7 @@ def _comparator(self, doc1, doc2): encoded_v2 = _helpers.encode_value(v2) comp = Order().compare(encoded_v1, encoded_v2) - if (comp != 0): + if comp != 0: # 1 == Ascending, -1 == Descending return orderBy.direction * comp @@ -782,7 +780,7 @@ def _enum_from_op_string(op_string): try: return _COMPARISON_OPERATORS[op_string] except KeyError: - choices = ', '.join(sorted(_COMPARISON_OPERATORS.keys())) + choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys())) msg = _BAD_OP_STRING.format(op_string, choices) raise ValueError(msg) @@ -824,8 +822,7 @@ def _enum_from_direction(direction): elif direction == Query.DESCENDING: return enums.StructuredQuery.Direction.DESCENDING else: - msg = _BAD_DIR_STRING.format( - direction, Query.ASCENDING, Query.DESCENDING) + msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING) raise ValueError(msg) @@ -850,8 +847,7 @@ def _filter_pb(field_or_unary): elif isinstance(field_or_unary, query_pb2.StructuredQuery.UnaryFilter): return query_pb2.StructuredQuery.Filter(unary_filter=field_or_unary) else: - raise ValueError( - 'Unexpected filter type', type(field_or_unary), field_or_unary) + raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary) def _cursor_pb(cursor_pair): @@ -893,19 +889,18 @@ def _query_response_to_snapshot(response_pb, collection, expected_prefix): results. If ``response_pb.document`` is not set, the snapshot will be :data:`None`. 
""" - if not response_pb.HasField('document'): + if not response_pb.HasField("document"): return None, response_pb.skipped_results - document_id = _helpers.get_doc_id( - response_pb.document, expected_prefix) + document_id = _helpers.get_doc_id(response_pb.document, expected_prefix) reference = collection.document(document_id) - data = _helpers.decode_dict( - response_pb.document.fields, collection._client) + data = _helpers.decode_dict(response_pb.document.fields, collection._client) snapshot = document.DocumentSnapshot( reference, data, exists=True, read_time=response_pb.read_time, create_time=response_pb.document.create_time, - update_time=response_pb.document.update_time) + update_time=response_pb.document.update_time, + ) return snapshot, response_pb.skipped_results diff --git a/firestore/google/cloud/firestore_v1beta1/transaction.py b/firestore/google/cloud/firestore_v1beta1/transaction.py index 93d00519b46b..d7c01523b625 100644 --- a/firestore/google/cloud/firestore_v1beta1/transaction.py +++ b/firestore/google/cloud/firestore_v1beta1/transaction.py @@ -27,21 +27,19 @@ MAX_ATTEMPTS = 5 """int: Default number of transaction attempts (with retries).""" -_CANT_BEGIN = ( - 'The transaction has already begun. Current transaction ID: {!r}.') -_MISSING_ID_TEMPLATE = ( - 'The transaction has no transaction ID, so it cannot be {}.') -_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format('rolled back') -_CANT_COMMIT = _MISSING_ID_TEMPLATE.format('committed') -_WRITE_READ_ONLY = 'Cannot perform write operation in read-only transaction.' +_CANT_BEGIN = "The transaction has already begun. Current transaction ID: {!r}." +_MISSING_ID_TEMPLATE = "The transaction has no transaction ID, so it cannot be {}." +_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format("rolled back") +_CANT_COMMIT = _MISSING_ID_TEMPLATE.format("committed") +_WRITE_READ_ONLY = "Cannot perform write operation in read-only transaction." _INITIAL_SLEEP = 1.0 """float: Initial "max" for sleep interval. To be used in :func:`_sleep`.""" _MAX_SLEEP = 30.0 """float: Eventual "max" sleep time. To be used in :func:`_sleep`.""" _MULTIPLIER = 2.0 """float: Multiplier for exponential backoff. To be used in :func:`_sleep`.""" -_EXCEED_ATTEMPTS_TEMPLATE = 'Failed to commit transaction in {:d} attempts.' -_CANT_RETRY_READ_ONLY = 'Only read-write transactions can be retried.' +_EXCEED_ATTEMPTS_TEMPLATE = "Failed to commit transaction in {:d} attempts." +_CANT_RETRY_READ_ONLY = "Only read-write transactions can be retried." class Transaction(batch.WriteBatch): @@ -105,12 +103,13 @@ def _options_protobuf(self, retry_id): return types.TransactionOptions( read_write=types.TransactionOptions.ReadWrite( - retry_transaction=retry_id, - ), + retry_transaction=retry_id + ) ) elif self._read_only: return types.TransactionOptions( - read_only=types.TransactionOptions.ReadOnly()) + read_only=types.TransactionOptions.ReadOnly() + ) else: return None @@ -150,7 +149,7 @@ def _begin(self, retry_id=None): transaction_response = self._client._firestore_api.begin_transaction( self._client._database_string, options_=self._options_protobuf(retry_id), - metadata=self._client._rpc_metadata + metadata=self._client._rpc_metadata, ) self._id = transaction_response.transaction @@ -174,8 +173,10 @@ def _rollback(self): try: # NOTE: The response is just ``google.protobuf.Empty``. 
self._client._firestore_api.rollback( - self._client._database_string, self._id, - metadata=self._client._rpc_metadata) + self._client._database_string, + self._id, + metadata=self._client._rpc_metadata, + ) finally: self._clean_up() @@ -195,8 +196,7 @@ def _commit(self): if not self.in_progress: raise ValueError(_CANT_COMMIT) - commit_response = _commit_with_retry( - self._client, self._write_pbs, self._id) + commit_response = _commit_with_retry(self._client, self._write_pbs, self._id) self._clean_up() return list(commit_response.write_results) @@ -372,9 +372,11 @@ def _commit_with_retry(client, write_pbs, transaction_id): while True: try: return client._firestore_api.commit( - client._database_string, write_pbs, + client._database_string, + write_pbs, transaction=transaction_id, - metadata=client._rpc_metadata) + metadata=client._rpc_metadata, + ) except exceptions.ServiceUnavailable: # Retry pass diff --git a/firestore/google/cloud/firestore_v1beta1/transforms.py b/firestore/google/cloud/firestore_v1beta1/transforms.py index b3b73da20a16..4849eb63b6fe 100644 --- a/firestore/google/cloud/firestore_v1beta1/transforms.py +++ b/firestore/google/cloud/firestore_v1beta1/transforms.py @@ -17,7 +17,8 @@ class Sentinel(object): """Sentinel objects used to signal special handling.""" - __slots__ = ('description',) + + __slots__ = ("description",) def __init__(self, description): self.description = description @@ -30,7 +31,8 @@ def __repr__(self): SERVER_TIMESTAMP = Sentinel( - "Value used to set a document field to the server timestamp.") + "Value used to set a document field to the server timestamp." +) class _ValueList(object): @@ -39,7 +41,8 @@ class _ValueList(object): Args: values (List | Tuple): values held in the helper. """ - slots = ('_values',) + + slots = ("_values",) def __init__(self, values): if not isinstance(values, (list, tuple)): diff --git a/firestore/google/cloud/firestore_v1beta1/types.py b/firestore/google/cloud/firestore_v1beta1/types.py index 9e21515fa717..90c03b8aba2e 100644 --- a/firestore/google/cloud/firestore_v1beta1/types.py +++ b/firestore/google/cloud/firestore_v1beta1/types.py @@ -45,13 +45,7 @@ latlng_pb2, ] -_local_modules = [ - common_pb2, - document_pb2, - firestore_pb2, - query_pb2, - write_pb2, -] +_local_modules = [common_pb2, document_pb2, firestore_pb2, query_pb2, write_pb2] names = [] @@ -62,7 +56,7 @@ for module in _local_modules: for name, message in get_messages(module).items(): - message.__module__ = 'google.cloud.firestore_v1beta1.types' + message.__module__ = "google.cloud.firestore_v1beta1.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/firestore/google/cloud/firestore_v1beta1/watch.py b/firestore/google/cloud/firestore_v1beta1/watch.py index 1cdfe56598f2..05cc4f89c62b 100644 --- a/firestore/google/cloud/firestore_v1beta1/watch.py +++ b/firestore/google/cloud/firestore_v1beta1/watch.py @@ -37,35 +37,35 @@ WATCH_TARGET_ID = 0x5079 # "Py" GRPC_STATUS_CODE = { - 'OK': 0, - 'CANCELLED': 1, - 'UNKNOWN': 2, - 'INVALID_ARGUMENT': 3, - 'DEADLINE_EXCEEDED': 4, - 'NOT_FOUND': 5, - 'ALREADY_EXISTS': 6, - 'PERMISSION_DENIED': 7, - 'UNAUTHENTICATED': 16, - 'RESOURCE_EXHAUSTED': 8, - 'FAILED_PRECONDITION': 9, - 'ABORTED': 10, - 'OUT_OF_RANGE': 11, - 'UNIMPLEMENTED': 12, - 'INTERNAL': 13, - 'UNAVAILABLE': 14, - 'DATA_LOSS': 15, - 'DO_NOT_USE': -1 + "OK": 0, + "CANCELLED": 1, + "UNKNOWN": 2, + "INVALID_ARGUMENT": 3, + "DEADLINE_EXCEEDED": 4, + "NOT_FOUND": 5, + "ALREADY_EXISTS": 6, + "PERMISSION_DENIED": 7, + 
"UNAUTHENTICATED": 16, + "RESOURCE_EXHAUSTED": 8, + "FAILED_PRECONDITION": 9, + "ABORTED": 10, + "OUT_OF_RANGE": 11, + "UNIMPLEMENTED": 12, + "INTERNAL": 13, + "UNAVAILABLE": 14, + "DATA_LOSS": 15, + "DO_NOT_USE": -1, } -_RPC_ERROR_THREAD_NAME = 'Thread-OnRpcTerminated' +_RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" _RETRYABLE_STREAM_ERRORS = ( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, exceptions.InternalServerError, exceptions.Unknown, - exceptions.GatewayTimeout + exceptions.GatewayTimeout, ) -DocTreeEntry = collections.namedtuple('DocTreeEntry', ['value', 'index']) +DocTreeEntry = collections.namedtuple("DocTreeEntry", ["value", "index"]) class WatchDocTree(object): @@ -152,7 +152,7 @@ def _maybe_wrap_exception(exception): def document_watch_comparator(doc1, doc2): - assert doc1 == doc2, 'Document watches only support one document.' + assert doc1 == doc2, "Document watches only support one document." return 0 @@ -161,17 +161,18 @@ class Watch(object): BackgroundConsumer = BackgroundConsumer # FBO unit tests ResumableBidiRpc = ResumableBidiRpc # FBO unit tests - def __init__(self, - document_reference, - firestore, - target, - comparator, - snapshot_callback, - document_snapshot_cls, - document_reference_cls, - BackgroundConsumer=None, # FBO unit testing - ResumableBidiRpc=None, # FBO unit testing - ): + def __init__( + self, + document_reference, + firestore, + target, + comparator, + snapshot_callback, + document_snapshot_cls, + document_reference_cls, + BackgroundConsumer=None, # FBO unit testing + ResumableBidiRpc=None, # FBO unit testing + ): """ Args: firestore: @@ -203,21 +204,22 @@ def __init__(self, def should_recover(exc): # pragma: NO COVER return ( - isinstance(exc, grpc.RpcError) and - exc.code() == grpc.StatusCode.UNAVAILABLE) + isinstance(exc, grpc.RpcError) + and exc.code() == grpc.StatusCode.UNAVAILABLE + ) initial_request = firestore_pb2.ListenRequest( - database=self._firestore._database_string, - add_target=self._targets + database=self._firestore._database_string, add_target=self._targets ) if ResumableBidiRpc is None: ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests self._rpc = ResumableBidiRpc( - self._api.transport._stubs['firestore_stub'].Listen, + self._api.transport._stubs["firestore_stub"].Listen, initial_request=initial_request, - should_recover=should_recover) + should_recover=should_recover, + ) self._rpc.add_done_callback(self._on_rpc_done) @@ -274,14 +276,14 @@ def close(self, reason=None): # Stop consuming messages. if self.is_active: - _LOGGER.debug('Stopping consumer.') + _LOGGER.debug("Stopping consumer.") self._consumer.stop() self._consumer = None self._rpc.close() self._rpc = None self._closed = True - _LOGGER.debug('Finished stopping manager.') + _LOGGER.debug("Finished stopping manager.") if reason: # Raise an exception if a reason is provided @@ -301,13 +303,11 @@ def _on_rpc_done(self, future): with shutting everything down. This is to prevent blocking in the background consumer and preventing it from being ``joined()``. 
""" - _LOGGER.info( - 'RPC termination has signaled manager shutdown.') + _LOGGER.info("RPC termination has signaled manager shutdown.") future = _maybe_wrap_exception(future) thread = threading.Thread( - name=_RPC_ERROR_THREAD_NAME, - target=self.close, - kwargs={'reason': future}) + name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future} + ) thread.daemon = True thread.start() @@ -315,8 +315,13 @@ def unsubscribe(self): self.close() @classmethod - def for_document(cls, document_ref, snapshot_callback, - snapshot_class_instance, reference_class_instance): + def for_document( + cls, + document_ref, + snapshot_callback, + snapshot_class_instance, + reference_class_instance, + ): """ Creates a watch snapshot listener for a document. snapshot_callback receives a DocumentChange object, but may also start to get @@ -331,43 +336,42 @@ def for_document(cls, document_ref, snapshot_callback, references """ - return cls(document_ref, - document_ref._client, - { - 'documents': { - 'documents': [document_ref._document_path]}, - 'target_id': WATCH_TARGET_ID - }, - document_watch_comparator, - snapshot_callback, - snapshot_class_instance, - reference_class_instance) + return cls( + document_ref, + document_ref._client, + { + "documents": {"documents": [document_ref._document_path]}, + "target_id": WATCH_TARGET_ID, + }, + document_watch_comparator, + snapshot_callback, + snapshot_class_instance, + reference_class_instance, + ) @classmethod - def for_query(cls, query, snapshot_callback, snapshot_class_instance, - reference_class_instance): + def for_query( + cls, query, snapshot_callback, snapshot_class_instance, reference_class_instance + ): query_target = firestore_pb2.Target.QueryTarget( - parent=query._client._database_string, - structured_query=query._to_protobuf(), + parent=query._client._database_string, structured_query=query._to_protobuf() ) - return cls(query, - query._client, - { - 'query': query_target, - 'target_id': WATCH_TARGET_ID - }, - query._comparator, - snapshot_callback, - snapshot_class_instance, - reference_class_instance) + return cls( + query, + query._client, + {"query": query_target, "target_id": WATCH_TARGET_ID}, + query._comparator, + snapshot_callback, + snapshot_class_instance, + reference_class_instance, + ) def _on_snapshot_target_change_no_change(self, proto): - _LOGGER.debug('on_snapshot: target change: NO_CHANGE') + _LOGGER.debug("on_snapshot: target change: NO_CHANGE") change = proto.target_change - no_target_ids = (change.target_ids is None or - len(change.target_ids) == 0) + no_target_ids = change.target_ids is None or len(change.target_ids) == 0 if no_target_ids and change.read_time and self.current: # TargetChange.CURRENT followed by TargetChange.NO_CHANGE # signals a consistent state. Invoke the onSnapshot @@ -376,22 +380,23 @@ def _on_snapshot_target_change_no_change(self, proto): def _on_snapshot_target_change_add(self, proto): _LOGGER.debug("on_snapshot: target change: ADD") - assert WATCH_TARGET_ID == proto.target_change.target_ids[0], \ - 'Unexpected target ID sent by server' + assert ( + WATCH_TARGET_ID == proto.target_change.target_ids[0] + ), "Unexpected target ID sent by server" def _on_snapshot_target_change_remove(self, proto): _LOGGER.debug("on_snapshot: target change: REMOVE") change = proto.target_change code = 13 - message = 'internal error' + message = "internal error" if change.cause: code = change.cause.code message = change.cause.message # TODO: Consider surfacing a code property on the exception. 
# TODO: Consider a more exact exception - raise Exception('Error %s: %s' % (code, message)) + raise Exception("Error %s: %s" % (code, message)) def _on_snapshot_target_change_reset(self, proto): # Whatever changes have happened so far no longer matter. @@ -420,19 +425,20 @@ def on_snapshot(self, proto): TargetChange.REMOVE: self._on_snapshot_target_change_remove, TargetChange.RESET: self._on_snapshot_target_change_reset, TargetChange.CURRENT: self._on_snapshot_target_change_current, - } + } target_change = proto.target_change if str(target_change): target_change_type = target_change.target_change_type - _LOGGER.debug( - 'on_snapshot: target change: ' + str(target_change_type)) + _LOGGER.debug("on_snapshot: target change: " + str(target_change_type)) meth = target_changetype_dispatch.get(target_change_type) if meth is None: - _LOGGER.info('on_snapshot: Unknown target change ' + - str(target_change_type)) - self.close(reason='Unknown target change type: %s ' % - str(target_change_type)) + _LOGGER.info( + "on_snapshot: Unknown target change " + str(target_change_type) + ) + self.close( + reason="Unknown target change type: %s " % str(target_change_type) + ) else: try: meth(proto) @@ -445,7 +451,7 @@ def on_snapshot(self, proto): # in this version bidi rpc is just used and will control this. elif str(proto.document_change): - _LOGGER.debug('on_snapshot: document change') + _LOGGER.debug("on_snapshot: document change") # No other target_ids can show up here, but we still need to see # if the targetId was in the added list or removed list. @@ -461,7 +467,7 @@ def on_snapshot(self, proto): removed = True if changed: - _LOGGER.debug('on_snapshot: document change: CHANGED') + _LOGGER.debug("on_snapshot: document change: CHANGED") # google.cloud.firestore_v1beta1.types.DocumentChange document_change = proto.document_change @@ -475,9 +481,9 @@ def on_snapshot(self, proto): # fashion than self._document_reference document_name = document.name db_str = self._firestore._database_string - db_str_documents = db_str + '/documents/' + db_str_documents = db_str + "/documents/" if document_name.startswith(db_str_documents): - document_name = document_name[len(db_str_documents):] + document_name = document_name[len(db_str_documents) :] document_ref = self._firestore.document(document_name) @@ -487,22 +493,23 @@ def on_snapshot(self, proto): exists=True, read_time=None, create_time=document.create_time, - update_time=document.update_time) + update_time=document.update_time, + ) self.change_map[document.name] = snapshot elif removed: - _LOGGER.debug('on_snapshot: document change: REMOVED') + _LOGGER.debug("on_snapshot: document change: REMOVED") document = proto.document_change.document self.change_map[document.name] = ChangeType.REMOVED - elif (proto.document_delete or proto.document_remove): - _LOGGER.debug('on_snapshot: document change: DELETE/REMOVE') + elif proto.document_delete or proto.document_remove: + _LOGGER.debug("on_snapshot: document change: DELETE/REMOVE") name = (proto.document_delete or proto.document_remove).document self.change_map[name] = ChangeType.REMOVED - elif (proto.filter): - _LOGGER.debug('on_snapshot: filter update') + elif proto.filter: + _LOGGER.debug("on_snapshot: filter update") if proto.filter.count != self._current_size(): # We need to remove all the current results. self._reset_docs() @@ -512,8 +519,7 @@ def on_snapshot(self, proto): else: _LOGGER.debug("UNKNOWN TYPE. 
UHOH") - self.close(reason=ValueError( - 'Unknown listen response type: %s' % proto)) + self.close(reason=ValueError("Unknown listen response type: %s" % proto)) def push(self, read_time, next_resume_token): """ @@ -521,17 +527,11 @@ def push(self, read_time, next_resume_token): the user's callback. Clears the current changes on completion. """ deletes, adds, updates = Watch._extract_changes( - self.doc_map, - self.change_map, - read_time, - ) + self.doc_map, self.change_map, read_time + ) updated_tree, updated_map, appliedChanges = self._compute_snapshot( - self.doc_tree, - self.doc_map, - deletes, - adds, - updates, + self.doc_tree, self.doc_map, deletes, adds, updates ) if not self.has_pushed or len(appliedChanges): @@ -543,7 +543,7 @@ def push(self, read_time, next_resume_token): self._snapshot_callback( keys, appliedChanges, - datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc) + datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc), ) self.has_pushed = True @@ -573,32 +573,34 @@ def _extract_changes(doc_map, changes, read_time): return (deletes, adds, updates) - def _compute_snapshot(self, doc_tree, doc_map, delete_changes, add_changes, - update_changes): + def _compute_snapshot( + self, doc_tree, doc_map, delete_changes, add_changes, update_changes + ): updated_tree = doc_tree updated_map = doc_map - assert len(doc_tree) == len(doc_map), \ - 'The document tree and document map should have the same ' + \ - 'number of entries.' + assert len(doc_tree) == len(doc_map), ( + "The document tree and document map should have the same " + + "number of entries." + ) def delete_doc(name, updated_tree, updated_map): """ Applies a document delete to the document tree and document map. Returns the corresponding DocumentChange event. """ - assert name in updated_map, 'Document to delete does not exist' + assert name in updated_map, "Document to delete does not exist" old_document = updated_map.get(name) # TODO: If a document doesn't exist this raises IndexError. Handle? existing = updated_tree.find(old_document) old_index = existing.index updated_tree = updated_tree.remove(old_document) del updated_map[name] - return (DocumentChange(ChangeType.REMOVED, - old_document, - old_index, - -1), - updated_tree, updated_map) + return ( + DocumentChange(ChangeType.REMOVED, old_document, old_index, -1), + updated_tree, + updated_map, + ) def add_doc(new_document, updated_tree, updated_map): """ @@ -606,15 +608,15 @@ def add_doc(new_document, updated_tree, updated_map): Returns the corresponding DocumentChange event. """ name = new_document.reference._document_path - assert name not in updated_map, 'Document to add already exists' + assert name not in updated_map, "Document to add already exists" updated_tree = updated_tree.insert(new_document, None) new_index = updated_tree.find(new_document).index updated_map[name] = new_document - return (DocumentChange(ChangeType.ADDED, - new_document, - -1, - new_index), - updated_tree, updated_map) + return ( + DocumentChange(ChangeType.ADDED, new_document, -1, new_index), + updated_tree, + updated_map, + ) def modify_doc(new_document, updated_tree, updated_map): """ @@ -623,18 +625,25 @@ def modify_doc(new_document, updated_tree, updated_map): Returns the DocumentChange event for successful modifications. 
""" name = new_document.reference._document_path - assert name in updated_map, 'Document to modify does not exist' + assert name in updated_map, "Document to modify does not exist" old_document = updated_map.get(name) if old_document.update_time != new_document.update_time: remove_change, updated_tree, updated_map = delete_doc( - name, updated_tree, updated_map) + name, updated_tree, updated_map + ) add_change, updated_tree, updated_map = add_doc( - new_document, updated_tree, updated_map) - return (DocumentChange(ChangeType.MODIFIED, - new_document, - remove_change.old_index, - add_change.new_index), - updated_tree, updated_map) + new_document, updated_tree, updated_map + ) + return ( + DocumentChange( + ChangeType.MODIFIED, + new_document, + remove_change.old_index, + add_change.new_index, + ), + updated_tree, + updated_map, + ) return None, updated_tree, updated_map @@ -650,27 +659,31 @@ def modify_doc(new_document, updated_tree, updated_map): delete_changes = sorted(delete_changes, key=key) for name in delete_changes: change, updated_tree, updated_map = delete_doc( - name, updated_tree, updated_map) + name, updated_tree, updated_map + ) appliedChanges.append(change) add_changes = sorted(add_changes, key=key) - _LOGGER.debug('walk over add_changes') + _LOGGER.debug("walk over add_changes") for snapshot in add_changes: - _LOGGER.debug('in add_changes') + _LOGGER.debug("in add_changes") change, updated_tree, updated_map = add_doc( - snapshot, updated_tree, updated_map) + snapshot, updated_tree, updated_map + ) appliedChanges.append(change) update_changes = sorted(update_changes, key=key) for snapshot in update_changes: change, updated_tree, updated_map = modify_doc( - snapshot, updated_tree, updated_map) + snapshot, updated_tree, updated_map + ) if change is not None: appliedChanges.append(change) - assert len(updated_tree) == len(updated_map), \ - 'The update document ' + \ - 'tree and document map should have the same number of entries.' + assert len(updated_tree) == len(updated_map), ( + "The update document " + + "tree and document map should have the same number of entries." + ) return (updated_tree, updated_map, appliedChanges) def _affects_target(self, target_ids, current_id): @@ -684,9 +697,7 @@ def _current_size(self): Returns the current count of all documents, including the changes from the current changeMap. 
""" - deletes, adds, _ = Watch._extract_changes( - self.doc_map, self.change_map, None - ) + deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None) return len(self.doc_map) + len(adds) - len(deletes) def _reset_docs(self): diff --git a/firestore/tests/system.py b/firestore/tests/system.py index 137f2087b4fb..226b1bd9bfbb 100644 --- a/firestore/tests/system.py +++ b/firestore/tests/system.py @@ -34,17 +34,16 @@ from time import sleep -FIRESTORE_CREDS = os.environ.get('FIRESTORE_APPLICATION_CREDENTIALS') -FIRESTORE_PROJECT = os.environ.get('GCLOUD_PROJECT') -RANDOM_ID_REGEX = re.compile('^[a-zA-Z0-9]{20}$') -MISSING_DOCUMENT = 'No document to update: ' -DOCUMENT_EXISTS = 'Document already exists: ' +FIRESTORE_CREDS = os.environ.get("FIRESTORE_APPLICATION_CREDENTIALS") +FIRESTORE_PROJECT = os.environ.get("GCLOUD_PROJECT") +RANDOM_ID_REGEX = re.compile("^[a-zA-Z0-9]{20}$") +MISSING_DOCUMENT = "No document to update: " +DOCUMENT_EXISTS = "Document already exists: " -@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def client(): - credentials = service_account.Credentials.from_service_account_file( - FIRESTORE_CREDS) + credentials = service_account.Credentials.from_service_account_file(FIRESTORE_CREDS) project = FIRESTORE_PROJECT or credentials.project_id yield firestore.Client(project=project, credentials=credentials) @@ -60,19 +59,16 @@ def cleanup(): def test_create_document(client, cleanup): now = datetime.datetime.utcnow().replace(tzinfo=UTC) - document_id = 'shun' + unique_resource_id('-') - document = client.document('collek', document_id) + document_id = "shun" + unique_resource_id("-") + document = client.document("collek", document_id) # Add to clean-up before API request (in case ``create()`` fails). cleanup(document) data = { - 'now': firestore.SERVER_TIMESTAMP, - 'eenta-ger': 11, - 'bites': b'\xe2\x98\x83 \xe2\x9b\xb5', - 'also': { - 'nestednow': firestore.SERVER_TIMESTAMP, - 'quarter': 0.25, - }, + "now": firestore.SERVER_TIMESTAMP, + "eenta-ger": 11, + "bites": b"\xe2\x98\x83 \xe2\x9b\xb5", + "also": {"nestednow": firestore.SERVER_TIMESTAMP, "quarter": 0.25}, } write_result = document.create(data) updated = _pb_timestamp_to_datetime(write_result.update_time) @@ -86,7 +82,7 @@ def test_create_document(client, cleanup): # Verify the server times. snapshot = document.get() stored_data = snapshot.to_dict() - server_now = stored_data['now'] + server_now = stored_data["now"] delta = updated - server_now # NOTE: We could check the ``transform_results`` from the write result @@ -94,33 +90,28 @@ def test_create_document(client, cleanup): # we make sure the timestamps are close. assert 0.0 <= delta.total_seconds() < 5.0 expected_data = { - 'now': server_now, - 'eenta-ger': data['eenta-ger'], - 'bites': data['bites'], - 'also': { - 'nestednow': server_now, - 'quarter': data['also']['quarter'], - }, + "now": server_now, + "eenta-ger": data["eenta-ger"], + "bites": data["bites"], + "also": {"nestednow": server_now, "quarter": data["also"]["quarter"]}, } assert stored_data == expected_data def test_create_document_w_subcollection(client, cleanup): - document_id = 'shun' + unique_resource_id('-') - document = client.document('collek', document_id) + document_id = "shun" + unique_resource_id("-") + document = client.document("collek", document_id) # Add to clean-up before API request (in case ``create()`` fails). 
cleanup(document) - data = { - 'now': firestore.SERVER_TIMESTAMP, - } + data = {"now": firestore.SERVER_TIMESTAMP} document.create(data) - child_ids = ['child1', 'child2'] + child_ids = ["child1", "child2"] for child_id in child_ids: subcollection = document.collection(child_id) - _, subdoc = subcollection.add({'foo': 'bar'}) + _, subdoc = subcollection.add({"foo": "bar"}) cleanup(subdoc) children = document.collections() @@ -128,19 +119,18 @@ def test_create_document_w_subcollection(client, cleanup): def test_cannot_use_foreign_key(client, cleanup): - document_id = 'cannot' + unique_resource_id('-') - document = client.document('foreign-key', document_id) + document_id = "cannot" + unique_resource_id("-") + document = client.document("foreign-key", document_id) # Add to clean-up before API request (in case ``create()`` fails). cleanup(document) other_client = firestore.Client( - project='other-prahj', - credentials=client._credentials, - database='dee-bee') + project="other-prahj", credentials=client._credentials, database="dee-bee" + ) assert other_client._database_string != client._database_string - fake_doc = other_client.document('foo', 'bar') + fake_doc = other_client.document("foo", "bar") with pytest.raises(InvalidArgument): - document.create({'ref': fake_doc}) + document.create({"ref": fake_doc}) def assert_timestamp_less(timestamp_pb1, timestamp_pb2): @@ -150,15 +140,15 @@ def assert_timestamp_less(timestamp_pb1, timestamp_pb2): def test_no_document(client, cleanup): - document_id = 'no_document' + unique_resource_id('-') - document = client.document('abcde', document_id) + document_id = "no_document" + unique_resource_id("-") + document = client.document("abcde", document_id) snapshot = document.get() assert snapshot.to_dict() is None def test_document_set(client, cleanup): - document_id = 'for-set' + unique_resource_id('-') - document = client.document('i-did-it', document_id) + document_id = "for-set" + unique_resource_id("-") + document = client.document("i-did-it", document_id) # Add to clean-up before API request (in case ``set()`` fails). cleanup(document) @@ -167,7 +157,7 @@ def test_document_set(client, cleanup): assert snapshot.to_dict() is None # 1. Use ``create()`` to create the document. - data1 = {'foo': 88} + data1 = {"foo": 88} write_result1 = document.create(data1) snapshot1 = document.get() assert snapshot1.to_dict() == data1 @@ -176,7 +166,7 @@ def test_document_set(client, cleanup): assert snapshot1.update_time == write_result1.update_time # 2. Call ``set()`` again to overwrite. - data2 = {'bar': None} + data2 = {"bar": None} write_result2 = document.set(data2) snapshot2 = document.get() assert snapshot2.to_dict() == data2 @@ -186,38 +176,24 @@ def test_document_set(client, cleanup): def test_document_integer_field(client, cleanup): - document_id = 'for-set' + unique_resource_id('-') - document = client.document('i-did-it', document_id) + document_id = "for-set" + unique_resource_id("-") + document = client.document("i-did-it", document_id) # Add to clean-up before API request (in case ``set()`` fails). 
cleanup(document) - data1 = { - '1a': { - '2b': '3c', - 'ab': '5e'}, - '6f': { - '7g': '8h', - 'cd': '0j'} - } + data1 = {"1a": {"2b": "3c", "ab": "5e"}, "6f": {"7g": "8h", "cd": "0j"}} document.create(data1) - data2 = {'1a.ab': '4d', '6f.7g': '9h'} + data2 = {"1a.ab": "4d", "6f.7g": "9h"} document.update(data2) snapshot = document.get() - expected = { - '1a': { - '2b': '3c', - 'ab': '4d'}, - '6f': { - '7g': '9h', - 'cd': '0j'} - } + expected = {"1a": {"2b": "3c", "ab": "4d"}, "6f": {"7g": "9h", "cd": "0j"}} assert snapshot.to_dict() == expected def test_document_set_merge(client, cleanup): - document_id = 'for-set' + unique_resource_id('-') - document = client.document('i-did-it', document_id) + document_id = "for-set" + unique_resource_id("-") + document = client.document("i-did-it", document_id) # Add to clean-up before API request (in case ``set()`` fails). cleanup(document) @@ -226,9 +202,7 @@ def test_document_set_merge(client, cleanup): assert not snapshot.exists # 1. Use ``create()`` to create the document. - data1 = {'name': 'Sam', - 'address': {'city': 'SF', - 'state': 'CA'}} + data1 = {"name": "Sam", "address": {"city": "SF", "state": "CA"}} write_result1 = document.create(data1) snapshot1 = document.get() assert snapshot1.to_dict() == data1 @@ -237,20 +211,21 @@ def test_document_set_merge(client, cleanup): assert snapshot1.update_time == write_result1.update_time # 2. Call ``set()`` to merge - data2 = {'address': {'city': 'LA'}} + data2 = {"address": {"city": "LA"}} write_result2 = document.set(data2, merge=True) snapshot2 = document.get() - assert snapshot2.to_dict() == {'name': 'Sam', - 'address': {'city': 'LA', - 'state': 'CA'}} + assert snapshot2.to_dict() == { + "name": "Sam", + "address": {"city": "LA", "state": "CA"}, + } # Make sure the create time hasn't changed. assert snapshot2.create_time == snapshot1.create_time assert snapshot2.update_time == write_result2.update_time def test_document_set_w_int_field(client, cleanup): - document_id = 'set-int-key' + unique_resource_id('-') - document = client.document('i-did-it', document_id) + document_id = "set-int-key" + unique_resource_id("-") + document = client.document("i-did-it", document_id) # Add to clean-up before API request (in case ``set()`` fails). cleanup(document) @@ -259,11 +234,11 @@ def test_document_set_w_int_field(client, cleanup): assert not snapshot.exists # 1. Use ``create()`` to create the document. - before = {'testing': '1'} + before = {"testing": "1"} document.create(before) # 2. Replace using ``set()``. - data = {'14': {'status': 'active'}} + data = {"14": {"status": "active"}} document.set(data) # 3. Verify replaced data. @@ -273,8 +248,8 @@ def test_document_set_w_int_field(client, cleanup): def test_document_update_w_int_field(client, cleanup): # Attempt to reproduce #5489. - document_id = 'update-int-key' + unique_resource_id('-') - document = client.document('i-did-it', document_id) + document_id = "update-int-key" + unique_resource_id("-") + document = client.document("i-did-it", document_id) # Add to clean-up before API request (in case ``set()`` fails). cleanup(document) @@ -283,11 +258,11 @@ def test_document_update_w_int_field(client, cleanup): assert not snapshot.exists # 1. Use ``create()`` to create the document. - before = {'testing': '1'} + before = {"testing": "1"} document.create(before) # 2. Add values using ``update()``. - data = {'14': {'status': 'active'}} + data = {"14": {"status": "active"}} document.update(data) # 3. Verify updated data. 
@@ -298,79 +273,64 @@ def test_document_update_w_int_field(client, cleanup): def test_update_document(client, cleanup): - document_id = 'for-update' + unique_resource_id('-') - document = client.document('made', document_id) + document_id = "for-update" + unique_resource_id("-") + document = client.document("made", document_id) # Add to clean-up before API request (in case ``create()`` fails). cleanup(document) # 0. Try to update before the document exists. with pytest.raises(NotFound) as exc_info: - document.update({'not': 'there'}) + document.update({"not": "there"}) assert exc_info.value.message.startswith(MISSING_DOCUMENT) assert document_id in exc_info.value.message # 1. Try to update before the document exists (now with an option). with pytest.raises(NotFound) as exc_info: - document.update({'still': 'not-there'}) + document.update({"still": "not-there"}) assert exc_info.value.message.startswith(MISSING_DOCUMENT) assert document_id in exc_info.value.message # 2. Update and create the document (with an option). - data = { - 'foo': { - 'bar': 'baz', - }, - 'scoop': { - 'barn': 981, - }, - 'other': True, - } + data = {"foo": {"bar": "baz"}, "scoop": {"barn": 981}, "other": True} write_result2 = document.create(data) # 3. Send an update without a field path (no option). - field_updates3 = {'foo': {'quux': 800}} + field_updates3 = {"foo": {"quux": 800}} write_result3 = document.update(field_updates3) assert_timestamp_less(write_result2.update_time, write_result3.update_time) snapshot3 = document.get() expected3 = { - 'foo': field_updates3['foo'], - 'scoop': data['scoop'], - 'other': data['other'], + "foo": field_updates3["foo"], + "scoop": data["scoop"], + "other": data["other"], } assert snapshot3.to_dict() == expected3 # 4. Send an update **with** a field path and a delete and a valid # "last timestamp" option. - field_updates4 = { - 'scoop.silo': None, - 'other': firestore.DELETE_FIELD, - } + field_updates4 = {"scoop.silo": None, "other": firestore.DELETE_FIELD} option4 = client.write_option(last_update_time=snapshot3.update_time) write_result4 = document.update(field_updates4, option=option4) assert_timestamp_less(write_result3.update_time, write_result4.update_time) snapshot4 = document.get() expected4 = { - 'foo': field_updates3['foo'], - 'scoop': { - 'barn': data['scoop']['barn'], - 'silo': field_updates4['scoop.silo'], - }, + "foo": field_updates3["foo"], + "scoop": {"barn": data["scoop"]["barn"], "silo": field_updates4["scoop.silo"]}, } assert snapshot4.to_dict() == expected4 # 5. Call ``update()`` with invalid (in the past) "last timestamp" option. assert_timestamp_less(option4._last_update_time, snapshot4.update_time) with pytest.raises(FailedPrecondition) as exc_info: - document.update({'bad': 'time-past'}, option=option4) + document.update({"bad": "time-past"}, option=option4) # 6. Call ``update()`` with invalid (in future) "last timestamp" option. 
timestamp_pb = timestamp_pb2.Timestamp( - seconds=snapshot4.update_time.nanos + 3600, - nanos=snapshot4.update_time.nanos, + seconds=snapshot4.update_time.nanos + 3600, nanos=snapshot4.update_time.nanos ) option6 = client.write_option(last_update_time=timestamp_pb) with pytest.raises(FailedPrecondition) as exc_info: - document.update({'bad': 'time-future'}, option=option6) + document.update({"bad": "time-future"}, option=option6) def check_snapshot(snapshot, document, data, write_result): @@ -383,32 +343,23 @@ def check_snapshot(snapshot, document, data, write_result): def test_document_get(client, cleanup): now = datetime.datetime.utcnow().replace(tzinfo=UTC) - document_id = 'for-get' + unique_resource_id('-') - document = client.document('created', document_id) + document_id = "for-get" + unique_resource_id("-") + document = client.document("created", document_id) # Add to clean-up before API request (in case ``create()`` fails). cleanup(document) # First make sure it doesn't exist. assert not document.get().exists - ref_doc = client.document('top', 'middle1', 'middle2', 'bottom') + ref_doc = client.document("top", "middle1", "middle2", "bottom") data = { - 'turtle': 'power', - 'cheese': 19.5, - 'fire': 199099299, - 'referee': ref_doc, - 'gio': firestore.GeoPoint(45.5, 90.0), - 'deep': [ - u'some', - b'\xde\xad\xbe\xef', - ], - 'map': { - 'ice': True, - 'water': None, - 'vapor': { - 'deeper': now, - }, - }, + "turtle": "power", + "cheese": 19.5, + "fire": 199099299, + "referee": ref_doc, + "gio": firestore.GeoPoint(45.5, 90.0), + "deep": [u"some", b"\xde\xad\xbe\xef"], + "map": {"ice": True, "water": None, "vapor": {"deeper": now}}, } write_result = document.create(data) snapshot = document.get() @@ -416,17 +367,16 @@ def test_document_get(client, cleanup): def test_document_delete(client, cleanup): - document_id = 'deleted' + unique_resource_id('-') - document = client.document('here-to-be', document_id) + document_id = "deleted" + unique_resource_id("-") + document = client.document("here-to-be", document_id) # Add to clean-up before API request (in case ``create()`` fails). cleanup(document) - document.create({'not': 'much'}) + document.create({"not": "much"}) # 1. Call ``delete()`` with invalid (in the past) "last timestamp" option. snapshot1 = document.get() timestamp_pb = timestamp_pb2.Timestamp( - seconds=snapshot1.update_time.nanos - 3600, - nanos=snapshot1.update_time.nanos, + seconds=snapshot1.update_time.nanos - 3600, nanos=snapshot1.update_time.nanos ) option1 = client.write_option(last_update_time=timestamp_pb) with pytest.raises(FailedPrecondition): @@ -434,8 +384,7 @@ def test_document_delete(client, cleanup): # 2. Call ``delete()`` with invalid (in future) "last timestamp" option. timestamp_pb = timestamp_pb2.Timestamp( - seconds=snapshot1.update_time.nanos + 3600, - nanos=snapshot1.update_time.nanos, + seconds=snapshot1.update_time.nanos + 3600, nanos=snapshot1.update_time.nanos ) option2 = client.write_option(last_update_time=timestamp_pb) with pytest.raises(FailedPrecondition): @@ -450,12 +399,12 @@ def test_document_delete(client, cleanup): def test_collection_add(client, cleanup): - collection1 = client.collection('collek') - collection2 = client.collection('collek', 'shun', 'child') - explicit_doc_id = 'hula' + unique_resource_id('-') + collection1 = client.collection("collek") + collection2 = client.collection("collek", "shun", "child") + explicit_doc_id = "hula" + unique_resource_id("-") # Auto-ID at top-level. 
- data1 = {'foo': 'bar'} + data1 = {"foo": "bar"} update_time1, document_ref1 = collection1.add(data1) cleanup(document_ref1) snapshot1 = document_ref1.get() @@ -465,9 +414,8 @@ def test_collection_add(client, cleanup): assert RANDOM_ID_REGEX.match(document_ref1.id) # Explicit ID at top-level. - data2 = {'baz': 999} - update_time2, document_ref2 = collection1.add( - data2, document_id=explicit_doc_id) + data2 = {"baz": 999} + update_time2, document_ref2 = collection1.add(data2, document_id=explicit_doc_id) cleanup(document_ref2) snapshot2 = document_ref2.get() assert snapshot2.to_dict() == data2 @@ -476,7 +424,7 @@ def test_collection_add(client, cleanup): assert document_ref2.id == explicit_doc_id # Auto-ID for nested collection. - data3 = {'quux': b'\x00\x01\x02\x03'} + data3 = {"quux": b"\x00\x01\x02\x03"} update_time3, document_ref3 = collection2.add(data3) cleanup(document_ref3) snapshot3 = document_ref3.get() @@ -486,9 +434,8 @@ def test_collection_add(client, cleanup): assert RANDOM_ID_REGEX.match(document_ref3.id) # Explicit for nested collection. - data4 = {'kazaam': None, 'bad': False} - update_time4, document_ref4 = collection2.add( - data4, document_id=explicit_doc_id) + data4 = {"kazaam": None, "bad": False} + update_time4, document_ref4 = collection2.add(data4, document_id=explicit_doc_id) cleanup(document_ref4) snapshot4 = document_ref4.get() assert snapshot4.to_dict() == data4 @@ -498,8 +445,8 @@ def test_collection_add(client, cleanup): def test_query_get(client, cleanup): - sub_collection = 'child' + unique_resource_id('-') - collection = client.collection('collek', 'shun', sub_collection) + sub_collection = "child" + unique_resource_id("-") + collection = client.collection("collek", "shun", sub_collection) stored = {} num_vals = 5 @@ -507,12 +454,9 @@ def test_query_get(client, cleanup): for a_val in allowed_vals: for b_val in allowed_vals: document_data = { - 'a': a_val, - 'b': b_val, - 'stats': { - 'sum': a_val + b_val, - 'product': a_val * b_val, - }, + "a": a_val, + "b": b_val, + "stats": {"sum": a_val + b_val, "product": a_val * b_val}, } _, doc_ref = collection.add(document_data) # Add to clean-up. @@ -520,92 +464,77 @@ def test_query_get(client, cleanup): stored[doc_ref.id] = document_data # 0. Limit to snapshots where ``a==1``. - query0 = collection.where('a', '==', 1) - values0 = { - snapshot.id: snapshot.to_dict() - for snapshot in query0.get() - } + query0 = collection.where("a", "==", 1) + values0 = {snapshot.id: snapshot.to_dict() for snapshot in query0.get()} assert len(values0) == num_vals for key, value in six.iteritems(values0): assert stored[key] == value - assert value['a'] == 1 + assert value["a"] == 1 # 1. Order by ``b``. - query1 = collection.order_by('b', direction=query0.DESCENDING) - values1 = [ - (snapshot.id, snapshot.to_dict()) - for snapshot in query1.get() - ] + query1 = collection.order_by("b", direction=query0.DESCENDING) + values1 = [(snapshot.id, snapshot.to_dict()) for snapshot in query1.get()] assert len(values1) == len(stored) b_vals1 = [] for key, value in values1: assert stored[key] == value - b_vals1.append(value['b']) + b_vals1.append(value["b"]) # Make sure the ``b``-values are in DESCENDING order. assert sorted(b_vals1, reverse=True) == b_vals1 # 2. Limit to snapshots where ``stats.sum > 1`` (a field path). 
- query2 = collection.where('stats.sum', '>', 4) - values2 = { - snapshot.id: snapshot.to_dict() - for snapshot in query2.get() - } + query2 = collection.where("stats.sum", ">", 4) + values2 = {snapshot.id: snapshot.to_dict() for snapshot in query2.get()} assert len(values2) == 10 ab_pairs2 = set() for key, value in six.iteritems(values2): assert stored[key] == value - ab_pairs2.add((value['a'], value['b'])) - - expected_ab_pairs = set([ - (a_val, b_val) - for a_val in allowed_vals - for b_val in allowed_vals - if a_val + b_val > 4 - ]) + ab_pairs2.add((value["a"], value["b"])) + + expected_ab_pairs = set( + [ + (a_val, b_val) + for a_val in allowed_vals + for b_val in allowed_vals + if a_val + b_val > 4 + ] + ) assert expected_ab_pairs == ab_pairs2 # 3. Use a start and end cursor. - query3 = collection.order_by( - 'a').start_at({'a': num_vals - 2}).end_before({'a': num_vals - 1}) - values3 = [ - (snapshot.id, snapshot.to_dict()) - for snapshot in query3.get() - ] + query3 = ( + collection.order_by("a") + .start_at({"a": num_vals - 2}) + .end_before({"a": num_vals - 1}) + ) + values3 = [(snapshot.id, snapshot.to_dict()) for snapshot in query3.get()] assert len(values3) == num_vals for key, value in values3: assert stored[key] == value - assert value['a'] == num_vals - 2 - b_vals1.append(value['b']) + assert value["a"] == num_vals - 2 + b_vals1.append(value["b"]) # 4. Send a query with no results. - query4 = collection.where('b', '==', num_vals + 100) + query4 = collection.where("b", "==", num_vals + 100) values4 = list(query4.get()) assert len(values4) == 0 # 5. Select a subset of fields. - query5 = collection.where('b', '<=', 1) - query5 = query5.select(['a', 'stats.product']) - values5 = { - snapshot.id: snapshot.to_dict() - for snapshot in query5.get() - } + query5 = collection.where("b", "<=", 1) + query5 = query5.select(["a", "stats.product"]) + values5 = {snapshot.id: snapshot.to_dict() for snapshot in query5.get()} assert len(values5) == num_vals * 2 # a ANY, b in (0, 1) for key, value in six.iteritems(values5): expected = { - 'a': stored[key]['a'], - 'stats': { - 'product': stored[key]['stats']['product'], - }, + "a": stored[key]["a"], + "stats": {"product": stored[key]["stats"]["product"]}, } assert expected == value # 6. Add multiple filters via ``where()``. - query6 = collection.where('stats.product', '>', 5) - query6 = query6.where('stats.product', '<', 10) - values6 = { - snapshot.id: snapshot.to_dict() - for snapshot in query6.get() - } + query6 = collection.where("stats.product", ">", 5) + query6 = query6.where("stats.product", "<", 10) + values6 = {snapshot.id: snapshot.to_dict() for snapshot in query6.get()} matching_pairs = [ (a_val, b_val) @@ -616,42 +545,39 @@ def test_query_get(client, cleanup): assert len(values6) == len(matching_pairs) for key, value in six.iteritems(values6): assert stored[key] == value - pair = (value['a'], value['b']) + pair = (value["a"], value["b"]) assert pair in matching_pairs # 7. Skip the first three results, when ``b==2`` - query7 = collection.where('b', '==', 2) + query7 = collection.where("b", "==", 2) offset = 3 query7 = query7.offset(offset) - values7 = { - snapshot.id: snapshot.to_dict() - for snapshot in query7.get() - } + values7 = {snapshot.id: snapshot.to_dict() for snapshot in query7.get()} # NOTE: We don't check the ``a``-values, since that would require # an ``order_by('a')``, which combined with the ``b == 2`` # filter would necessitate an index. 
assert len(values7) == num_vals - offset for key, value in six.iteritems(values7): assert stored[key] == value - assert value['b'] == 2 + assert value["b"] == 2 def test_query_unary(client, cleanup): - collection_name = 'unary' + unique_resource_id('-') + collection_name = "unary" + unique_resource_id("-") collection = client.collection(collection_name) - field_name = 'foo' + field_name = "foo" _, document0 = collection.add({field_name: None}) # Add to clean-up. cleanup(document0) - nan_val = float('nan') + nan_val = float("nan") _, document1 = collection.add({field_name: nan_val}) # Add to clean-up. cleanup(document1) # 0. Query for null. - query0 = collection.where(field_name, '==', None) + query0 = collection.where(field_name, "==", None) values0 = list(query0.get()) assert len(values0) == 1 snapshot0 = values0[0] @@ -659,7 +585,7 @@ def test_query_unary(client, cleanup): assert snapshot0.to_dict() == {field_name: None} # 1. Query for a NAN. - query1 = collection.where(field_name, '==', nan_val) + query1 = collection.where(field_name, "==", nan_val) values1 = list(query1.get()) assert len(values1) == 1 snapshot1 = values1[0] @@ -670,43 +596,28 @@ def test_query_unary(client, cleanup): def test_get_all(client, cleanup): - collection_name = 'get-all' + unique_resource_id('-') + collection_name = "get-all" + unique_resource_id("-") - document1 = client.document(collection_name, 'a') - document2 = client.document(collection_name, 'b') - document3 = client.document(collection_name, 'c') + document1 = client.document(collection_name, "a") + document2 = client.document(collection_name, "b") + document3 = client.document(collection_name, "c") # Add to clean-up before API requests (in case ``create()`` fails). cleanup(document1) cleanup(document3) - data1 = { - 'a': { - 'b': 2, - 'c': 3, - }, - 'd': 4, - 'e': 0, - } + data1 = {"a": {"b": 2, "c": 3}, "d": 4, "e": 0} write_result1 = document1.create(data1) - data3 = { - 'a': { - 'b': 5, - 'c': 6, - }, - 'd': 7, - 'e': 100, - } + data3 = {"a": {"b": 5, "c": 6}, "d": 7, "e": 100} write_result3 = document3.create(data3) # 0. Get 3 unique documents, one of which is missing. - snapshots = list(client.get_all( - [document1, document2, document3])) + snapshots = list(client.get_all([document1, document2, document3])) assert snapshots[0].exists assert snapshots[1].exists assert not snapshots[2].exists snapshots = [snapshot for snapshot in snapshots if snapshot.exists] - id_attr = operator.attrgetter('id') + id_attr = operator.attrgetter("id") snapshots.sort(key=id_attr) snapshot1, snapshot3 = snapshots @@ -714,7 +625,7 @@ def test_get_all(client, cleanup): check_snapshot(snapshot3, document3, data3, write_result3) # 1. Get 2 colliding documents. - document1_also = client.document(collection_name, 'a') + document1_also = client.document(collection_name, "a") snapshots = list(client.get_all([document1, document1_also])) assert len(snapshots) == 1 @@ -722,51 +633,38 @@ def test_get_all(client, cleanup): check_snapshot(snapshots[0], document1_also, data1, write_result1) # 2. Use ``field_paths`` / projection in ``get_all()``. 
- snapshots = list(client.get_all( - [document1, document3], field_paths=['a.b', 'd'])) + snapshots = list(client.get_all([document1, document3], field_paths=["a.b", "d"])) assert len(snapshots) == 2 snapshots.sort(key=id_attr) snapshot1, snapshot3 = snapshots - restricted1 = { - 'a': {'b': data1['a']['b']}, - 'd': data1['d'], - } + restricted1 = {"a": {"b": data1["a"]["b"]}, "d": data1["d"]} check_snapshot(snapshot1, document1, restricted1, write_result1) - restricted3 = { - 'a': {'b': data3['a']['b']}, - 'd': data3['d'], - } + restricted3 = {"a": {"b": data3["a"]["b"]}, "d": data3["d"]} check_snapshot(snapshot3, document3, restricted3, write_result3) def test_batch(client, cleanup): - collection_name = 'batch' + unique_resource_id('-') + collection_name = "batch" + unique_resource_id("-") - document1 = client.document(collection_name, 'abc') - document2 = client.document(collection_name, 'mno') - document3 = client.document(collection_name, 'xyz') + document1 = client.document(collection_name, "abc") + document2 = client.document(collection_name, "mno") + document3 = client.document(collection_name, "xyz") # Add to clean-up before API request (in case ``create()`` fails). cleanup(document1) cleanup(document2) cleanup(document3) - data2 = { - 'some': { - 'deep': 'stuff', - 'and': 'here', - }, - 'water': 100.0, - } + data2 = {"some": {"deep": "stuff", "and": "here"}, "water": 100.0} document2.create(data2) - document3.create({'other': 19}) + document3.create({"other": 19}) batch = client.batch() - data1 = {'all': True} + data1 = {"all": True} batch.create(document1, data1) - new_value = 'there' - batch.update(document2, {'some.and': new_value}) + new_value = "there" + batch.update(document2, {"some.and": new_value}) batch.delete(document3) write_results = batch.commit() @@ -775,7 +673,7 @@ def test_batch(client, cleanup): write_result1 = write_results[0] write_result2 = write_results[1] write_result3 = write_results[2] - assert not write_result3.HasField('update_time') + assert not write_result3.HasField("update_time") snapshot1 = document1.get() assert snapshot1.to_dict() == data1 @@ -784,7 +682,7 @@ def test_batch(client, cleanup): snapshot2 = document2.get() assert snapshot2.to_dict() != data2 - data2['some']['and'] = new_value + data2["some"]["and"] = new_value assert snapshot2.to_dict() == data2 assert_timestamp_less(snapshot2.create_time, write_result2.update_time) assert snapshot2.update_time == write_result2.update_time @@ -794,15 +692,10 @@ def test_batch(client, cleanup): def test_watch_document(client, cleanup): db = client - doc_ref = db.collection(u'users').document( - u'alovelace' + unique_resource_id()) + doc_ref = db.collection(u"users").document(u"alovelace" + unique_resource_id()) # Initial setting - doc_ref.set({ - u'first': u'Jane', - u'last': u'Doe', - u'born': 1900 - }) + doc_ref.set({u"first": u"Jane", u"last": u"Doe", u"born": 1900}) sleep(1) @@ -815,11 +708,7 @@ def on_snapshot(docs, changes, read_time): doc_ref.on_snapshot(on_snapshot) # Alter document - doc_ref.set({ - u'first': u'Ada', - u'last': u'Lovelace', - u'born': 1815 - }) + doc_ref.set({u"first": u"Ada", u"last": u"Lovelace", u"born": 1815}) sleep(1) @@ -830,28 +719,24 @@ def on_snapshot(docs, changes, read_time): if on_snapshot.called_count != 1: raise AssertionError( - "Failed to get exactly one document change: count: " + - str(on_snapshot.called_count)) + "Failed to get exactly one document change: count: " + + str(on_snapshot.called_count) + ) def test_watch_collection(client, cleanup): db = client 
- doc_ref = db.collection(u'users').document( - u'alovelace' + unique_resource_id()) - collection_ref = db.collection(u'users') + doc_ref = db.collection(u"users").document(u"alovelace" + unique_resource_id()) + collection_ref = db.collection(u"users") # Initial setting - doc_ref.set({ - u'first': u'Jane', - u'last': u'Doe', - u'born': 1900 - }) + doc_ref.set({u"first": u"Jane", u"last": u"Doe", u"born": 1900}) # Setup listener def on_snapshot(docs, changes, read_time): on_snapshot.called_count += 1 for doc in [doc for doc in docs if doc.id == doc_ref.id]: - on_snapshot.born = doc.get('born') + on_snapshot.born = doc.get("born") on_snapshot.called_count = 0 on_snapshot.born = 0 @@ -861,11 +746,7 @@ def on_snapshot(docs, changes, read_time): # delay here so initial on_snapshot occurs and isn't combined with set sleep(1) - doc_ref.set({ - u'first': u'Ada', - u'last': u'Lovelace', - u'born': 1815 - }) + doc_ref.set({u"first": u"Ada", u"last": u"Lovelace", u"born": 1815}) for _ in range(10): if on_snapshot.born == 1815: @@ -874,22 +755,17 @@ def on_snapshot(docs, changes, read_time): if on_snapshot.born != 1815: raise AssertionError( - "Expected the last document update to update born: " + - str(on_snapshot.born)) + "Expected the last document update to update born: " + str(on_snapshot.born) + ) def test_watch_query(client, cleanup): db = client - doc_ref = db.collection(u'users').document( - u'alovelace' + unique_resource_id()) - query_ref = db.collection(u'users').where("first", "==", u'Ada') + doc_ref = db.collection(u"users").document(u"alovelace" + unique_resource_id()) + query_ref = db.collection(u"users").where("first", "==", u"Ada") # Initial setting - doc_ref.set({ - u'first': u'Jane', - u'last': u'Doe', - u'born': 1900 - }) + doc_ref.set({u"first": u"Jane", u"last": u"Doe", u"born": 1900}) sleep(1) @@ -898,7 +774,7 @@ def on_snapshot(docs, changes, read_time): on_snapshot.called_count += 1 # A snapshot should return the same thing as if a query ran now. 
- query_ran = db.collection(u'users').where("first", "==", u'Ada').get() + query_ran = db.collection(u"users").where("first", "==", u"Ada").get() assert len(docs) == len([i for i in query_ran]) on_snapshot.called_count = 0 @@ -906,11 +782,7 @@ def on_snapshot(docs, changes, read_time): query_ref.on_snapshot(on_snapshot) # Alter document - doc_ref.set({ - u'first': u'Ada', - u'last': u'Lovelace', - u'born': 1815 - }) + doc_ref.set({u"first": u"Ada", u"last": u"Lovelace", u"born": 1815}) for _ in range(10): if on_snapshot.called_count == 1: @@ -919,26 +791,25 @@ def on_snapshot(docs, changes, read_time): if on_snapshot.called_count != 1: raise AssertionError( - "Failed to get exactly one document change: count: " + - str(on_snapshot.called_count)) + "Failed to get exactly one document change: count: " + + str(on_snapshot.called_count) + ) def test_watch_query_order(client, cleanup): db = client unique_id = unique_resource_id() - doc_ref1 = db.collection(u'users').document( - u'alovelace' + unique_id) - doc_ref2 = db.collection(u'users').document( - u'asecondlovelace' + unique_id) - doc_ref3 = db.collection(u'users').document( - u'athirdlovelace' + unique_id) - doc_ref4 = db.collection(u'users').document( - u'afourthlovelace' + unique_id) - doc_ref5 = db.collection(u'users').document( - u'afifthlovelace' + unique_id) - - query_ref = db.collection(u'users').where( - "first", "==", u'Ada' + unique_id).order_by("last") + doc_ref1 = db.collection(u"users").document(u"alovelace" + unique_id) + doc_ref2 = db.collection(u"users").document(u"asecondlovelace" + unique_id) + doc_ref3 = db.collection(u"users").document(u"athirdlovelace" + unique_id) + doc_ref4 = db.collection(u"users").document(u"afourthlovelace" + unique_id) + doc_ref5 = db.collection(u"users").document(u"afifthlovelace" + unique_id) + + query_ref = ( + db.collection(u"users") + .where("first", "==", u"Ada" + unique_id) + .order_by("last") + ) # Setup listener def on_snapshot(docs, changes, read_time): @@ -952,10 +823,12 @@ def on_snapshot(docs, changes, read_time): # compare the order things are returned for snapshot, query in zip(docs, query_ran_results): - assert snapshot.get('last') == query.get( - 'last'), "expect the sort order to match, last" - assert snapshot.get('born') == query.get( - 'born'), "expect the sort order to match, born" + assert snapshot.get("last") == query.get( + "last" + ), "expect the sort order to match, last" + assert snapshot.get("born") == query.get( + "born" + ), "expect the sort order to match, born" on_snapshot.called_count += 1 on_snapshot.last_doc_count = len(docs) except Exception as e: @@ -968,31 +841,17 @@ def on_snapshot(docs, changes, read_time): sleep(1) - doc_ref1.set({ - u'first': u'Ada' + unique_id, - u'last': u'Lovelace', - u'born': 1815 - }) - doc_ref2.set({ - u'first': u'Ada' + unique_id, - u'last': u'SecondLovelace', - u'born': 1815 - }) - doc_ref3.set({ - u'first': u'Ada' + unique_id, - u'last': u'ThirdLovelace', - u'born': 1815 - }) - doc_ref4.set({ - u'first': u'Ada' + unique_id, - u'last': u'FourthLovelace', - u'born': 1815 - }) - doc_ref5.set({ - u'first': u'Ada' + unique_id, - u'last': u'lovelace', - u'born': 1815 - }) + doc_ref1.set({u"first": u"Ada" + unique_id, u"last": u"Lovelace", u"born": 1815}) + doc_ref2.set( + {u"first": u"Ada" + unique_id, u"last": u"SecondLovelace", u"born": 1815} + ) + doc_ref3.set( + {u"first": u"Ada" + unique_id, u"last": u"ThirdLovelace", u"born": 1815} + ) + doc_ref4.set( + {u"first": u"Ada" + unique_id, u"last": u"FourthLovelace", u"born": 1815} + ) 
+ doc_ref5.set({u"first": u"Ada" + unique_id, u"last": u"lovelace", u"born": 1815}) for _ in range(10): if on_snapshot.last_doc_count == 5: @@ -1004,5 +863,5 @@ def on_snapshot(docs, changes, read_time): if on_snapshot.last_doc_count != 5: raise AssertionError( - "5 docs expected in snapshot method " + - str(on_snapshot.last_doc_count)) + "5 docs expected in snapshot method " + str(on_snapshot.last_doc_count) + ) diff --git a/firestore/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py b/firestore/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py index f3baab904b29..be503936280f 100644 --- a/firestore/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py +++ b/firestore/tests/unit/gapic/v1beta1/test_firestore_client_v1beta1.py @@ -53,22 +53,15 @@ def __init__(self, responses=[]): self.responses = responses self.requests = [] - def unary_unary(self, - method, - request_serializer=None, - response_deserializer=None): + def unary_unary(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) - def unary_stream(self, - method, - request_serializer=None, - response_deserializer=None): + def unary_stream(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) - def stream_stream(self, - method, - request_serializer=None, - response_deserializer=None): + def stream_stream( + self, method, request_serializer=None, response_deserializer=None + ): return MultiCallableStub(method, self) @@ -79,20 +72,21 @@ class CustomException(Exception): class TestFirestoreClient(object): def test_get_document(self): # Setup Expected Response - name_2 = 'name2-1052831874' - expected_response = {'name': name_2} + name_2 = "name2-1052831874" + expected_response = {"name": name_2} expected_response = document_pb2.Document(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + name = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) response = client.get_document(name) assert expected_response == response @@ -105,41 +99,39 @@ def test_get_document(self): def test_get_document_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + name = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) with pytest.raises(CustomException): client.get_document(name) def test_list_documents(self): # Setup Expected Response - next_page_token = '' + next_page_token = "" documents_element = {} documents = [documents_element] - expected_response = { - 'next_page_token': next_page_token, - 'documents': documents - } - expected_response = firestore_pb2.ListDocumentsResponse( - **expected_response) + expected_response = {"next_page_token": next_page_token, "documents": documents} + 
expected_response = firestore_pb2.ListDocumentsResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') - collection_id = 'collectionId-821242276' + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) + collection_id = "collectionId-821242276" paged_list_response = client.list_documents(parent, collection_id) resources = list(paged_list_response) @@ -149,21 +141,23 @@ def test_list_documents(self): assert len(channel.requests) == 1 expected_request = firestore_pb2.ListDocumentsRequest( - parent=parent, collection_id=collection_id) + parent=parent, collection_id=collection_id + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_list_documents_exception(self): channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') - collection_id = 'collectionId-821242276' + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) + collection_id = "collectionId-821242276" paged_list_response = client.list_documents(parent, collection_id) with pytest.raises(CustomException): @@ -171,26 +165,26 @@ def test_list_documents_exception(self): def test_create_document(self): # Setup Expected Response - name = 'name3373707' - expected_response = {'name': name} + name = "name3373707" + expected_response = {"name": name} expected_response = document_pb2.Document(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') - collection_id = 'collectionId-821242276' - document_id = 'documentId506676927' + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) + collection_id = "collectionId-821242276" + document_id = "documentId506676927" document = {} - response = client.create_document(parent, collection_id, document_id, - document) + response = client.create_document(parent, collection_id, document_id, document) assert expected_response == response assert len(channel.requests) == 1 @@ -198,38 +192,39 @@ def test_create_document(self): parent=parent, collection_id=collection_id, document_id=document_id, - document=document) + document=document, + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_create_document_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') - collection_id = 'collectionId-821242276' - document_id = 'documentId506676927' + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) + collection_id = "collectionId-821242276" + document_id = "documentId506676927" document = {} with pytest.raises(CustomException): - client.create_document(parent, collection_id, document_id, - document) + client.create_document(parent, collection_id, document_id, document) def test_update_document(self): # Setup Expected Response - name = 'name3373707' - expected_response = {'name': name} + name = "name3373707" + expected_response = {"name": name} expected_response = document_pb2.Document(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() @@ -243,14 +238,15 @@ def test_update_document(self): assert len(channel.requests) == 1 expected_request = firestore_pb2.UpdateDocumentRequest( - document=document, update_mask=update_mask) + document=document, update_mask=update_mask + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_update_document_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() @@ -264,14 +260,15 @@ def test_update_document_exception(self): def test_delete_document(self): channel = ChannelStub() - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + name = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) client.delete_document(name) @@ -283,35 +280,35 @@ def test_delete_document(self): def test_delete_document_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + name = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) with pytest.raises(CustomException): client.delete_document(name) def test_batch_get_documents(self): # Setup Expected Response - missing = 'missing1069449574' - transaction = b'-34' - expected_response = {'missing': missing, 'transaction': transaction} - expected_response = firestore_pb2.BatchGetDocumentsResponse( - **expected_response) + missing = 
"missing1069449574" + transaction = b"-34" + expected_response = {"missing": missing, "transaction": transaction} + expected_response = firestore_pb2.BatchGetDocumentsResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - database = client.database_root_path('[PROJECT]', '[DATABASE]') + database = client.database_root_path("[PROJECT]", "[DATABASE]") documents = [] response = client.batch_get_documents(database, documents) @@ -321,20 +318,21 @@ def test_batch_get_documents(self): assert len(channel.requests) == 1 expected_request = firestore_pb2.BatchGetDocumentsRequest( - database=database, documents=documents) + database=database, documents=documents + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_batch_get_documents_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - database = client.database_root_path('[PROJECT]', '[DATABASE]') + database = client.database_root_path("[PROJECT]", "[DATABASE]") documents = [] with pytest.raises(CustomException): @@ -342,40 +340,38 @@ def test_batch_get_documents_exception(self): def test_begin_transaction(self): # Setup Expected Response - transaction = b'-34' - expected_response = {'transaction': transaction} - expected_response = firestore_pb2.BeginTransactionResponse( - **expected_response) + transaction = b"-34" + expected_response = {"transaction": transaction} + expected_response = firestore_pb2.BeginTransactionResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - database = client.database_root_path('[PROJECT]', '[DATABASE]') + database = client.database_root_path("[PROJECT]", "[DATABASE]") response = client.begin_transaction(database) assert expected_response == response assert len(channel.requests) == 1 - expected_request = firestore_pb2.BeginTransactionRequest( - database=database) + expected_request = firestore_pb2.BeginTransactionRequest(database=database) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_begin_transaction_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - database = client.database_root_path('[PROJECT]', '[DATABASE]') + database = client.database_root_path("[PROJECT]", "[DATABASE]") with pytest.raises(CustomException): client.begin_transaction(database) @@ -387,34 +383,33 @@ def test_commit(self): # 
Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - database = client.database_root_path('[PROJECT]', '[DATABASE]') + database = client.database_root_path("[PROJECT]", "[DATABASE]") writes = [] response = client.commit(database, writes) assert expected_response == response assert len(channel.requests) == 1 - expected_request = firestore_pb2.CommitRequest( - database=database, writes=writes) + expected_request = firestore_pb2.CommitRequest(database=database, writes=writes) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_commit_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - database = client.database_root_path('[PROJECT]', '[DATABASE]') + database = client.database_root_path("[PROJECT]", "[DATABASE]") writes = [] with pytest.raises(CustomException): @@ -422,58 +417,60 @@ def test_commit_exception(self): def test_rollback(self): channel = ChannelStub() - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - database = client.database_root_path('[PROJECT]', '[DATABASE]') - transaction = b'-34' + database = client.database_root_path("[PROJECT]", "[DATABASE]") + transaction = b"-34" client.rollback(database, transaction) assert len(channel.requests) == 1 expected_request = firestore_pb2.RollbackRequest( - database=database, transaction=transaction) + database=database, transaction=transaction + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_rollback_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - database = client.database_root_path('[PROJECT]', '[DATABASE]') - transaction = b'-34' + database = client.database_root_path("[PROJECT]", "[DATABASE]") + transaction = b"-34" with pytest.raises(CustomException): client.rollback(database, transaction) def test_run_query(self): # Setup Expected Response - transaction = b'-34' + transaction = b"-34" skipped_results = 880286183 expected_response = { - 'transaction': transaction, - 'skipped_results': skipped_results + "transaction": transaction, + "skipped_results": skipped_results, } expected_response = firestore_pb2.RunQueryResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = 
firestore_client.FirestoreClient() # Setup Request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) response = client.run_query(parent) resources = list(response) @@ -488,38 +485,36 @@ def test_run_query(self): def test_run_query_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) with pytest.raises(CustomException): client.run_query(parent) def test_write(self): # Setup Expected Response - stream_id = 'streamId-315624902' - stream_token = b'122' - expected_response = { - 'stream_id': stream_id, - 'stream_token': stream_token - } + stream_id = "streamId-315624902" + stream_token = b"122" + expected_response = {"stream_id": stream_id, "stream_token": stream_token} expected_response = firestore_pb2.WriteResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - database = client.database_root_path('[PROJECT]', '[DATABASE]') - request = {'database': database} + database = client.database_root_path("[PROJECT]", "[DATABASE]") + request = {"database": database} request = firestore_pb2.WriteRequest(**request) requests = [request] @@ -537,14 +532,14 @@ def test_write(self): def test_write_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - database = client.database_root_path('[PROJECT]', '[DATABASE]') - request = {'database': database} + database = client.database_root_path("[PROJECT]", "[DATABASE]") + request = {"database": database} request = firestore_pb2.WriteRequest(**request) requests = [request] @@ -559,14 +554,14 @@ def test_listen(self): # Mock the API response channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - database = client.database_root_path('[PROJECT]', '[DATABASE]') - request = {'database': database} + database = client.database_root_path("[PROJECT]", "[DATABASE]") + request = {"database": database} request = firestore_pb2.ListenRequest(**request) requests = [request] @@ -584,14 +579,14 @@ def test_listen(self): def test_listen_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = 
mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - database = client.database_root_path('[PROJECT]', '[DATABASE]') - request = {'database': database} + database = client.database_root_path("[PROJECT]", "[DATABASE]") + request = {"database": database} request = firestore_pb2.ListenRequest(**request) requests = [request] @@ -601,26 +596,26 @@ def test_listen_exception(self): def test_list_collection_ids(self): # Setup Expected Response - next_page_token = '' - collection_ids_element = 'collectionIdsElement1368994900' + next_page_token = "" + collection_ids_element = "collectionIdsElement1368994900" collection_ids = [collection_ids_element] expected_response = { - 'next_page_token': next_page_token, - 'collection_ids': collection_ids + "next_page_token": next_page_token, + "collection_ids": collection_ids, } - expected_response = firestore_pb2.ListCollectionIdsResponse( - **expected_response) + expected_response = firestore_pb2.ListCollectionIdsResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup Request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) paged_list_response = client.list_collection_ids(parent) resources = list(paged_list_response) @@ -629,21 +624,21 @@ def test_list_collection_ids(self): assert expected_response.collection_ids[0] == resources[0] assert len(channel.requests) == 1 - expected_request = firestore_pb2.ListCollectionIdsRequest( - parent=parent) + expected_request = firestore_pb2.ListCollectionIdsRequest(parent=parent) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_list_collection_ids_exception(self): channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = firestore_client.FirestoreClient() # Setup request - parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', - '[ANY_PATH]') + parent = client.any_path_path( + "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]" + ) paged_list_response = client.list_collection_ids(parent) with pytest.raises(CustomException): diff --git a/firestore/tests/unit/test__helpers.py b/firestore/tests/unit/test__helpers.py index 712bf745cb3e..b30cb4d370ff 100644 --- a/firestore/tests/unit/test__helpers.py +++ b/firestore/tests/unit/test__helpers.py @@ -21,7 +21,6 @@ class TestGeoPoint(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1._helpers import GeoPoint @@ -69,7 +68,7 @@ def test___ne__same_value(self): lng = 20.03125 geo_pt1 = self._make_one(lat, lng) geo_pt2 = self._make_one(lat, lng) - comparison_val = (geo_pt1 != geo_pt2) + comparison_val = geo_pt1 != geo_pt2 self.assertFalse(comparison_val) def test___ne__(self): @@ -87,10 +86,10 @@ def test___ne__type_differ(self): class 
TestFieldPath(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1._helpers import FieldPath + return FieldPath def _make_one(self, *args): @@ -99,118 +98,118 @@ def _make_one(self, *args): def test_ctor_w_none_in_part(self): with self.assertRaises(ValueError): - self._make_one('a', None, 'b') + self._make_one("a", None, "b") def test_ctor_w_empty_string_in_part(self): with self.assertRaises(ValueError): - self._make_one('a', '', 'b') + self._make_one("a", "", "b") def test_ctor_w_integer_part(self): with self.assertRaises(ValueError): - self._make_one('a', 3, 'b') + self._make_one("a", 3, "b") def test_ctor_w_list(self): - parts = ['a', 'b', 'c'] + parts = ["a", "b", "c"] with self.assertRaises(ValueError): self._make_one(parts) def test_ctor_w_tuple(self): - parts = ('a', 'b', 'c') + parts = ("a", "b", "c") with self.assertRaises(ValueError): self._make_one(parts) def test_ctor_w_iterable_part(self): with self.assertRaises(ValueError): - self._make_one('a', ['a'], 'b') + self._make_one("a", ["a"], "b") def test_constructor_w_single_part(self): - field_path = self._make_one('a') - self.assertEqual(field_path.parts, ('a',)) + field_path = self._make_one("a") + self.assertEqual(field_path.parts, ("a",)) def test_constructor_w_multiple_parts(self): - field_path = self._make_one('a', 'b', 'c') - self.assertEqual(field_path.parts, ('a', 'b', 'c')) + field_path = self._make_one("a", "b", "c") + self.assertEqual(field_path.parts, ("a", "b", "c")) def test_ctor_w_invalid_chars_in_part(self): - invalid_parts = ('~', '*', '/', '[', ']', '.') + invalid_parts = ("~", "*", "/", "[", "]", ".") for invalid_part in invalid_parts: field_path = self._make_one(invalid_part) - self.assertEqual(field_path.parts, (invalid_part, )) + self.assertEqual(field_path.parts, (invalid_part,)) def test_ctor_w_double_dots(self): - field_path = self._make_one('a..b') - self.assertEqual(field_path.parts, ('a..b',)) + field_path = self._make_one("a..b") + self.assertEqual(field_path.parts, ("a..b",)) def test_ctor_w_unicode(self): - field_path = self._make_one('一', '二', '三') - self.assertEqual(field_path.parts, ('一', '二', '三')) + field_path = self._make_one("一", "二", "三") + self.assertEqual(field_path.parts, ("一", "二", "三")) def test_from_string_w_empty_string(self): - parts = '' + parts = "" with self.assertRaises(ValueError): self._get_target_class().from_string(parts) def test_from_string_w_empty_field_name(self): - parts = 'a..b' + parts = "a..b" with self.assertRaises(ValueError): self._get_target_class().from_string(parts) def test_from_string_w_invalid_chars(self): - invalid_parts = ('~', '*', '/', '[', ']', '.') + invalid_parts = ("~", "*", "/", "[", "]", ".") for invalid_part in invalid_parts: with self.assertRaises(ValueError): self._get_target_class().from_string(invalid_part) def test_from_string_w_ascii_single(self): - field_path = self._get_target_class().from_string('a') - self.assertEqual(field_path.parts, ('a',)) + field_path = self._get_target_class().from_string("a") + self.assertEqual(field_path.parts, ("a",)) def test_from_string_w_ascii_dotted(self): - field_path = self._get_target_class().from_string('a.b.c') - self.assertEqual(field_path.parts, ('a', 'b', 'c')) + field_path = self._get_target_class().from_string("a.b.c") + self.assertEqual(field_path.parts, ("a", "b", "c")) def test_from_string_w_non_ascii_dotted(self): - field_path = self._get_target_class().from_string('a.一') - self.assertEqual(field_path.parts, ('a', '一')) + field_path = 
self._get_target_class().from_string("a.一") + self.assertEqual(field_path.parts, ("a", "一")) def test___hash___w_single_part(self): - field_path = self._make_one('a') - self.assertEqual(hash(field_path), hash('a')) + field_path = self._make_one("a") + self.assertEqual(hash(field_path), hash("a")) def test___hash___w_multiple_parts(self): - field_path = self._make_one('a', 'b') - self.assertEqual(hash(field_path), hash('a.b')) + field_path = self._make_one("a", "b") + self.assertEqual(hash(field_path), hash("a.b")) def test___hash___w_escaped_parts(self): - field_path = self._make_one('a', '3') - self.assertEqual(hash(field_path), hash('a.`3`')) + field_path = self._make_one("a", "3") + self.assertEqual(hash(field_path), hash("a.`3`")) def test___eq___w_matching_type(self): - field_path = self._make_one('a', 'b') - string_path = self._get_target_class().from_string('a.b') + field_path = self._make_one("a", "b") + string_path = self._get_target_class().from_string("a.b") self.assertEqual(field_path, string_path) def test___eq___w_non_matching_type(self): - field_path = self._make_one('a', 'c') + field_path = self._make_one("a", "c") other = mock.Mock() - other.parts = 'a', 'b' + other.parts = "a", "b" self.assertNotEqual(field_path, other) def test___lt___w_matching_type(self): - field_path = self._make_one('a', 'b') - string_path = self._get_target_class().from_string('a.c') + field_path = self._make_one("a", "b") + string_path = self._get_target_class().from_string("a.c") self.assertTrue(field_path < string_path) def test___lt___w_non_matching_type(self): - field_path = self._make_one('a', 'b') + field_path = self._make_one("a", "b") other = object() # Python 2 doesn't raise TypeError here, but Python3 does. self.assertIs(field_path.__lt__(other), NotImplemented) def test___add__(self): - path1 = 'a123', 'b456' - path2 = 'c789', 'd012' - path3 = 'c789.d012' + path1 = "a123", "b456" + path2 = "c789", "d012" + path3 = "c789.d012" field_path1 = self._make_one(*path1) field_path1_string = self._make_one(*path1) field_path2 = self._make_one(*path2) @@ -225,81 +224,82 @@ def test___add__(self): field_path1 + 305 def test_eq_or_parent_same(self): - field_path = self._make_one('a', 'b') - other = self._make_one('a', 'b') + field_path = self._make_one("a", "b") + other = self._make_one("a", "b") self.assertTrue(field_path.eq_or_parent(other)) def test_eq_or_parent_prefix(self): - field_path = self._make_one('a', 'b') - other = self._make_one('a', 'b', 'c') + field_path = self._make_one("a", "b") + other = self._make_one("a", "b", "c") self.assertTrue(field_path.eq_or_parent(other)) self.assertTrue(other.eq_or_parent(field_path)) def test_eq_or_parent_no_prefix(self): - field_path = self._make_one('a', 'b') - other = self._make_one('d', 'e', 'f') + field_path = self._make_one("a", "b") + other = self._make_one("d", "e", "f") self.assertFalse(field_path.eq_or_parent(other)) self.assertFalse(other.eq_or_parent(field_path)) def test_to_api_repr_a(self): - parts = 'a' + parts = "a" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), 'a') + self.assertEqual(field_path.to_api_repr(), "a") def test_to_api_repr_backtick(self): - parts = '`' + parts = "`" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), r'`\``') + self.assertEqual(field_path.to_api_repr(), r"`\``") def test_to_api_repr_dot(self): - parts = '.' + parts = "." 
field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), '`.`') + self.assertEqual(field_path.to_api_repr(), "`.`") def test_to_api_repr_slash(self): - parts = '\\' + parts = "\\" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), r'`\\`') + self.assertEqual(field_path.to_api_repr(), r"`\\`") def test_to_api_repr_double_slash(self): - parts = r'\\' + parts = r"\\" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), r'`\\\\`') + self.assertEqual(field_path.to_api_repr(), r"`\\\\`") def test_to_api_repr_underscore(self): - parts = '_33132' + parts = "_33132" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), '_33132') + self.assertEqual(field_path.to_api_repr(), "_33132") def test_to_api_repr_unicode_non_simple(self): - parts = '一' + parts = "一" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), '`一`') + self.assertEqual(field_path.to_api_repr(), "`一`") def test_to_api_repr_number_non_simple(self): - parts = '03' + parts = "03" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), '`03`') + self.assertEqual(field_path.to_api_repr(), "`03`") def test_to_api_repr_simple_with_dot(self): - field_path = self._make_one('a.b') - self.assertEqual(field_path.to_api_repr(), '`a.b`') + field_path = self._make_one("a.b") + self.assertEqual(field_path.to_api_repr(), "`a.b`") def test_to_api_repr_non_simple_with_dot(self): - parts = 'a.一' + parts = "a.一" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), '`a.一`') + self.assertEqual(field_path.to_api_repr(), "`a.一`") def test_to_api_repr_simple(self): - parts = 'a0332432' + parts = "a0332432" field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), 'a0332432') + self.assertEqual(field_path.to_api_repr(), "a0332432") def test_to_api_repr_chain(self): - parts = 'a', '`', '\\', '_3', '03', 'a03', '\\\\', 'a0332432', '一' + parts = "a", "`", "\\", "_3", "03", "a03", "\\\\", "a0332432", "一" field_path = self._make_one(*parts) - self.assertEqual(field_path.to_api_repr(), - r'a.`\``.`\\`._3.`03`.a03.`\\\\`.a0332432.`一`') + self.assertEqual( + field_path.to_api_repr(), r"a.`\``.`\\`._3.`03`.a03.`\\\\`.a0332432.`一`" + ) def test_lineage_empty(self): field_path = self._make_one() @@ -307,21 +307,17 @@ def test_lineage_empty(self): self.assertEqual(field_path.lineage(), expected) def test_lineage_single(self): - field_path = self._make_one('a') + field_path = self._make_one("a") expected = set() self.assertEqual(field_path.lineage(), expected) def test_lineage_nested(self): - field_path = self._make_one('a', 'b', 'c') - expected = set([ - self._make_one('a'), - self._make_one('a', 'b'), - ]) + field_path = self._make_one("a", "b", "c") + expected = set([self._make_one("a"), self._make_one("a", "b")]) self.assertEqual(field_path.lineage(), expected) class Test_verify_path(unittest.TestCase): - @staticmethod def _call_fut(path, is_collection): from google.cloud.firestore_v1beta1._helpers import verify_path @@ -336,40 +332,39 @@ def test_empty(self): self._call_fut(path, False) def test_wrong_length_collection(self): - path = ('foo', 'bar') + path = ("foo", "bar") with self.assertRaises(ValueError): self._call_fut(path, True) def test_wrong_length_document(self): - path = ('Kind',) + path = ("Kind",) with self.assertRaises(ValueError): self._call_fut(path, False) def test_wrong_type_collection(self): - path = (99, 'ninety-nine', 'zap') + path = (99, 
"ninety-nine", "zap") with self.assertRaises(ValueError): self._call_fut(path, True) def test_wrong_type_document(self): - path = ('Users', 'Ada', 'Candy', {}) + path = ("Users", "Ada", "Candy", {}) with self.assertRaises(ValueError): self._call_fut(path, False) def test_success_collection(self): - path = ('Computer', 'Magic', 'Win') + path = ("Computer", "Magic", "Win") ret_val = self._call_fut(path, True) # NOTE: We are just checking that it didn't fail. self.assertIsNone(ret_val) def test_success_document(self): - path = ('Tokenizer', 'Seventeen', 'Cheese', 'Burger') + path = ("Tokenizer", "Seventeen", "Cheese", "Burger") ret_val = self._call_fut(path, False) # NOTE: We are just checking that it didn't fail. self.assertIsNone(ret_val) class Test_encode_value(unittest.TestCase): - @staticmethod def _call_fut(value): from google.cloud.firestore_v1beta1._helpers import encode_value @@ -407,25 +402,21 @@ def test_datetime(self): dt_nanos = 458816000 # Make sure precision is valid in microseconds too. self.assertEqual(dt_nanos % 1000, 0) - dt_val = datetime.datetime.utcfromtimestamp( - dt_seconds + 1e-9 * dt_nanos) + dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos) result = self._call_fut(dt_val) - timestamp_pb = timestamp_pb2.Timestamp( - seconds=dt_seconds, - nanos=dt_nanos, - ) + timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) expected = _value_pb(timestamp_value=timestamp_pb) self.assertEqual(result, expected) def test_string(self): - value = u'\u2018left quote, right quote\u2019' + value = u"\u2018left quote, right quote\u2019" result = self._call_fut(value) expected = _value_pb(string_value=value) self.assertEqual(result, expected) def test_bytes(self): - value = b'\xe3\xf2\xff\x00' + value = b"\xe3\xf2\xff\x00" result = self._call_fut(value) expected = _value_pb(bytes_value=value) self.assertEqual(result, expected) @@ -433,7 +424,7 @@ def test_bytes(self): def test_reference_value(self): client = _make_client() - value = client.document('my', 'friend') + value = client.document("my", "friend") result = self._call_fut(value) expected = _value_pb(reference_value=value._document_path) self.assertEqual(result, expected) @@ -447,35 +438,31 @@ def test_geo_point(self): self.assertEqual(result, expected) def test_array(self): - from google.cloud.firestore_v1beta1.proto.document_pb2 import ( - ArrayValue) - - result = self._call_fut([ - 99, - True, - 118.5 - ]) - - array_pb = ArrayValue(values=[ - _value_pb(integer_value=99), - _value_pb(boolean_value=True), - _value_pb(double_value=118.5), - ]) + from google.cloud.firestore_v1beta1.proto.document_pb2 import ArrayValue + + result = self._call_fut([99, True, 118.5]) + + array_pb = ArrayValue( + values=[ + _value_pb(integer_value=99), + _value_pb(boolean_value=True), + _value_pb(double_value=118.5), + ] + ) expected = _value_pb(array_value=array_pb) self.assertEqual(result, expected) def test_map(self): from google.cloud.firestore_v1beta1.proto.document_pb2 import MapValue - result = self._call_fut({ - 'abc': 285, - 'def': b'piglatin', - }) + result = self._call_fut({"abc": 285, "def": b"piglatin"}) - map_pb = MapValue(fields={ - 'abc': _value_pb(integer_value=285), - 'def': _value_pb(bytes_value=b'piglatin'), - }) + map_pb = MapValue( + fields={ + "abc": _value_pb(integer_value=285), + "def": _value_pb(bytes_value=b"piglatin"), + } + ) expected = _value_pb(map_value=map_pb) self.assertEqual(result, expected) @@ -486,7 +473,6 @@ def test_bad_type(self): class 
Test_encode_dict(unittest.TestCase): - @staticmethod def _call_fut(values_dict): from google.cloud.firestore_v1beta1._helpers import encode_dict @@ -496,76 +482,75 @@ def _call_fut(values_dict): def test_many_types(self): from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.proto.document_pb2 import ( - ArrayValue) + from google.cloud.firestore_v1beta1.proto.document_pb2 import ArrayValue from google.cloud.firestore_v1beta1.proto.document_pb2 import MapValue dt_seconds = 1497397225 dt_nanos = 465964000 # Make sure precision is valid in microseconds too. self.assertEqual(dt_nanos % 1000, 0) - dt_val = datetime.datetime.utcfromtimestamp( - dt_seconds + 1e-9 * dt_nanos) + dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos) client = _make_client() - document = client.document('most', 'adjective', 'thing', 'here') + document = client.document("most", "adjective", "thing", "here") values_dict = { - 'foo': None, - 'bar': True, - 'baz': 981, - 'quux': 2.875, - 'quuz': dt_val, - 'corge': u'\N{snowman}', - 'grault': b'\xe2\x98\x83', - 'wibble': document, - 'garply': [ - u'fork', - 4.0, - ], - 'waldo': { - 'fred': u'zap', - 'thud': False, - }, + "foo": None, + "bar": True, + "baz": 981, + "quux": 2.875, + "quuz": dt_val, + "corge": u"\N{snowman}", + "grault": b"\xe2\x98\x83", + "wibble": document, + "garply": [u"fork", 4.0], + "waldo": {"fred": u"zap", "thud": False}, } encoded_dict = self._call_fut(values_dict) expected_dict = { - 'foo': _value_pb(null_value=struct_pb2.NULL_VALUE), - 'bar': _value_pb(boolean_value=True), - 'baz': _value_pb(integer_value=981), - 'quux': _value_pb(double_value=2.875), - 'quuz': _value_pb(timestamp_value=timestamp_pb2.Timestamp( - seconds=dt_seconds, - nanos=dt_nanos, - )), - 'corge': _value_pb(string_value=u'\N{snowman}'), - 'grault': _value_pb(bytes_value=b'\xe2\x98\x83'), - 'wibble': _value_pb(reference_value=document._document_path), - 'garply': _value_pb(array_value=ArrayValue(values=[ - _value_pb(string_value=u'fork'), - _value_pb(double_value=4.0), - ])), - 'waldo': _value_pb(map_value=MapValue(fields={ - 'fred': _value_pb(string_value=u'zap'), - 'thud': _value_pb(boolean_value=False), - })), + "foo": _value_pb(null_value=struct_pb2.NULL_VALUE), + "bar": _value_pb(boolean_value=True), + "baz": _value_pb(integer_value=981), + "quux": _value_pb(double_value=2.875), + "quuz": _value_pb( + timestamp_value=timestamp_pb2.Timestamp( + seconds=dt_seconds, nanos=dt_nanos + ) + ), + "corge": _value_pb(string_value=u"\N{snowman}"), + "grault": _value_pb(bytes_value=b"\xe2\x98\x83"), + "wibble": _value_pb(reference_value=document._document_path), + "garply": _value_pb( + array_value=ArrayValue( + values=[ + _value_pb(string_value=u"fork"), + _value_pb(double_value=4.0), + ] + ) + ), + "waldo": _value_pb( + map_value=MapValue( + fields={ + "fred": _value_pb(string_value=u"zap"), + "thud": _value_pb(boolean_value=False), + } + ) + ), } self.assertEqual(encoded_dict, expected_dict) class Test_reference_value_to_document(unittest.TestCase): - @staticmethod def _call_fut(reference_value, client): - from google.cloud.firestore_v1beta1._helpers import ( - reference_value_to_document) + from google.cloud.firestore_v1beta1._helpers import reference_value_to_document return reference_value_to_document(reference_value, client) def test_bad_format(self): from google.cloud.firestore_v1beta1._helpers import BAD_REFERENCE_ERROR - reference_value = 'not/the/right/format' + reference_value = 
"not/the/right/format" with self.assertRaises(ValueError) as exc_info: self._call_fut(reference_value, None) @@ -576,7 +561,7 @@ def test_same_client(self): from google.cloud.firestore_v1beta1.document import DocumentReference client = _make_client() - document = client.document('that', 'this') + document = client.document("that", "this") reference_value = document._document_path new_document = self._call_fut(reference_value, client) @@ -589,21 +574,19 @@ def test_same_client(self): def test_different_client(self): from google.cloud.firestore_v1beta1._helpers import WRONG_APP_REFERENCE - client1 = _make_client(project='kirk') - document = client1.document('tin', 'foil') + client1 = _make_client(project="kirk") + document = client1.document("tin", "foil") reference_value = document._document_path - client2 = _make_client(project='spock') + client2 = _make_client(project="spock") with self.assertRaises(ValueError) as exc_info: self._call_fut(reference_value, client2) - err_msg = WRONG_APP_REFERENCE.format( - reference_value, client2._database_string) + err_msg = WRONG_APP_REFERENCE.format(reference_value, client2._database_string) self.assertEqual(exc_info.exception.args, (err_msg,)) class Test_decode_value(unittest.TestCase): - @staticmethod def _call_fut(value, client=mock.sentinel.client): from google.cloud.firestore_v1beta1._helpers import decode_value @@ -632,8 +615,9 @@ def test_float(self): value = _value_pb(double_value=float_val) self.assertEqual(self._call_fut(value), float_val) - @unittest.skipIf((3,) <= sys.version_info < (3, 4, 4), - 'known datetime bug (bpo-23517) in Python') + @unittest.skipIf( + (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python" + ) def test_datetime(self): from google.protobuf import timestamp_pb2 from google.cloud._helpers import UTC @@ -643,23 +627,21 @@ def test_datetime(self): # Make sure precision is valid in microseconds too. 
self.assertEqual(dt_nanos % 1000, 0) - timestamp_pb = timestamp_pb2.Timestamp( - seconds=dt_seconds, - nanos=dt_nanos, - ) + timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) value = _value_pb(timestamp_value=timestamp_pb) expected_dt_val = datetime.datetime.utcfromtimestamp( - dt_seconds + 1e-9 * dt_nanos).replace(tzinfo=UTC) + dt_seconds + 1e-9 * dt_nanos + ).replace(tzinfo=UTC) self.assertEqual(self._call_fut(value), expected_dt_val) def test_unicode(self): - unicode_val = u'zorgon' + unicode_val = u"zorgon" value = _value_pb(string_value=unicode_val) self.assertEqual(self._call_fut(value), unicode_val) def test_bytes(self): - bytes_val = b'abc\x80' + bytes_val = b"abc\x80" value = _value_pb(bytes_value=bytes_val) self.assertEqual(self._call_fut(value), bytes_val) @@ -667,7 +649,7 @@ def test_reference(self): from google.cloud.firestore_v1beta1.document import DocumentReference client = _make_client() - path = (u'then', u'there-was-one') + path = (u"then", u"there-was-one") document = client.document(*path) ref_string = document._document_path value = _value_pb(reference_value=ref_string) @@ -689,9 +671,8 @@ def test_array(self): sub_value1 = _value_pb(boolean_value=True) sub_value2 = _value_pb(double_value=14.1396484375) - sub_value3 = _value_pb(bytes_value=b'\xde\xad\xbe\xef') - array_pb = document_pb2.ArrayValue( - values=[sub_value1, sub_value2, sub_value3]) + sub_value3 = _value_pb(bytes_value=b"\xde\xad\xbe\xef") + array_pb = document_pb2.ArrayValue(values=[sub_value1, sub_value2, sub_value3]) value = _value_pb(array_value=array_pb) expected = [ @@ -705,16 +686,15 @@ def test_map(self): from google.cloud.firestore_v1beta1.proto import document_pb2 sub_value1 = _value_pb(integer_value=187680) - sub_value2 = _value_pb(string_value=u'how low can you go?') - map_pb = document_pb2.MapValue(fields={ - 'first': sub_value1, - 'second': sub_value2, - }) + sub_value2 = _value_pb(string_value=u"how low can you go?") + map_pb = document_pb2.MapValue( + fields={"first": sub_value1, "second": sub_value2} + ) value = _value_pb(map_value=map_pb) expected = { - 'first': sub_value1.integer_value, - 'second': sub_value2.string_value, + "first": sub_value1.integer_value, + "second": sub_value2.string_value, } self.assertEqual(self._call_fut(value), expected) @@ -722,31 +702,34 @@ def test_nested_map(self): from google.cloud.firestore_v1beta1.proto import document_pb2 actual_value1 = 1009876 - actual_value2 = u'hey you guys' + actual_value2 = u"hey you guys" actual_value3 = 90.875 - map_pb1 = document_pb2.MapValue(fields={ - 'lowest': _value_pb(integer_value=actual_value1), - 'aside': _value_pb(string_value=actual_value2), - }) - map_pb2 = document_pb2.MapValue(fields={ - 'middle': _value_pb(map_value=map_pb1), - 'aside': _value_pb(boolean_value=True), - }) - map_pb3 = document_pb2.MapValue(fields={ - 'highest': _value_pb(map_value=map_pb2), - 'aside': _value_pb(double_value=actual_value3), - }) + map_pb1 = document_pb2.MapValue( + fields={ + "lowest": _value_pb(integer_value=actual_value1), + "aside": _value_pb(string_value=actual_value2), + } + ) + map_pb2 = document_pb2.MapValue( + fields={ + "middle": _value_pb(map_value=map_pb1), + "aside": _value_pb(boolean_value=True), + } + ) + map_pb3 = document_pb2.MapValue( + fields={ + "highest": _value_pb(map_value=map_pb2), + "aside": _value_pb(double_value=actual_value3), + } + ) value = _value_pb(map_value=map_pb3) expected = { - 'highest': { - 'middle': { - 'lowest': actual_value1, - 'aside': actual_value2, - }, - 'aside': 
True, + "highest": { + "middle": {"lowest": actual_value1, "aside": actual_value2}, + "aside": True, }, - 'aside': actual_value3, + "aside": actual_value3, } self.assertEqual(self._call_fut(value), expected) @@ -755,30 +738,29 @@ def test_unset_value_type(self): self._call_fut(_value_pb()) def test_unknown_value_type(self): - value_pb = mock.Mock(spec=['WhichOneof']) - value_pb.WhichOneof.return_value = 'zoob_value' + value_pb = mock.Mock(spec=["WhichOneof"]) + value_pb.WhichOneof.return_value = "zoob_value" with self.assertRaises(ValueError): self._call_fut(value_pb) - value_pb.WhichOneof.assert_called_once_with('value_type') + value_pb.WhichOneof.assert_called_once_with("value_type") class Test_decode_dict(unittest.TestCase): - @staticmethod def _call_fut(value_fields, client=mock.sentinel.client): from google.cloud.firestore_v1beta1._helpers import decode_dict return decode_dict(value_fields, client) - @unittest.skipIf((3,) <= sys.version_info < (3, 4, 4), - 'known datetime bug (bpo-23517) in Python') + @unittest.skipIf( + (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python" + ) def test_many_types(self): from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.proto.document_pb2 import ( - ArrayValue) + from google.cloud.firestore_v1beta1.proto.document_pb2 import ArrayValue from google.cloud.firestore_v1beta1.proto.document_pb2 import MapValue from google.cloud._helpers import UTC from google.cloud.firestore_v1beta1._helpers import FieldPath @@ -788,53 +770,55 @@ def test_many_types(self): # Make sure precision is valid in microseconds too. self.assertEqual(dt_nanos % 1000, 0) dt_val = datetime.datetime.utcfromtimestamp( - dt_seconds + 1e-9 * dt_nanos).replace(tzinfo=UTC) + dt_seconds + 1e-9 * dt_nanos + ).replace(tzinfo=UTC) value_fields = { - 'foo': _value_pb(null_value=struct_pb2.NULL_VALUE), - 'bar': _value_pb(boolean_value=True), - 'baz': _value_pb(integer_value=981), - 'quux': _value_pb(double_value=2.875), - 'quuz': _value_pb(timestamp_value=timestamp_pb2.Timestamp( - seconds=dt_seconds, - nanos=dt_nanos, - )), - 'corge': _value_pb(string_value=u'\N{snowman}'), - 'grault': _value_pb(bytes_value=b'\xe2\x98\x83'), - 'garply': _value_pb(array_value=ArrayValue(values=[ - _value_pb(string_value=u'fork'), - _value_pb(double_value=4.0), - ])), - 'waldo': _value_pb(map_value=MapValue(fields={ - 'fred': _value_pb(string_value=u'zap'), - 'thud': _value_pb(boolean_value=False), - })), - FieldPath('a', 'b', 'c').to_api_repr(): - _value_pb(boolean_value=False) + "foo": _value_pb(null_value=struct_pb2.NULL_VALUE), + "bar": _value_pb(boolean_value=True), + "baz": _value_pb(integer_value=981), + "quux": _value_pb(double_value=2.875), + "quuz": _value_pb( + timestamp_value=timestamp_pb2.Timestamp( + seconds=dt_seconds, nanos=dt_nanos + ) + ), + "corge": _value_pb(string_value=u"\N{snowman}"), + "grault": _value_pb(bytes_value=b"\xe2\x98\x83"), + "garply": _value_pb( + array_value=ArrayValue( + values=[ + _value_pb(string_value=u"fork"), + _value_pb(double_value=4.0), + ] + ) + ), + "waldo": _value_pb( + map_value=MapValue( + fields={ + "fred": _value_pb(string_value=u"zap"), + "thud": _value_pb(boolean_value=False), + } + ) + ), + FieldPath("a", "b", "c").to_api_repr(): _value_pb(boolean_value=False), } expected = { - 'foo': None, - 'bar': True, - 'baz': 981, - 'quux': 2.875, - 'quuz': dt_val, - 'corge': u'\N{snowman}', - 'grault': b'\xe2\x98\x83', - 'garply': [ - u'fork', - 4.0, - ], - 'waldo': { - 
'fred': u'zap', - 'thud': False, - }, - 'a.b.c': False + "foo": None, + "bar": True, + "baz": 981, + "quux": 2.875, + "quuz": dt_val, + "corge": u"\N{snowman}", + "grault": b"\xe2\x98\x83", + "garply": [u"fork", 4.0], + "waldo": {"fred": u"zap", "thud": False}, + "a.b.c": False, } self.assertEqual(self._call_fut(value_fields), expected) class Test_get_field_path(unittest.TestCase): - @staticmethod def _call_fut(field_names): from google.cloud.firestore_v1beta1._helpers import get_field_path @@ -842,29 +826,28 @@ def _call_fut(field_names): return get_field_path(field_names) def test_w_empty(self): - self.assertEqual(self._call_fut([]), '') + self.assertEqual(self._call_fut([]), "") def test_w_one_simple(self): - self.assertEqual(self._call_fut(['a']), 'a') + self.assertEqual(self._call_fut(["a"]), "a") def test_w_one_starts_w_digit(self): - self.assertEqual(self._call_fut(['0abc']), '`0abc`') + self.assertEqual(self._call_fut(["0abc"]), "`0abc`") def test_w_one_w_non_alphanum(self): - self.assertEqual(self._call_fut(['a b c']), '`a b c`') + self.assertEqual(self._call_fut(["a b c"]), "`a b c`") def test_w_one_w_backtick(self): - self.assertEqual(self._call_fut(['a`b']), '`a\\`b`') + self.assertEqual(self._call_fut(["a`b"]), "`a\\`b`") def test_w_one_w_backslash(self): - self.assertEqual(self._call_fut(['a\\b']), '`a\\\\b`') + self.assertEqual(self._call_fut(["a\\b"]), "`a\\\\b`") def test_multiple(self): - self.assertEqual(self._call_fut(['a', 'b', 'c']), 'a.b.c') + self.assertEqual(self._call_fut(["a", "b", "c"]), "a.b.c") class Test__tokenize_field_path(unittest.TestCase): - @staticmethod def _call_fut(path): from google.cloud.firestore_v1beta1 import _helpers @@ -875,32 +858,31 @@ def _expect(self, path, split_path): self.assertEqual(list(self._call_fut(path)), split_path) def test_w_empty(self): - self._expect('', []) + self._expect("", []) def test_w_single_dot(self): - self._expect('.', ['.']) + self._expect(".", ["."]) def test_w_single_simple(self): - self._expect('abc', ['abc']) + self._expect("abc", ["abc"]) def test_w_single_quoted(self): - self._expect('`c*de`', ['`c*de`']) + self._expect("`c*de`", ["`c*de`"]) def test_w_quoted_embedded_dot(self): - self._expect('`c*.de`', ['`c*.de`']) + self._expect("`c*.de`", ["`c*.de`"]) def test_w_quoted_escaped_backtick(self): - self._expect(r'`c*\`de`', [r'`c*\`de`']) + self._expect(r"`c*\`de`", [r"`c*\`de`"]) def test_w_dotted_quoted(self): - self._expect('`*`.`~`', ['`*`', '.', '`~`']) + self._expect("`*`.`~`", ["`*`", ".", "`~`"]) def test_w_dotted(self): - self._expect('a.b.`c*de`', ['a', '.', 'b', '.', '`c*de`']) + self._expect("a.b.`c*de`", ["a", ".", "b", ".", "`c*de`"]) class Test_split_field_path(unittest.TestCase): - @staticmethod def _call_fut(path): from google.cloud.firestore_v1beta1 import _helpers @@ -909,42 +891,41 @@ def _call_fut(path): def test_w_single_dot(self): with self.assertRaises(ValueError): - self._call_fut('.') + self._call_fut(".") def test_w_leading_dot(self): with self.assertRaises(ValueError): - self._call_fut('.a.b.c') + self._call_fut(".a.b.c") def test_w_trailing_dot(self): with self.assertRaises(ValueError): - self._call_fut('a.b.') + self._call_fut("a.b.") def test_w_missing_dot(self): with self.assertRaises(ValueError): - self._call_fut('a`c*de`f') + self._call_fut("a`c*de`f") def test_w_half_quoted_field(self): with self.assertRaises(ValueError): - self._call_fut('`c*de') + self._call_fut("`c*de") def test_w_empty(self): - self.assertEqual(self._call_fut(''), []) + 
self.assertEqual(self._call_fut(""), []) def test_w_simple_field(self): - self.assertEqual(self._call_fut('a'), ['a']) + self.assertEqual(self._call_fut("a"), ["a"]) def test_w_dotted_field(self): - self.assertEqual(self._call_fut('a.b.cde'), ['a', 'b', 'cde']) + self.assertEqual(self._call_fut("a.b.cde"), ["a", "b", "cde"]) def test_w_quoted_field(self): - self.assertEqual(self._call_fut('a.b.`c*de`'), ['a', 'b', '`c*de`']) + self.assertEqual(self._call_fut("a.b.`c*de`"), ["a", "b", "`c*de`"]) def test_w_quoted_field_escaped_backtick(self): - self.assertEqual(self._call_fut(r'`c*\`de`'), [r'`c*\`de`']) + self.assertEqual(self._call_fut(r"`c*\`de`"), [r"`c*\`de`"]) class Test_parse_field_path(unittest.TestCase): - @staticmethod def _call_fut(field_path): from google.cloud.firestore_v1beta1._helpers import parse_field_path @@ -952,30 +933,24 @@ def _call_fut(field_path): return parse_field_path(field_path) def test_wo_escaped_names(self): - self.assertEqual(self._call_fut('a.b.c'), ['a', 'b', 'c']) + self.assertEqual(self._call_fut("a.b.c"), ["a", "b", "c"]) def test_w_escaped_backtick(self): - self.assertEqual(self._call_fut('`a\\`b`.c.d'), ['a`b', 'c', 'd']) + self.assertEqual(self._call_fut("`a\\`b`.c.d"), ["a`b", "c", "d"]) def test_w_escaped_backslash(self): - self.assertEqual(self._call_fut('`a\\\\b`.c.d'), ['a\\b', 'c', 'd']) + self.assertEqual(self._call_fut("`a\\\\b`.c.d"), ["a\\b", "c", "d"]) def test_w_first_name_escaped_wo_closing_backtick(self): with self.assertRaises(ValueError): - self._call_fut('`a\\`b.c.d') + self._call_fut("`a\\`b.c.d") class Test_get_nested_value(unittest.TestCase): DATA = { - 'top1': { - 'middle2': { - 'bottom3': 20, - 'bottom4': 22, - }, - 'middle5': True, - }, - 'top6': b'\x00\x01 foo', + "top1": {"middle2": {"bottom3": 20, "bottom4": 22}, "middle5": True}, + "top6": b"\x00\x01 foo", } @staticmethod @@ -985,21 +960,21 @@ def _call_fut(field_path, data): return get_nested_value(field_path, data) def test_simple(self): - self.assertIs(self._call_fut('top1', self.DATA), self.DATA['top1']) + self.assertIs(self._call_fut("top1", self.DATA), self.DATA["top1"]) def test_nested(self): self.assertIs( - self._call_fut('top1.middle2', self.DATA), - self.DATA['top1']['middle2']) + self._call_fut("top1.middle2", self.DATA), self.DATA["top1"]["middle2"] + ) self.assertIs( - self._call_fut('top1.middle2.bottom3', self.DATA), - self.DATA['top1']['middle2']['bottom3']) + self._call_fut("top1.middle2.bottom3", self.DATA), + self.DATA["top1"]["middle2"]["bottom3"], + ) def test_missing_top_level(self): - from google.cloud.firestore_v1beta1._helpers import ( - FIELD_PATH_MISSING_TOP) + from google.cloud.firestore_v1beta1._helpers import FIELD_PATH_MISSING_TOP - field_path = 'top8' + field_path = "top8" with self.assertRaises(KeyError) as exc_info: self._call_fut(field_path, self.DATA) @@ -1007,28 +982,25 @@ def test_missing_top_level(self): self.assertEqual(exc_info.exception.args, (err_msg,)) def test_missing_key(self): - from google.cloud.firestore_v1beta1._helpers import ( - FIELD_PATH_MISSING_KEY) + from google.cloud.firestore_v1beta1._helpers import FIELD_PATH_MISSING_KEY with self.assertRaises(KeyError) as exc_info: - self._call_fut('top1.middle2.nope', self.DATA) + self._call_fut("top1.middle2.nope", self.DATA) - err_msg = FIELD_PATH_MISSING_KEY.format('nope', 'top1.middle2') + err_msg = FIELD_PATH_MISSING_KEY.format("nope", "top1.middle2") self.assertEqual(exc_info.exception.args, (err_msg,)) def test_bad_type(self): - from google.cloud.firestore_v1beta1._helpers 
import ( - FIELD_PATH_WRONG_TYPE) + from google.cloud.firestore_v1beta1._helpers import FIELD_PATH_WRONG_TYPE with self.assertRaises(KeyError) as exc_info: - self._call_fut('top6.middle7', self.DATA) + self._call_fut("top6.middle7", self.DATA) - err_msg = FIELD_PATH_WRONG_TYPE.format('top6', 'middle7') + err_msg = FIELD_PATH_WRONG_TYPE.format("top6", "middle7") self.assertEqual(exc_info.exception.args, (err_msg,)) class Test_get_doc_id(unittest.TestCase): - @staticmethod def _call_fut(document_pb, expected_prefix): from google.cloud.firestore_v1beta1._helpers import get_doc_id @@ -1039,16 +1011,17 @@ def _call_fut(document_pb, expected_prefix): def _dummy_ref_string(collection_id): from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE - project = u'bazzzz' - return u'projects/{}/databases/{}/documents/{}'.format( - project, DEFAULT_DATABASE, collection_id) + project = u"bazzzz" + return u"projects/{}/databases/{}/documents/{}".format( + project, DEFAULT_DATABASE, collection_id + ) def test_success(self): from google.cloud.firestore_v1beta1.proto import document_pb2 - prefix = self._dummy_ref_string('sub-collection') - actual_id = 'this-is-the-one' - name = '{}/{}'.format(prefix, actual_id) + prefix = self._dummy_ref_string("sub-collection") + actual_id = "this-is-the-one" + name = "{}/{}".format(prefix, actual_id) document_pb = document_pb2.Document(name=name) document_id = self._call_fut(document_pb, prefix) @@ -1057,9 +1030,9 @@ def test_success(self): def test_failure(self): from google.cloud.firestore_v1beta1.proto import document_pb2 - actual_prefix = self._dummy_ref_string('the-right-one') - wrong_prefix = self._dummy_ref_string('the-wrong-one') - name = '{}/{}'.format(actual_prefix, 'sorry-wont-works') + actual_prefix = self._dummy_ref_string("the-right-one") + wrong_prefix = self._dummy_ref_string("the-wrong-one") + name = "{}/{}".format(actual_prefix, "sorry-wont-works") document_pb = document_pb2.Document(name=name) with self.assertRaises(ValueError) as exc_info: @@ -1072,13 +1045,13 @@ def test_failure(self): class Test_extract_fields(unittest.TestCase): - @staticmethod def _call_fut(document_data, prefix_path, expand_dots=False): from google.cloud.firestore_v1beta1 import _helpers return _helpers.extract_fields( - document_data, prefix_path, expand_dots=expand_dots) + document_data, prefix_path, expand_dots=expand_dots + ) def test_w_empty_document(self): from google.cloud.firestore_v1beta1._helpers import _EmptyDict @@ -1091,27 +1064,19 @@ def test_w_empty_document(self): self.assertEqual(list(iterator), expected) def test_w_invalid_key_and_expand_dots(self): - document_data = { - 'b': 1, - 'a~d': 2, - 'c': 3, - } + document_data = {"b": 1, "a~d": 2, "c": 3} prefix_path = _make_field_path() with self.assertRaises(ValueError): list(self._call_fut(document_data, prefix_path, expand_dots=True)) def test_w_shallow_keys(self): - document_data = { - 'b': 1, - 'a': 2, - 'c': 3, - } + document_data = {"b": 1, "a": 2, "c": 3} prefix_path = _make_field_path() expected = [ - (_make_field_path('a'), 2), - (_make_field_path('b'), 1), - (_make_field_path('c'), 3), + (_make_field_path("a"), 2), + (_make_field_path("b"), 1), + (_make_field_path("c"), 3), ] iterator = self._call_fut(document_data, prefix_path) @@ -1120,24 +1085,14 @@ def test_w_shallow_keys(self): def test_w_nested(self): from google.cloud.firestore_v1beta1._helpers import _EmptyDict - document_data = { - 'b': { - 'a': { - 'd': 4, - 'c': 3, - 'g': {}, - }, - 'e': 7, - }, - 'f': 5, - } + document_data = {"b": {"a": 
{"d": 4, "c": 3, "g": {}}, "e": 7}, "f": 5} prefix_path = _make_field_path() expected = [ - (_make_field_path('b', 'a', 'c'), 3), - (_make_field_path('b', 'a', 'd'), 4), - (_make_field_path('b', 'a', 'g'), _EmptyDict), - (_make_field_path('b', 'e'), 7), - (_make_field_path('f'), 5), + (_make_field_path("b", "a", "c"), 3), + (_make_field_path("b", "a", "d"), 4), + (_make_field_path("b", "a", "g"), _EmptyDict), + (_make_field_path("b", "e"), 7), + (_make_field_path("f"), 5), ] iterator = self._call_fut(document_data, prefix_path) @@ -1147,27 +1102,19 @@ def test_w_expand_dotted(self): from google.cloud.firestore_v1beta1._helpers import _EmptyDict document_data = { - 'b': { - 'a': { - 'd': 4, - 'c': 3, - 'g': {}, - 'k.l.m': 17, - }, - 'e': 7, - }, - 'f': 5, - 'h.i.j': 9, + "b": {"a": {"d": 4, "c": 3, "g": {}, "k.l.m": 17}, "e": 7}, + "f": 5, + "h.i.j": 9, } prefix_path = _make_field_path() expected = [ - (_make_field_path('b', 'a', 'c'), 3), - (_make_field_path('b', 'a', 'd'), 4), - (_make_field_path('b', 'a', 'g'), _EmptyDict), - (_make_field_path('b', 'a', 'k.l.m'), 17), - (_make_field_path('b', 'e'), 7), - (_make_field_path('f'), 5), - (_make_field_path('h', 'i', 'j'), 9), + (_make_field_path("b", "a", "c"), 3), + (_make_field_path("b", "a", "d"), 4), + (_make_field_path("b", "a", "g"), _EmptyDict), + (_make_field_path("b", "a", "k.l.m"), 17), + (_make_field_path("b", "e"), 7), + (_make_field_path("f"), 5), + (_make_field_path("h", "i", "j"), 9), ] iterator = self._call_fut(document_data, prefix_path, expand_dots=True) @@ -1175,7 +1122,6 @@ def test_w_expand_dotted(self): class Test_set_field_value(unittest.TestCase): - @staticmethod def _call_fut(document_data, field_path, value): from google.cloud.firestore_v1beta1 import _helpers @@ -1184,47 +1130,46 @@ def _call_fut(document_data, field_path, value): def test_normal_value_w_shallow(self): document = {} - field_path = _make_field_path('a') + field_path = _make_field_path("a") value = 3 self._call_fut(document, field_path, value) - self.assertEqual(document, {'a': 3}) + self.assertEqual(document, {"a": 3}) def test_normal_value_w_nested(self): document = {} - field_path = _make_field_path('a', 'b', 'c') + field_path = _make_field_path("a", "b", "c") value = 3 self._call_fut(document, field_path, value) - self.assertEqual(document, {'a': {'b': {'c': 3}}}) + self.assertEqual(document, {"a": {"b": {"c": 3}}}) def test_empty_dict_w_shallow(self): from google.cloud.firestore_v1beta1._helpers import _EmptyDict document = {} - field_path = _make_field_path('a') + field_path = _make_field_path("a") value = _EmptyDict self._call_fut(document, field_path, value) - self.assertEqual(document, {'a': {}}) + self.assertEqual(document, {"a": {}}) def test_empty_dict_w_nested(self): from google.cloud.firestore_v1beta1._helpers import _EmptyDict document = {} - field_path = _make_field_path('a', 'b', 'c') + field_path = _make_field_path("a", "b", "c") value = _EmptyDict self._call_fut(document, field_path, value) - self.assertEqual(document, {'a': {'b': {'c': {}}}}) + self.assertEqual(document, {"a": {"b": {"c": {}}}}) class Test_get_field_value(unittest.TestCase): - @staticmethod def _call_fut(document_data, field_path): from google.cloud.firestore_v1beta1 import _helpers @@ -1241,39 +1186,26 @@ def test_miss_shallow(self): document = {} with self.assertRaises(KeyError): - self._call_fut(document, _make_field_path('nonesuch')) + self._call_fut(document, _make_field_path("nonesuch")) def test_miss_nested(self): - document = { - 'a': { - 'b': { - }, - }, - 
} + document = {"a": {"b": {}}} with self.assertRaises(KeyError): - self._call_fut(document, _make_field_path('a', 'b', 'c')) + self._call_fut(document, _make_field_path("a", "b", "c")) def test_hit_shallow(self): - document = {'a': 1} + document = {"a": 1} - self.assertEqual(self._call_fut(document, _make_field_path('a')), 1) + self.assertEqual(self._call_fut(document, _make_field_path("a")), 1) def test_hit_nested(self): - document = { - 'a': { - 'b': { - 'c': 1, - }, - }, - } + document = {"a": {"b": {"c": 1}}} - self.assertEqual( - self._call_fut(document, _make_field_path('a', 'b', 'c')), 1) + self.assertEqual(self._call_fut(document, _make_field_path("a", "b", "c")), 1) class TestDocumentExtractor(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1 import _helpers @@ -1302,15 +1234,13 @@ def test_ctor_w_empty_document(self): def test_ctor_w_delete_field_shallow(self): from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - document_data = { - 'a': DELETE_FIELD, - } + document_data = {"a": DELETE_FIELD} inst = self._make_one(document_data) self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, [_make_field_path('a')]) + self.assertEqual(inst.deleted_fields, [_make_field_path("a")]) self.assertEqual(inst.server_timestamps, []) self.assertEqual(inst.array_removes, {}) self.assertEqual(inst.array_unions, {}) @@ -1322,20 +1252,13 @@ def test_ctor_w_delete_field_shallow(self): def test_ctor_w_delete_field_nested(self): from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - document_data = { - 'a': { - 'b': { - 'c': DELETE_FIELD, - } - } - } + document_data = {"a": {"b": {"c": DELETE_FIELD}}} inst = self._make_one(document_data) self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, []) - self.assertEqual( - inst.deleted_fields, [_make_field_path('a', 'b', 'c')]) + self.assertEqual(inst.deleted_fields, [_make_field_path("a", "b", "c")]) self.assertEqual(inst.server_timestamps, []) self.assertEqual(inst.array_removes, {}) self.assertEqual(inst.array_unions, {}) @@ -1347,62 +1270,48 @@ def test_ctor_w_delete_field_nested(self): def test_ctor_w_server_timestamp_shallow(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_data = { - 'a': SERVER_TIMESTAMP, - } + document_data = {"a": SERVER_TIMESTAMP} inst = self._make_one(document_data) self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, []) self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, [_make_field_path('a')]) + self.assertEqual(inst.server_timestamps, [_make_field_path("a")]) self.assertEqual(inst.array_removes, {}) self.assertEqual(inst.array_unions, {}) self.assertEqual(inst.set_fields, {}) self.assertFalse(inst.empty_document) self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path('a')]) + self.assertEqual(inst.transform_paths, [_make_field_path("a")]) def test_ctor_w_server_timestamp_nested(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_data = { - 'a': { - 'b': { - 'c': SERVER_TIMESTAMP, - } - } - } + document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}} inst = self._make_one(document_data) self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, []) self.assertEqual(inst.deleted_fields, []) - self.assertEqual( - inst.server_timestamps, 
[_make_field_path('a', 'b', 'c')]) + self.assertEqual(inst.server_timestamps, [_make_field_path("a", "b", "c")]) self.assertEqual(inst.array_removes, {}) self.assertEqual(inst.array_unions, {}) self.assertEqual(inst.set_fields, {}) self.assertFalse(inst.empty_document) self.assertTrue(inst.has_transforms) - self.assertEqual( - inst.transform_paths, [_make_field_path('a', 'b', 'c')]) + self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) def test_ctor_w_array_remove_shallow(self): from google.cloud.firestore_v1beta1.transforms import ArrayRemove values = [1, 3, 5] - document_data = { - 'a': ArrayRemove(values), - } + document_data = {"a": ArrayRemove(values)} inst = self._make_one(document_data) - expected_array_removes = { - _make_field_path('a'): values, - } + expected_array_removes = {_make_field_path("a"): values} self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, []) self.assertEqual(inst.deleted_fields, []) @@ -1412,25 +1321,17 @@ def test_ctor_w_array_remove_shallow(self): self.assertEqual(inst.set_fields, {}) self.assertFalse(inst.empty_document) self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path('a')]) + self.assertEqual(inst.transform_paths, [_make_field_path("a")]) def test_ctor_w_array_remove_nested(self): from google.cloud.firestore_v1beta1.transforms import ArrayRemove values = [2, 4, 8] - document_data = { - 'a': { - 'b': { - 'c': ArrayRemove(values), - } - } - } + document_data = {"a": {"b": {"c": ArrayRemove(values)}}} inst = self._make_one(document_data) - expected_array_removes = { - _make_field_path('a', 'b', 'c'): values, - } + expected_array_removes = {_make_field_path("a", "b", "c"): values} self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, []) self.assertEqual(inst.deleted_fields, []) @@ -1440,22 +1341,17 @@ def test_ctor_w_array_remove_nested(self): self.assertEqual(inst.set_fields, {}) self.assertFalse(inst.empty_document) self.assertTrue(inst.has_transforms) - self.assertEqual( - inst.transform_paths, [_make_field_path('a', 'b', 'c')]) + self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) def test_ctor_w_array_union_shallow(self): from google.cloud.firestore_v1beta1.transforms import ArrayUnion values = [1, 3, 5] - document_data = { - 'a': ArrayUnion(values), - } + document_data = {"a": ArrayUnion(values)} inst = self._make_one(document_data) - expected_array_unions = { - _make_field_path('a'): values, - } + expected_array_unions = {_make_field_path("a"): values} self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, []) self.assertEqual(inst.deleted_fields, []) @@ -1465,25 +1361,17 @@ def test_ctor_w_array_union_shallow(self): self.assertEqual(inst.set_fields, {}) self.assertFalse(inst.empty_document) self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path('a')]) + self.assertEqual(inst.transform_paths, [_make_field_path("a")]) def test_ctor_w_array_union_nested(self): from google.cloud.firestore_v1beta1.transforms import ArrayUnion values = [2, 4, 8] - document_data = { - 'a': { - 'b': { - 'c': ArrayUnion(values), - } - } - } + document_data = {"a": {"b": {"c": ArrayUnion(values)}}} inst = self._make_one(document_data) - expected_array_unions = { - _make_field_path('a', 'b', 'c'): values, - } + expected_array_unions = {_make_field_path("a", "b", "c"): values} self.assertEqual(inst.document_data, document_data) 
self.assertEqual(inst.field_paths, []) self.assertEqual(inst.deleted_fields, []) @@ -1493,19 +1381,14 @@ def test_ctor_w_array_union_nested(self): self.assertEqual(inst.set_fields, {}) self.assertFalse(inst.empty_document) self.assertTrue(inst.has_transforms) - self.assertEqual( - inst.transform_paths, [_make_field_path('a', 'b', 'c')]) + self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) def test_ctor_w_empty_dict_shallow(self): - document_data = { - 'a': {}, - } + document_data = {"a": {}} inst = self._make_one(document_data) - expected_field_paths = [ - _make_field_path('a'), - ] + expected_field_paths = [_make_field_path("a")] self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, expected_field_paths) self.assertEqual(inst.deleted_fields, []) @@ -1518,20 +1401,11 @@ def test_ctor_w_empty_dict_shallow(self): self.assertEqual(inst.transform_paths, []) def test_ctor_w_empty_dict_nested(self): - document_data = { - 'a': { - 'b': { - 'c': { - }, - }, - }, - } + document_data = {"a": {"b": {"c": {}}}} inst = self._make_one(document_data) - expected_field_paths = [ - _make_field_path('a', 'b', 'c'), - ] + expected_field_paths = [_make_field_path("a", "b", "c")] self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, expected_field_paths) self.assertEqual(inst.deleted_fields, []) @@ -1544,18 +1418,14 @@ def test_ctor_w_empty_dict_nested(self): self.assertEqual(inst.transform_paths, []) def test_ctor_w_normal_value_shallow(self): - document_data = { - 'b': 1, - 'a': 2, - 'c': 3, - } + document_data = {"b": 1, "a": 2, "c": 3} inst = self._make_one(document_data) expected_field_paths = [ - _make_field_path('a'), - _make_field_path('b'), - _make_field_path('c'), + _make_field_path("a"), + _make_field_path("b"), + _make_field_path("c"), ] self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, expected_field_paths) @@ -1568,24 +1438,15 @@ def test_ctor_w_normal_value_shallow(self): self.assertFalse(inst.has_transforms) def test_ctor_w_normal_value_nested(self): - document_data = { - 'b': { - 'a': { - 'd': 4, - 'c': 3, - }, - 'e': 7, - }, - 'f': 5, - } + document_data = {"b": {"a": {"d": 4, "c": 3}, "e": 7}, "f": 5} inst = self._make_one(document_data) expected_field_paths = [ - _make_field_path('b', 'a', 'c'), - _make_field_path('b', 'a', 'd'), - _make_field_path('b', 'e'), - _make_field_path('f'), + _make_field_path("b", "a", "c"), + _make_field_path("b", "a", "d"), + _make_field_path("b", "e"), + _make_field_path("f"), ] self.assertEqual(inst.document_data, document_data) self.assertEqual(inst.field_paths, expected_field_paths) @@ -1603,46 +1464,44 @@ def test_get_update_pb_w_exists_precondition(self): document_data = {} inst = self._make_one(document_data) document_path = ( - 'projects/project-id/databases/(default)/' - 'documents/document-id') + "projects/project-id/databases/(default)/" "documents/document-id" + ) update_pb = inst.get_update_pb(document_path, exists=False) self.assertIsInstance(update_pb, write_pb2.Write) self.assertEqual(update_pb.update.name, document_path) self.assertEqual(update_pb.update.fields, document_data) - self.assertTrue(update_pb.HasField('current_document')) + self.assertTrue(update_pb.HasField("current_document")) self.assertFalse(update_pb.current_document.exists) def test_get_update_pb_wo_exists_precondition(self): from google.cloud.firestore_v1beta1.proto import write_pb2 from google.cloud.firestore_v1beta1._helpers import encode_dict - 
document_data = {'a': 1} + document_data = {"a": 1} inst = self._make_one(document_data) document_path = ( - 'projects/project-id/databases/(default)/' - 'documents/document-id') + "projects/project-id/databases/(default)/" "documents/document-id" + ) update_pb = inst.get_update_pb(document_path) self.assertIsInstance(update_pb, write_pb2.Write) self.assertEqual(update_pb.update.name, document_path) self.assertEqual(update_pb.update.fields, encode_dict(document_data)) - self.assertFalse(update_pb.HasField('current_document')) + self.assertFalse(update_pb.HasField("current_document")) def test_get_transform_pb_w_server_timestamp_w_exists_precondition(self): from google.cloud.firestore_v1beta1.proto import write_pb2 from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP from google.cloud.firestore_v1beta1._helpers import REQUEST_TIME_ENUM - document_data = { - 'a': SERVER_TIMESTAMP, - } + document_data = {"a": SERVER_TIMESTAMP} inst = self._make_one(document_data) document_path = ( - 'projects/project-id/databases/(default)/' - 'documents/document-id') + "projects/project-id/databases/(default)/" "documents/document-id" + ) transform_pb = inst.get_transform_pb(document_path, exists=False) @@ -1651,9 +1510,9 @@ def test_get_transform_pb_w_server_timestamp_w_exists_precondition(self): transforms = transform_pb.transform.field_transforms self.assertEqual(len(transforms), 1) transform = transforms[0] - self.assertEqual(transform.field_path, 'a') + self.assertEqual(transform.field_path, "a") self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM) - self.assertTrue(transform_pb.HasField('current_document')) + self.assertTrue(transform_pb.HasField("current_document")) self.assertFalse(transform_pb.current_document.exists) def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self): @@ -1661,17 +1520,11 @@ def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP from google.cloud.firestore_v1beta1._helpers import REQUEST_TIME_ENUM - document_data = { - 'a': { - 'b': { - 'c': SERVER_TIMESTAMP, - }, - }, - } + document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}} inst = self._make_one(document_data) document_path = ( - 'projects/project-id/databases/(default)/' - 'documents/document-id') + "projects/project-id/databases/(default)/" "documents/document-id" + ) transform_pb = inst.get_transform_pb(document_path) @@ -1680,35 +1533,26 @@ def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self): transforms = transform_pb.transform.field_transforms self.assertEqual(len(transforms), 1) transform = transforms[0] - self.assertEqual(transform.field_path, 'a.b.c') + self.assertEqual(transform.field_path, "a.b.c") self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM) - self.assertFalse(transform_pb.HasField('current_document')) + self.assertFalse(transform_pb.HasField("current_document")) @staticmethod def _array_value_to_list(array_value): from google.cloud.firestore_v1beta1._helpers import decode_value - return [ - decode_value(element, client=None) - for element in array_value.values - ] + return [decode_value(element, client=None) for element in array_value.values] def test_get_transform_pb_w_array_remove(self): from google.cloud.firestore_v1beta1.proto import write_pb2 from google.cloud.firestore_v1beta1.transforms import ArrayRemove values = [2, 4, 8] - document_data = { - 'a': { - 'b': { - 'c': ArrayRemove(values), - }, - }, - } + document_data = 
{"a": {"b": {"c": ArrayRemove(values)}}} inst = self._make_one(document_data) document_path = ( - 'projects/project-id/databases/(default)/' - 'documents/document-id') + "projects/project-id/databases/(default)/" "documents/document-id" + ) transform_pb = inst.get_transform_pb(document_path) @@ -1717,27 +1561,21 @@ def test_get_transform_pb_w_array_remove(self): transforms = transform_pb.transform.field_transforms self.assertEqual(len(transforms), 1) transform = transforms[0] - self.assertEqual(transform.field_path, 'a.b.c') + self.assertEqual(transform.field_path, "a.b.c") removed = self._array_value_to_list(transform.remove_all_from_array) self.assertEqual(removed, values) - self.assertFalse(transform_pb.HasField('current_document')) + self.assertFalse(transform_pb.HasField("current_document")) def test_get_transform_pb_w_array_union(self): from google.cloud.firestore_v1beta1.proto import write_pb2 from google.cloud.firestore_v1beta1.transforms import ArrayUnion values = [1, 3, 5] - document_data = { - 'a': { - 'b': { - 'c': ArrayUnion(values), - }, - }, - } + document_data = {"a": {"b": {"c": ArrayUnion(values)}}} inst = self._make_one(document_data) document_path = ( - 'projects/project-id/databases/(default)/' - 'documents/document-id') + "projects/project-id/databases/(default)/" "documents/document-id" + ) transform_pb = inst.get_transform_pb(document_path) @@ -1746,14 +1584,13 @@ def test_get_transform_pb_w_array_union(self): transforms = transform_pb.transform.field_transforms self.assertEqual(len(transforms), 1) transform = transforms[0] - self.assertEqual(transform.field_path, 'a.b.c') + self.assertEqual(transform.field_path, "a.b.c") added = self._array_value_to_list(transform.append_missing_elements) self.assertEqual(added, values) - self.assertFalse(transform_pb.HasField('current_document')) + self.assertFalse(transform_pb.HasField("current_document")) class Test_pbs_for_create(unittest.TestCase): - @staticmethod def _call_fut(document_path, document_data): from google.cloud.firestore_v1beta1._helpers import pbs_for_create @@ -1768,10 +1605,7 @@ def _make_write_w_document(document_path, **data): from google.cloud.firestore_v1beta1.proto import common_pb2 return write_pb2.Write( - update=document_pb2.Document( - name=document_path, - fields=encode_dict(data), - ), + update=document_pb2.Document(name=document_path, fields=encode_dict(data)), current_document=common_pb2.Precondition(exists=False), ) @@ -1783,47 +1617,45 @@ def _make_write_w_transform(document_path, fields): server_val = enums.DocumentTransform.FieldTransform.ServerValue transforms = [ write_pb2.DocumentTransform.FieldTransform( - field_path=field, set_to_server_value=server_val.REQUEST_TIME) + field_path=field, set_to_server_value=server_val.REQUEST_TIME + ) for field in fields ] return write_pb2.Write( transform=write_pb2.DocumentTransform( - document=document_path, - field_transforms=transforms, - ), + document=document_path, field_transforms=transforms + ) ) def _helper(self, do_transform=False, empty_val=False): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - document_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} if do_transform: - document_data['butter'] = SERVER_TIMESTAMP + document_data["butter"] = SERVER_TIMESTAMP if empty_val: - document_data['mustard'] = {} + document_data["mustard"] = 
{} write_pbs = self._call_fut(document_path, document_data) if empty_val: update_pb = self._make_write_w_document( - document_path, cheese=1.5, crackers=True, mustard={}, + document_path, cheese=1.5, crackers=True, mustard={} ) else: update_pb = self._make_write_w_document( - document_path, cheese=1.5, crackers=True, + document_path, cheese=1.5, crackers=True ) expected_pbs = [update_pb] if do_transform: expected_pbs.append( - self._make_write_w_transform(document_path, fields=['butter'])) + self._make_write_w_transform(document_path, fields=["butter"]) + ) self.assertEqual(write_pbs, expected_pbs) @@ -1838,7 +1670,6 @@ def test_w_transform_and_empty_value(self): class Test_pbs_for_set_no_merge(unittest.TestCase): - @staticmethod def _call_fut(document_path, document_data): from google.cloud.firestore_v1beta1 import _helpers @@ -1852,10 +1683,7 @@ def _make_write_w_document(document_path, **data): from google.cloud.firestore_v1beta1._helpers import encode_dict return write_pb2.Write( - update=document_pb2.Document( - name=document_path, - fields=encode_dict(data), - ), + update=document_pb2.Document(name=document_path, fields=encode_dict(data)) ) @staticmethod @@ -1866,19 +1694,19 @@ def _make_write_w_transform(document_path, fields): server_val = enums.DocumentTransform.FieldTransform.ServerValue transforms = [ write_pb2.DocumentTransform.FieldTransform( - field_path=field, set_to_server_value=server_val.REQUEST_TIME) + field_path=field, set_to_server_value=server_val.REQUEST_TIME + ) for field in fields ] return write_pb2.Write( transform=write_pb2.DocumentTransform( - document=document_path, - field_transforms=transforms, - ), + document=document_path, field_transforms=transforms + ) ) def test_w_empty_document(self): - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") document_data = {} write_pbs = self._call_fut(document_path, document_data) @@ -1890,46 +1718,44 @@ def test_w_empty_document(self): def test_w_only_server_timestamp(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - document_data = {'butter': SERVER_TIMESTAMP} + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"butter": SERVER_TIMESTAMP} write_pbs = self._call_fut(document_path, document_data) update_pb = self._make_write_w_document(document_path) - transform_pb = self._make_write_w_transform(document_path, ['butter']) + transform_pb = self._make_write_w_transform(document_path, ["butter"]) expected_pbs = [update_pb, transform_pb] self.assertEqual(write_pbs, expected_pbs) def _helper(self, do_transform=False, empty_val=False): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - document_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} if do_transform: - document_data['butter'] = SERVER_TIMESTAMP + document_data["butter"] = SERVER_TIMESTAMP if empty_val: - document_data['mustard'] = {} + document_data["mustard"] = {} write_pbs = self._call_fut(document_path, document_data) if empty_val: update_pb = self._make_write_w_document( - document_path, cheese=1.5, crackers=True, mustard={}, + document_path, cheese=1.5, crackers=True, mustard={} ) else: update_pb = self._make_write_w_document( - 
document_path, cheese=1.5, crackers=True, + document_path, cheese=1.5, crackers=True ) expected_pbs = [update_pb] if do_transform: expected_pbs.append( - self._make_write_w_transform(document_path, fields=['butter'])) + self._make_write_w_transform(document_path, fields=["butter"]) + ) self.assertEqual(write_pbs, expected_pbs) @@ -1945,7 +1771,6 @@ def test_w_transform_and_empty_value(self): class TestDocumentExtractorForMerge(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1 import _helpers @@ -1978,17 +1803,14 @@ def test_apply_merge_all_w_empty_document(self): def test_apply_merge_all_w_delete(self): from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - document_data = { - 'write_me': 'value', - 'delete_me': DELETE_FIELD, - } + document_data = {"write_me": "value", "delete_me": DELETE_FIELD} inst = self._make_one(document_data) inst.apply_merge(True) expected_data_merge = [ - _make_field_path('delete_me'), - _make_field_path('write_me'), + _make_field_path("delete_me"), + _make_field_path("write_me"), ] self.assertEqual(inst.data_merge, expected_data_merge) self.assertEqual(inst.transform_merge, []) @@ -1998,24 +1820,14 @@ def test_apply_merge_all_w_delete(self): def test_apply_merge_all_w_server_timestamp(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_data = { - 'write_me': 'value', - 'timestamp': SERVER_TIMESTAMP, - } + document_data = {"write_me": "value", "timestamp": SERVER_TIMESTAMP} inst = self._make_one(document_data) inst.apply_merge(True) - expected_data_merge = [ - _make_field_path('write_me'), - ] - expected_transform_merge = [ - _make_field_path('timestamp'), - ] - expected_merge = [ - _make_field_path('timestamp'), - _make_field_path('write_me'), - ] + expected_data_merge = [_make_field_path("write_me")] + expected_transform_merge = [_make_field_path("timestamp")] + expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")] self.assertEqual(inst.data_merge, expected_data_merge) self.assertEqual(inst.transform_merge, expected_transform_merge) self.assertEqual(inst.merge, expected_merge) @@ -2026,82 +1838,64 @@ def test_apply_merge_list_fields_w_empty_document(self): inst = self._make_one(document_data) with self.assertRaises(ValueError): - inst.apply_merge(['nonesuch', 'or.this']) + inst.apply_merge(["nonesuch", "or.this"]) def test_apply_merge_list_fields_w_unmerged_delete(self): from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD document_data = { - 'write_me': 'value', - 'delete_me': DELETE_FIELD, - 'ignore_me': 123, - 'unmerged_delete': DELETE_FIELD, + "write_me": "value", + "delete_me": DELETE_FIELD, + "ignore_me": 123, + "unmerged_delete": DELETE_FIELD, } inst = self._make_one(document_data) with self.assertRaises(ValueError): - inst.apply_merge(['write_me', 'delete_me']) + inst.apply_merge(["write_me", "delete_me"]) def test_apply_merge_list_fields_w_delete(self): from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD document_data = { - 'write_me': 'value', - 'delete_me': DELETE_FIELD, - 'ignore_me': 123, + "write_me": "value", + "delete_me": DELETE_FIELD, + "ignore_me": 123, } inst = self._make_one(document_data) - inst.apply_merge(['write_me', 'delete_me']) + inst.apply_merge(["write_me", "delete_me"]) - expected_set_fields = { - 'write_me': 'value', - } - expected_deleted_fields = [ - _make_field_path('delete_me'), - ] + expected_set_fields = {"write_me": "value"} + expected_deleted_fields = 
[_make_field_path("delete_me")] self.assertEqual(inst.set_fields, expected_set_fields) self.assertEqual(inst.deleted_fields, expected_deleted_fields) self.assertTrue(inst.has_updates) def test_apply_merge_list_fields_w_prefixes(self): - document_data = { - 'a': { - 'b': { - 'c': 123, - }, - }, - } + document_data = {"a": {"b": {"c": 123}}} inst = self._make_one(document_data) with self.assertRaises(ValueError): - inst.apply_merge(['a', 'a.b']) + inst.apply_merge(["a", "a.b"]) def test_apply_merge_list_fields_w_missing_data_string_paths(self): - document_data = { - 'write_me': 'value', - 'ignore_me': 123, - } + document_data = {"write_me": "value", "ignore_me": 123} inst = self._make_one(document_data) with self.assertRaises(ValueError): - inst.apply_merge(['write_me', 'nonesuch']) + inst.apply_merge(["write_me", "nonesuch"]) def test_apply_merge_list_fields_w_non_merge_field(self): - document_data = { - 'write_me': 'value', - 'ignore_me': 123, - } + document_data = {"write_me": "value", "ignore_me": 123} inst = self._make_one(document_data) - inst.apply_merge([_make_field_path('write_me')]) + inst.apply_merge([_make_field_path("write_me")]) - expected_set_fields = { - 'write_me': 'value', - } + expected_set_fields = {"write_me": "value"} self.assertEqual(inst.set_fields, expected_set_fields) self.assertTrue(inst.has_updates) @@ -2109,31 +1903,21 @@ def test_apply_merge_list_fields_w_server_timestamp(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP document_data = { - 'write_me': 'value', - 'timestamp': SERVER_TIMESTAMP, - 'ignored_stamp': SERVER_TIMESTAMP, + "write_me": "value", + "timestamp": SERVER_TIMESTAMP, + "ignored_stamp": SERVER_TIMESTAMP, } inst = self._make_one(document_data) - inst.apply_merge( - [_make_field_path('write_me'), _make_field_path('timestamp')]) + inst.apply_merge([_make_field_path("write_me"), _make_field_path("timestamp")]) - expected_data_merge = [ - _make_field_path('write_me'), - ] - expected_transform_merge = [ - _make_field_path('timestamp'), - ] - expected_merge = [ - _make_field_path('timestamp'), - _make_field_path('write_me'), - ] + expected_data_merge = [_make_field_path("write_me")] + expected_transform_merge = [_make_field_path("timestamp")] + expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")] self.assertEqual(inst.data_merge, expected_data_merge) self.assertEqual(inst.transform_merge, expected_transform_merge) self.assertEqual(inst.merge, expected_merge) - expected_server_timestamps = [ - _make_field_path('timestamp'), - ] + expected_server_timestamps = [_make_field_path("timestamp")] self.assertEqual(inst.server_timestamps, expected_server_timestamps) self.assertTrue(inst.has_updates) @@ -2142,31 +1926,21 @@ def test_apply_merge_list_fields_w_array_remove(self): values = [2, 4, 8] document_data = { - 'write_me': 'value', - 'remove_me': ArrayRemove(values), - 'ignored_remove_me': ArrayRemove((1, 3, 5)), + "write_me": "value", + "remove_me": ArrayRemove(values), + "ignored_remove_me": ArrayRemove((1, 3, 5)), } inst = self._make_one(document_data) - inst.apply_merge( - [_make_field_path('write_me'), _make_field_path('remove_me')]) + inst.apply_merge([_make_field_path("write_me"), _make_field_path("remove_me")]) - expected_data_merge = [ - _make_field_path('write_me'), - ] - expected_transform_merge = [ - _make_field_path('remove_me'), - ] - expected_merge = [ - _make_field_path('remove_me'), - _make_field_path('write_me'), - ] + expected_data_merge = [_make_field_path("write_me")] + 
expected_transform_merge = [_make_field_path("remove_me")] + expected_merge = [_make_field_path("remove_me"), _make_field_path("write_me")] self.assertEqual(inst.data_merge, expected_data_merge) self.assertEqual(inst.transform_merge, expected_transform_merge) self.assertEqual(inst.merge, expected_merge) - expected_array_removes = { - _make_field_path('remove_me'): values, - } + expected_array_removes = {_make_field_path("remove_me"): values} self.assertEqual(inst.array_removes, expected_array_removes) self.assertTrue(inst.has_updates) @@ -2175,43 +1949,33 @@ def test_apply_merge_list_fields_w_array_union(self): values = [1, 3, 5] document_data = { - 'write_me': 'value', - 'union_me': ArrayUnion(values), - 'ignored_union_me': ArrayUnion((2, 4, 8)), + "write_me": "value", + "union_me": ArrayUnion(values), + "ignored_union_me": ArrayUnion((2, 4, 8)), } inst = self._make_one(document_data) - inst.apply_merge( - [_make_field_path('write_me'), _make_field_path('union_me')]) + inst.apply_merge([_make_field_path("write_me"), _make_field_path("union_me")]) - expected_data_merge = [ - _make_field_path('write_me'), - ] - expected_transform_merge = [ - _make_field_path('union_me'), - ] - expected_merge = [ - _make_field_path('union_me'), - _make_field_path('write_me'), - ] + expected_data_merge = [_make_field_path("write_me")] + expected_transform_merge = [_make_field_path("union_me")] + expected_merge = [_make_field_path("union_me"), _make_field_path("write_me")] self.assertEqual(inst.data_merge, expected_data_merge) self.assertEqual(inst.transform_merge, expected_transform_merge) self.assertEqual(inst.merge, expected_merge) - expected_array_unions = { - _make_field_path('union_me'): values, - } + expected_array_unions = {_make_field_path("union_me"): values} self.assertEqual(inst.array_unions, expected_array_unions) self.assertTrue(inst.has_updates) class Test_pbs_for_set_with_merge(unittest.TestCase): - @staticmethod def _call_fut(document_path, document_data, merge): from google.cloud.firestore_v1beta1 import _helpers return _helpers.pbs_for_set_with_merge( - document_path, document_data, merge=merge) + document_path, document_data, merge=merge + ) @staticmethod def _make_write_w_document(document_path, **data): @@ -2220,10 +1984,7 @@ def _make_write_w_document(document_path, **data): from google.cloud.firestore_v1beta1._helpers import encode_dict return write_pb2.Write( - update=document_pb2.Document( - name=document_path, - fields=encode_dict(data), - ), + update=document_pb2.Document(name=document_path, fields=encode_dict(data)) ) @staticmethod @@ -2234,15 +1995,15 @@ def _make_write_w_transform(document_path, fields): server_val = enums.DocumentTransform.FieldTransform.ServerValue transforms = [ write_pb2.DocumentTransform.FieldTransform( - field_path=field, set_to_server_value=server_val.REQUEST_TIME) + field_path=field, set_to_server_value=server_val.REQUEST_TIME + ) for field in fields ] return write_pb2.Write( transform=write_pb2.DocumentTransform( - document=document_path, - field_transforms=transforms, - ), + document=document_path, field_transforms=transforms + ) ) @staticmethod @@ -2250,146 +2011,110 @@ def _update_document_mask(update_pb, field_paths): from google.cloud.firestore_v1beta1.proto import common_pb2 update_pb.update_mask.CopyFrom( - common_pb2.DocumentMask(field_paths=sorted(field_paths))) + common_pb2.DocumentMask(field_paths=sorted(field_paths)) + ) def test_with_merge_true_wo_transform(self): - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - 
document_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} write_pbs = self._call_fut(document_path, document_data, merge=True) update_pb = self._make_write_w_document(document_path, **document_data) - self._update_document_mask( - update_pb, field_paths=sorted(document_data)) + self._update_document_mask(update_pb, field_paths=sorted(document_data)) expected_pbs = [update_pb] self.assertEqual(write_pbs, expected_pbs) def test_with_merge_field_wo_transform(self): - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - document_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} - write_pbs = self._call_fut( - document_path, document_data, merge=['cheese']) + write_pbs = self._call_fut(document_path, document_data, merge=["cheese"]) update_pb = self._make_write_w_document( - document_path, cheese=document_data['cheese']) - self._update_document_mask( - update_pb, field_paths=['cheese']) + document_path, cheese=document_data["cheese"] + ) + self._update_document_mask(update_pb, field_paths=["cheese"]) expected_pbs = [update_pb] self.assertEqual(write_pbs, expected_pbs) def test_with_merge_true_w_transform(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - update_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} document_data = update_data.copy() - document_data['butter'] = SERVER_TIMESTAMP + document_data["butter"] = SERVER_TIMESTAMP write_pbs = self._call_fut(document_path, document_data, merge=True) update_pb = self._make_write_w_document(document_path, **update_data) - self._update_document_mask( - update_pb, field_paths=sorted(update_data)) - transform_pb = self._make_write_w_transform( - document_path, fields=['butter']) - expected_pbs = [ - update_pb, - transform_pb, - ] + self._update_document_mask(update_pb, field_paths=sorted(update_data)) + transform_pb = self._make_write_w_transform(document_path, fields=["butter"]) + expected_pbs = [update_pb, transform_pb] self.assertEqual(write_pbs, expected_pbs) def test_with_merge_field_w_transform(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - update_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} document_data = update_data.copy() - document_data['butter'] = SERVER_TIMESTAMP + document_data["butter"] = SERVER_TIMESTAMP write_pbs = self._call_fut( - document_path, document_data, merge=['cheese', 'butter']) + document_path, document_data, merge=["cheese", "butter"] + ) update_pb = self._make_write_w_document( - document_path, cheese=document_data['cheese']) - self._update_document_mask(update_pb, ['cheese']) - transform_pb = self._make_write_w_transform( - document_path, fields=['butter']) - expected_pbs = [ - update_pb, - transform_pb, - ] + document_path, cheese=document_data["cheese"] + ) + self._update_document_mask(update_pb, ["cheese"]) + transform_pb = self._make_write_w_transform(document_path, fields=["butter"]) + expected_pbs = [update_pb, 
transform_pb] self.assertEqual(write_pbs, expected_pbs) def test_with_merge_field_w_transform_masking_simple(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - update_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} document_data = update_data.copy() - document_data['butter'] = {'pecan': SERVER_TIMESTAMP} + document_data["butter"] = {"pecan": SERVER_TIMESTAMP} - write_pbs = self._call_fut( - document_path, document_data, merge=['butter.pecan']) + write_pbs = self._call_fut(document_path, document_data, merge=["butter.pecan"]) update_pb = self._make_write_w_document(document_path) transform_pb = self._make_write_w_transform( - document_path, fields=['butter.pecan']) - expected_pbs = [ - update_pb, - transform_pb, - ] + document_path, fields=["butter.pecan"] + ) + expected_pbs = [update_pb, transform_pb] self.assertEqual(write_pbs, expected_pbs) def test_with_merge_field_w_transform_parent(self): from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - document_path = _make_ref_string(u'little', u'town', u'of', u'ham') - update_data = { - 'cheese': 1.5, - 'crackers': True, - } + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} document_data = update_data.copy() - document_data['butter'] = { - 'popcorn': 'yum', - 'pecan': SERVER_TIMESTAMP, - } + document_data["butter"] = {"popcorn": "yum", "pecan": SERVER_TIMESTAMP} write_pbs = self._call_fut( - document_path, document_data, merge=['cheese', 'butter']) + document_path, document_data, merge=["cheese", "butter"] + ) update_pb = self._make_write_w_document( - document_path, - cheese=update_data['cheese'], - butter={'popcorn': 'yum'}, + document_path, cheese=update_data["cheese"], butter={"popcorn": "yum"} ) - self._update_document_mask(update_pb, ['cheese', 'butter']) + self._update_document_mask(update_pb, ["cheese", "butter"]) transform_pb = self._make_write_w_transform( - document_path, fields=['butter.pecan']) - expected_pbs = [ - update_pb, - transform_pb, - ] + document_path, fields=["butter.pecan"] + ) + expected_pbs = [update_pb, transform_pb] self.assertEqual(write_pbs, expected_pbs) class TestDocumentExtractorForUpdate(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1 import _helpers @@ -2406,90 +2131,53 @@ def test_ctor_w_empty_document(self): self.assertEqual(inst.top_level_paths, []) def test_ctor_w_simple_keys(self): - document_data = { - 'a': 1, - 'b': 2, - 'c': 3, - } + document_data = {"a": 1, "b": 2, "c": 3} expected_paths = [ - _make_field_path('a'), - _make_field_path('b'), - _make_field_path('c'), + _make_field_path("a"), + _make_field_path("b"), + _make_field_path("c"), ] inst = self._make_one(document_data) self.assertEqual(inst.top_level_paths, expected_paths) def test_ctor_w_nested_keys(self): - document_data = { - 'a': { - 'd': { - 'e': 1, - }, - }, - 'b': { - 'f': 7, - }, - 'c': 3, - } + document_data = {"a": {"d": {"e": 1}}, "b": {"f": 7}, "c": 3} expected_paths = [ - _make_field_path('a'), - _make_field_path('b'), - _make_field_path('c'), + _make_field_path("a"), + _make_field_path("b"), + _make_field_path("c"), ] inst = self._make_one(document_data) self.assertEqual(inst.top_level_paths, expected_paths) def test_ctor_w_dotted_keys(self): - document_data = { - 'a.d.e': 1, 
- 'b.f': 7, - 'c': 3, - } + document_data = {"a.d.e": 1, "b.f": 7, "c": 3} expected_paths = [ - _make_field_path('a', 'd', 'e'), - _make_field_path('b', 'f'), - _make_field_path('c'), + _make_field_path("a", "d", "e"), + _make_field_path("b", "f"), + _make_field_path("c"), ] inst = self._make_one(document_data) self.assertEqual(inst.top_level_paths, expected_paths) def test_ctor_w_nested_dotted_keys(self): - document_data = { - 'a.d.e': 1, - 'b.f': { - 'h.i': 9, - }, - 'c': 3, - } + document_data = {"a.d.e": 1, "b.f": {"h.i": 9}, "c": 3} expected_paths = [ - _make_field_path('a', 'd', 'e'), - _make_field_path('b', 'f'), - _make_field_path('c'), + _make_field_path("a", "d", "e"), + _make_field_path("b", "f"), + _make_field_path("c"), ] - expected_set_fields = { - 'a': { - 'd': { - 'e': 1, - }, - }, - 'b': { - 'f': { - 'h.i': 9, - }, - }, - 'c': 3, - } + expected_set_fields = {"a": {"d": {"e": 1}}, "b": {"f": {"h.i": 9}}, "c": 3} inst = self._make_one(document_data) self.assertEqual(inst.top_level_paths, expected_paths) self.assertEqual(inst.set_fields, expected_set_fields) class Test_pbs_for_update(unittest.TestCase): - @staticmethod def _call_fut(document_path, field_updates, option): from google.cloud.firestore_v1beta1._helpers import pbs_for_update @@ -2504,11 +2192,10 @@ def _helper(self, option=None, do_transform=False, **write_kwargs): from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import write_pb2 - document_path = _make_ref_string( - u'toy', u'car', u'onion', u'garlic') - field_path1 = 'bitez.yum' - value = b'\x00\x01' - field_path2 = 'blog.internet' + document_path = _make_ref_string(u"toy", u"car", u"onion", u"garlic") + field_path1 = "bitez.yum" + value = b"\x00\x01" + field_path2 = "blog.internet" field_updates = {field_path1: value} if do_transform: @@ -2516,19 +2203,16 @@ def _helper(self, option=None, do_transform=False, **write_kwargs): write_pbs = self._call_fut(document_path, field_updates, option) - map_pb = document_pb2.MapValue(fields={ - 'yum': _value_pb(bytes_value=value), - }) + map_pb = document_pb2.MapValue(fields={"yum": _value_pb(bytes_value=value)}) if do_transform: - field_paths = [field_path1, 'blog'] + field_paths = [field_path1, "blog"] else: field_paths = [field_path1] expected_update_pb = write_pb2.Write( update=document_pb2.Document( - name=document_path, - fields={'bitez': _value_pb(map_value=map_pb)}, + name=document_path, fields={"bitez": _value_pb(map_value=map_pb)} ), update_mask=common_pb2.DocumentMask(field_paths=field_paths), **write_kwargs @@ -2547,9 +2231,9 @@ def _helper(self, option=None, do_transform=False, **write_kwargs): write_pb2.DocumentTransform.FieldTransform( field_path=transform_paths.to_api_repr(), set_to_server_value=server_val.REQUEST_TIME, - ), + ) ], - ), + ) ) expected_pbs.append(expected_transform_pb) self.assertEqual(write_pbs, expected_pbs) @@ -2574,7 +2258,6 @@ def test_update_and_transform(self): class Test_pb_for_delete(unittest.TestCase): - @staticmethod def _call_fut(document_path, option): from google.cloud.firestore_v1beta1._helpers import pb_for_delete @@ -2584,14 +2267,10 @@ def _call_fut(document_path, option): def _helper(self, option=None, **write_kwargs): from google.cloud.firestore_v1beta1.proto import write_pb2 - document_path = _make_ref_string( - u'chicken', u'philly', u'one', u'two') + document_path = _make_ref_string(u"chicken", u"philly", u"one", u"two") write_pb = self._call_fut(document_path, option) - expected_pb = write_pb2.Write( - 
delete=document_path, - **write_kwargs - ) + expected_pb = write_pb2.Write(delete=document_path, **write_kwargs) self.assertEqual(write_pb, expected_pb) def test_without_option(self): @@ -2602,17 +2281,13 @@ def test_with_option(self): from google.cloud.firestore_v1beta1.proto import common_pb2 from google.cloud.firestore_v1beta1 import _helpers - update_time = timestamp_pb2.Timestamp( - seconds=1309700594, - nanos=822211297, - ) + update_time = timestamp_pb2.Timestamp(seconds=1309700594, nanos=822211297) option = _helpers.LastUpdateOption(update_time) precondition = common_pb2.Precondition(update_time=update_time) self._helper(option=option, current_document=precondition) class Test_get_transaction_id(unittest.TestCase): - @staticmethod def _call_fut(transaction, **kwargs): from google.cloud.firestore_v1beta1._helpers import get_transaction_id @@ -2636,7 +2311,7 @@ def test_after_writes_not_allowed(self): from google.cloud.firestore_v1beta1.transaction import Transaction transaction = Transaction(mock.sentinel.client) - transaction._id = b'under-hook' + transaction._id = b"under-hook" transaction._write_pbs.append(mock.sentinel.write) with self.assertRaises(ReadAfterWriteError): @@ -2646,7 +2321,7 @@ def test_after_writes_allowed(self): from google.cloud.firestore_v1beta1.transaction import Transaction transaction = Transaction(mock.sentinel.client) - txn_id = b'we-are-0fine' + txn_id = b"we-are-0fine" transaction._id = txn_id transaction._write_pbs.append(mock.sentinel.write) @@ -2657,7 +2332,7 @@ def test_good_transaction(self): from google.cloud.firestore_v1beta1.transaction import Transaction transaction = Transaction(mock.sentinel.client) - txn_id = b'doubt-it' + txn_id = b"doubt-it" transaction._id = txn_id self.assertTrue(transaction.in_progress) @@ -2665,25 +2340,20 @@ def test_good_transaction(self): class Test_metadata_with_prefix(unittest.TestCase): - @staticmethod def _call_fut(database_string): - from google.cloud.firestore_v1beta1._helpers import ( - metadata_with_prefix) + from google.cloud.firestore_v1beta1._helpers import metadata_with_prefix return metadata_with_prefix(database_string) def test_it(self): - database_string = u'projects/prahj/databases/dee-bee' + database_string = u"projects/prahj/databases/dee-bee" metadata = self._call_fut(database_string) - self.assertEqual(metadata, [ - ('google-cloud-resource-prefix', database_string), - ]) + self.assertEqual(metadata, [("google-cloud-resource-prefix", database_string)]) class TestWriteOption(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1._helpers import WriteOption @@ -2701,7 +2371,6 @@ def test_modify_write(self): class TestLastUpdateOption(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1._helpers import LastUpdateOption @@ -2721,10 +2390,7 @@ def test_modify_write_update_time(self): from google.cloud.firestore_v1beta1.proto import common_pb2 from google.cloud.firestore_v1beta1.proto import write_pb2 - timestamp_pb = timestamp_pb2.Timestamp( - seconds=683893592, - nanos=229362000, - ) + timestamp_pb = timestamp_pb2.Timestamp(seconds=683893592, nanos=229362000) option = self._make_one(timestamp_pb) write_pb = write_pb2.Write() ret_val = option.modify_write(write_pb) @@ -2735,7 +2401,6 @@ def test_modify_write_update_time(self): class TestExistsOption(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1._helpers import ExistsOption @@ -2774,8 +2439,9 @@ def 
_make_ref_string(project, database, *path): from google.cloud.firestore_v1beta1 import _helpers doc_rel_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path) - return u'projects/{}/databases/{}/documents/{}'.format( - project, database, doc_rel_path) + return u"projects/{}/databases/{}/documents/{}".format( + project, database, doc_rel_path + ) def _make_credentials(): @@ -2784,7 +2450,7 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_client(project='quark'): +def _make_client(project="quark"): from google.cloud.firestore_v1beta1.client import Client credentials = _make_credentials() diff --git a/firestore/tests/unit/test_batch.py b/firestore/tests/unit/test_batch.py index 4a310f762339..6469dd9ae06d 100644 --- a/firestore/tests/unit/test_batch.py +++ b/firestore/tests/unit/test_batch.py @@ -18,7 +18,6 @@ class TestWriteBatch(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1.batch import WriteBatch @@ -38,8 +37,7 @@ def test__add_write_pbs(self): batch = self._make_one(mock.sentinel.client) self.assertEqual(batch._write_pbs, []) batch._add_write_pbs([mock.sentinel.write1, mock.sentinel.write2]) - self.assertEqual( - batch._write_pbs, [mock.sentinel.write1, mock.sentinel.write2]) + self.assertEqual(batch._write_pbs, [mock.sentinel.write1, mock.sentinel.write2]) def test_create(self): from google.cloud.firestore_v1beta1.proto import common_pb2 @@ -50,16 +48,16 @@ def test_create(self): batch = self._make_one(client) self.assertEqual(batch._write_pbs, []) - reference = client.document('this', 'one') - document_data = {'a': 10, 'b': 2.5} + reference = client.document("this", "one") + document_data = {"a": 10, "b": 2.5} ret_val = batch.create(reference, document_data) self.assertIsNone(ret_val) new_write_pb = write_pb2.Write( update=document_pb2.Document( name=reference._document_path, fields={ - 'a': _value_pb(integer_value=document_data['a']), - 'b': _value_pb(double_value=document_data['b']), + "a": _value_pb(integer_value=document_data["a"]), + "b": _value_pb(double_value=document_data["b"]), }, ), current_document=common_pb2.Precondition(exists=False), @@ -74,19 +72,17 @@ def test_set(self): batch = self._make_one(client) self.assertEqual(batch._write_pbs, []) - reference = client.document('another', 'one') - field = 'zapzap' - value = u'meadows and flowers' + reference = client.document("another", "one") + field = "zapzap" + value = u"meadows and flowers" document_data = {field: value} ret_val = batch.set(reference, document_data) self.assertIsNone(ret_val) new_write_pb = write_pb2.Write( update=document_pb2.Document( name=reference._document_path, - fields={ - field: _value_pb(string_value=value), - }, - ), + fields={field: _value_pb(string_value=value)}, + ) ) self.assertEqual(batch._write_pbs, [new_write_pb]) @@ -98,20 +94,18 @@ def test_set_merge(self): batch = self._make_one(client) self.assertEqual(batch._write_pbs, []) - reference = client.document('another', 'one') - field = 'zapzap' - value = u'meadows and flowers' + reference = client.document("another", "one") + field = "zapzap" + value = u"meadows and flowers" document_data = {field: value} ret_val = batch.set(reference, document_data, merge=True) self.assertIsNone(ret_val) new_write_pb = write_pb2.Write( update=document_pb2.Document( name=reference._document_path, - fields={ - field: _value_pb(string_value=value), - }, + fields={field: _value_pb(string_value=value)}, ), - update_mask={'field_paths': [field]} + 
update_mask={"field_paths": [field]}, ) self.assertEqual(batch._write_pbs, [new_write_pb]) @@ -124,21 +118,19 @@ def test_update(self): batch = self._make_one(client) self.assertEqual(batch._write_pbs, []) - reference = client.document('cats', 'cradle') - field_path = 'head.foot' - value = u'knees toes shoulders' + reference = client.document("cats", "cradle") + field_path = "head.foot" + value = u"knees toes shoulders" field_updates = {field_path: value} ret_val = batch.update(reference, field_updates) self.assertIsNone(ret_val) - map_pb = document_pb2.MapValue(fields={ - 'foot': _value_pb(string_value=value), - }) + map_pb = document_pb2.MapValue(fields={"foot": _value_pb(string_value=value)}) new_write_pb = write_pb2.Write( update=document_pb2.Document( name=reference._document_path, - fields={'head': _value_pb(map_value=map_pb)}, + fields={"head": _value_pb(map_value=map_pb)}, ), update_mask=common_pb2.DocumentMask(field_paths=[field_path]), current_document=common_pb2.Precondition(exists=True), @@ -152,7 +144,7 @@ def test_delete(self): batch = self._make_one(client) self.assertEqual(batch._write_pbs, []) - reference = client.document('early', 'mornin', 'dawn', 'now') + reference = client.document("early", "mornin", "dawn", "now") ret_val = batch.delete(reference) self.assertIsNone(ret_val) new_write_pb = write_pb2.Write(delete=reference._document_path) @@ -163,24 +155,21 @@ def test_commit(self): from google.cloud.firestore_v1beta1.proto import write_pb2 # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) commit_response = firestore_pb2.CommitResponse( - write_results=[ - write_pb2.WriteResult(), - write_pb2.WriteResult(), - ], + write_results=[write_pb2.WriteResult(), write_pb2.WriteResult()] ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. - client = _make_client('grand') + client = _make_client("grand") client._firestore_api_internal = firestore_api # Actually make a batch with some mutations and call commit(). batch = self._make_one(client) - document1 = client.document('a', 'b') - batch.create(document1, {'ten': 10, 'buck': u'ets'}) - document2 = client.document('c', 'd', 'e', 'f') + document1 = client.document("a", "b") + batch.create(document1, {"ten": 10, "buck": u"ets"}) + document2 = client.document("c", "d", "e", "f") batch.delete(document2) write_pbs = batch._write_pbs[::] @@ -191,8 +180,11 @@ def test_commit(self): # Verify the mocks. 
firestore_api.commit.assert_called_once_with( - client._database_string, write_pbs, transaction=None, - metadata=client._rpc_metadata) + client._database_string, + write_pbs, + transaction=None, + metadata=client._rpc_metadata, + ) def _value_pb(**kwargs): @@ -207,7 +199,7 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_client(project='seventy-nine'): +def _make_client(project="seventy-nine"): from google.cloud.firestore_v1beta1.client import Client credentials = _make_credentials() diff --git a/firestore/tests/unit/test_client.py b/firestore/tests/unit/test_client.py index c0b1f5431633..e3368d2108ad 100644 --- a/firestore/tests/unit/test_client.py +++ b/firestore/tests/unit/test_client.py @@ -21,7 +21,7 @@ class TestClient(unittest.TestCase): - PROJECT = 'my-prahjekt' + PROJECT = "my-prahjekt" @staticmethod def _get_target_class(): @@ -48,18 +48,19 @@ def test_constructor(self): def test_constructor_explicit(self): credentials = _make_credentials() - database = 'now-db' + database = "now-db" client = self._make_one( - project=self.PROJECT, credentials=credentials, database=database) + project=self.PROJECT, credentials=credentials, database=database + ) self.assertEqual(client.project, self.PROJECT) self.assertEqual(client._credentials, credentials) self.assertEqual(client._database, database) @mock.patch( - 'google.cloud.firestore_v1beta1.gapic.firestore_client.' - 'FirestoreClient', + "google.cloud.firestore_v1beta1.gapic.firestore_client." "FirestoreClient", autospec=True, - return_value=mock.sentinel.firestore_api) + return_value=mock.sentinel.firestore_api, + ) def test__firestore_api_property(self, mock_client): client = self._make_default_one() self.assertIsNone(client._firestore_api_internal) @@ -74,13 +75,13 @@ def test__firestore_api_property(self, mock_client): def test___database_string_property(self): credentials = _make_credentials() - database = 'cheeeeez' + database = "cheeeeez" client = self._make_one( - project=self.PROJECT, credentials=credentials, database=database) + project=self.PROJECT, credentials=credentials, database=database + ) self.assertIsNone(client._database_string_internal) database_string = client._database_string - expected = 'projects/{}/databases/{}'.format( - client.project, client._database) + expected = "projects/{}/databases/{}".format(client.project, client._database) self.assertEqual(database_string, expected) self.assertIs(database_string, client._database_string_internal) @@ -90,19 +91,20 @@ def test___database_string_property(self): def test___rpc_metadata_property(self): credentials = _make_credentials() - database = 'quanta' + database = "quanta" client = self._make_one( - project=self.PROJECT, credentials=credentials, database=database) + project=self.PROJECT, credentials=credentials, database=database + ) - self.assertEqual(client._rpc_metadata, [ - ('google-cloud-resource-prefix', client._database_string), - ]) + self.assertEqual( + client._rpc_metadata, + [("google-cloud-resource-prefix", client._database_string)], + ) def test_collection_factory(self): - from google.cloud.firestore_v1beta1.collection import ( - CollectionReference) + from google.cloud.firestore_v1beta1.collection import CollectionReference - collection_id = 'users' + collection_id = "users" client = self._make_default_one() collection = client.collection(collection_id) @@ -111,12 +113,11 @@ def test_collection_factory(self): self.assertIsInstance(collection, CollectionReference) def test_collection_factory_nested(self): - 
from google.cloud.firestore_v1beta1.collection import ( - CollectionReference) + from google.cloud.firestore_v1beta1.collection import CollectionReference client = self._make_default_one() - parts = ('users', 'alovelace', 'beep') - collection_path = '/'.join(parts) + parts = ("users", "alovelace", "beep") + collection_path = "/".join(parts) collection1 = client.collection(collection_path) self.assertEqual(collection1._path, parts) @@ -132,9 +133,9 @@ def test_collection_factory_nested(self): def test_document_factory(self): from google.cloud.firestore_v1beta1.document import DocumentReference - parts = ('rooms', 'roomA') + parts = ("rooms", "roomA") client = self._make_default_one() - doc_path = '/'.join(parts) + doc_path = "/".join(parts) document1 = client.document(doc_path) self.assertEqual(document1._path, parts) @@ -151,8 +152,8 @@ def test_document_factory_nested(self): from google.cloud.firestore_v1beta1.document import DocumentReference client = self._make_default_one() - parts = ('rooms', 'roomA', 'shoes', 'dressy') - doc_path = '/'.join(parts) + parts = ("rooms", "roomA", "shoes", "dressy") + doc_path = "/".join(parts) document1 = client.document(doc_path) self.assertEqual(document1._path, parts) @@ -167,16 +168,13 @@ def test_document_factory_nested(self): def test_field_path(self): klass = self._get_target_class() - self.assertEqual(klass.field_path('a', 'b', 'c'), 'a.b.c') + self.assertEqual(klass.field_path("a", "b", "c"), "a.b.c") def test_write_option_last_update(self): from google.protobuf import timestamp_pb2 from google.cloud.firestore_v1beta1._helpers import LastUpdateOption - timestamp = timestamp_pb2.Timestamp( - seconds=1299767599, - nanos=811111097, - ) + timestamp = timestamp_pb2.Timestamp(seconds=1299767599, nanos=811111097) klass = self._get_target_class() option = klass.write_option(last_update_time=timestamp) @@ -210,9 +208,7 @@ def test_write_multiple_args(self): klass = self._get_target_class() with self.assertRaises(TypeError) as exc_info: - klass.write_option( - exists=False, - last_update_time=mock.sentinel.timestamp) + klass.write_option(exists=False, last_update_time=mock.sentinel.timestamp) self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,)) @@ -221,24 +217,22 @@ def test_write_bad_arg(self): klass = self._get_target_class() with self.assertRaises(TypeError) as exc_info: - klass.write_option(spinach='popeye') + klass.write_option(spinach="popeye") - extra = '{!r} was provided'.format('spinach') + extra = "{!r} was provided".format("spinach") self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR, extra)) def test_collections(self): from google.api_core.page_iterator import Iterator from google.api_core.page_iterator import Page - from google.cloud.firestore_v1beta1.collection import ( - CollectionReference) + from google.cloud.firestore_v1beta1.collection import CollectionReference - collection_ids = ['users', 'projects'] + collection_ids = ["users", "projects"] client = self._make_default_one() - firestore_api = mock.Mock(spec=['list_collection_ids']) + firestore_api = mock.Mock(spec=["list_collection_ids"]) client._firestore_api_internal = firestore_api class _Iterator(Iterator): - def __init__(self, pages): super(_Iterator, self).__init__(client=None) self._pages = pages @@ -260,13 +254,12 @@ def _next_page(self): self.assertEqual(collection.id, collection_id) firestore_api.list_collection_ids.assert_called_once_with( - client._database_string, - metadata=client._rpc_metadata, + client._database_string, metadata=client._rpc_metadata ) 
def _get_all_helper(self, client, references, document_pbs, **kwargs): # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['batch_get_documents']) + firestore_api = mock.Mock(spec=["batch_get_documents"]) response_iterator = iter(document_pbs) firestore_api.batch_get_documents.return_value = response_iterator @@ -281,19 +274,15 @@ def _get_all_helper(self, client, references, document_pbs, **kwargs): def _info_for_get_all(self, data1, data2): client = self._make_default_one() - document1 = client.document('pineapple', 'lamp1') - document2 = client.document('pineapple', 'lamp2') + document1 = client.document("pineapple", "lamp1") + document2 = client.document("pineapple", "lamp2") # Make response protobufs. - document_pb1, read_time = _doc_get_info( - document1._document_path, data1) - response1 = _make_batch_response( - found=document_pb1, read_time=read_time) + document_pb1, read_time = _doc_get_info(document1._document_path, data1) + response1 = _make_batch_response(found=document_pb1, read_time=read_time) - document_pb2, read_time = _doc_get_info( - document2._document_path, data2) - response2 = _make_batch_response( - found=document_pb2, read_time=read_time) + document_pb2, read_time = _doc_get_info(document2._document_path, data2) + response2 = _make_batch_response(found=document_pb2, read_time=read_time) return client, document1, document2, response1, response2 @@ -301,16 +290,19 @@ def test_get_all(self): from google.cloud.firestore_v1beta1.proto import common_pb2 from google.cloud.firestore_v1beta1.document import DocumentSnapshot - data1 = {'a': u'cheese'} - data2 = {'b': True, 'c': 18} + data1 = {"a": u"cheese"} + data2 = {"b": True, "c": 18} info = self._info_for_get_all(data1, data2) client, document1, document2, response1, response2 = info # Exercise the mocked ``batch_get_documents``. - field_paths = ['a', 'b'] + field_paths = ["a", "b"] snapshots = self._get_all_helper( - client, [document1, document2], [response1, response2], - field_paths=field_paths) + client, + [document1, document2], + [response1, response2], + field_paths=field_paths, + ) self.assertEqual(len(snapshots), 2) snapshot1 = snapshots[0] @@ -327,22 +319,27 @@ def test_get_all(self): doc_paths = [document1._document_path, document2._document_path] mask = common_pb2.DocumentMask(field_paths=field_paths) client._firestore_api.batch_get_documents.assert_called_once_with( - client._database_string, doc_paths, mask, transaction=None, - metadata=client._rpc_metadata) + client._database_string, + doc_paths, + mask, + transaction=None, + metadata=client._rpc_metadata, + ) def test_get_all_with_transaction(self): from google.cloud.firestore_v1beta1.document import DocumentSnapshot - data = {'so-much': 484} + data = {"so-much": 484} info = self._info_for_get_all(data, {}) client, document, _, response, _ = info transaction = client.transaction() - txn_id = b'the-man-is-non-stop' + txn_id = b"the-man-is-non-stop" transaction._id = txn_id # Exercise the mocked ``batch_get_documents``. snapshots = self._get_all_helper( - client, [document], [response], transaction=transaction) + client, [document], [response], transaction=transaction + ) self.assertEqual(len(snapshots), 1) snapshot = snapshots[0] @@ -353,19 +350,22 @@ def test_get_all_with_transaction(self): # Verify the call to the mock. 
doc_paths = [document._document_path] client._firestore_api.batch_get_documents.assert_called_once_with( - client._database_string, doc_paths, None, transaction=txn_id, - metadata=client._rpc_metadata) + client._database_string, + doc_paths, + None, + transaction=txn_id, + metadata=client._rpc_metadata, + ) def test_get_all_unknown_result(self): from google.cloud.firestore_v1beta1.client import _BAD_DOC_TEMPLATE - info = self._info_for_get_all({'z': 28.5}, {}) + info = self._info_for_get_all({"z": 28.5}, {}) client, document, _, _, response = info # Exercise the mocked ``batch_get_documents``. with self.assertRaises(ValueError) as exc_info: - self._get_all_helper( - client, [document], [response]) + self._get_all_helper(client, [document], [response]) err_msg = _BAD_DOC_TEMPLATE.format(response.found.name) self.assertEqual(exc_info.exception.args, (err_msg,)) @@ -373,23 +373,27 @@ def test_get_all_unknown_result(self): # Verify the call to the mock. doc_paths = [document._document_path] client._firestore_api.batch_get_documents.assert_called_once_with( - client._database_string, doc_paths, None, transaction=None, - metadata=client._rpc_metadata) + client._database_string, + doc_paths, + None, + transaction=None, + metadata=client._rpc_metadata, + ) def test_get_all_wrong_order(self): from google.cloud.firestore_v1beta1.document import DocumentSnapshot - data1 = {'up': 10} - data2 = {'down': -10} + data1 = {"up": 10} + data2 = {"down": -10} info = self._info_for_get_all(data1, data2) client, document1, document2, response1, response2 = info - document3 = client.document('pineapple', 'lamp3') + document3 = client.document("pineapple", "lamp3") response3 = _make_batch_response(missing=document3._document_path) # Exercise the mocked ``batch_get_documents``. 
snapshots = self._get_all_helper( - client, [document1, document2, document3], - [response2, response1, response3]) + client, [document1, document2, document3], [response2, response1, response3] + ) self.assertEqual(len(snapshots), 3) @@ -412,8 +416,12 @@ def test_get_all_wrong_order(self): document3._document_path, ] client._firestore_api.batch_get_documents.assert_called_once_with( - client._database_string, doc_paths, None, transaction=None, - metadata=client._rpc_metadata) + client._database_string, + doc_paths, + None, + transaction=None, + metadata=client._rpc_metadata, + ) def test_batch(self): from google.cloud.firestore_v1beta1.batch import WriteBatch @@ -437,7 +445,6 @@ def test_transaction(self): class Test__reference_info(unittest.TestCase): - @staticmethod def _call_fut(references): from google.cloud.firestore_v1beta1.client import _reference_info @@ -448,12 +455,12 @@ def test_it(self): from google.cloud.firestore_v1beta1.client import Client credentials = _make_credentials() - client = Client(project='hi-projject', credentials=credentials) + client = Client(project="hi-projject", credentials=credentials) - reference1 = client.document('a', 'b') - reference2 = client.document('a', 'b', 'c', 'd') - reference3 = client.document('a', 'b') - reference4 = client.document('f', 'g') + reference1 = client.document("a", "b") + reference2 = client.document("a", "b", "c", "d") + reference3 = client.document("a", "b") + reference4 = client.document("f", "g") doc_path1 = reference1._document_path doc_path2 = reference2._document_path @@ -462,9 +469,9 @@ def test_it(self): self.assertEqual(doc_path1, doc_path3) document_paths, reference_map = self._call_fut( - [reference1, reference2, reference3, reference4]) - self.assertEqual( - document_paths, [doc_path1, doc_path2, doc_path3, doc_path4]) + [reference1, reference2, reference3, reference4] + ) + self.assertEqual(document_paths, [doc_path1, doc_path2, doc_path3, doc_path4]) # reference3 over-rides reference1. 
expected_map = { doc_path2: reference2, @@ -475,7 +482,6 @@ def test_it(self): class Test__get_reference(unittest.TestCase): - @staticmethod def _call_fut(document_path, reference_map): from google.cloud.firestore_v1beta1.client import _get_reference @@ -483,15 +489,14 @@ def _call_fut(document_path, reference_map): return _get_reference(document_path, reference_map) def test_success(self): - doc_path = 'a/b/c' + doc_path = "a/b/c" reference_map = {doc_path: mock.sentinel.reference} - self.assertIs( - self._call_fut(doc_path, reference_map), mock.sentinel.reference) + self.assertIs(self._call_fut(doc_path, reference_map), mock.sentinel.reference) def test_failure(self): from google.cloud.firestore_v1beta1.client import _BAD_DOC_TEMPLATE - doc_path = '1/888/call-now' + doc_path = "1/888/call-now" with self.assertRaises(ValueError) as exc_info: self._call_fut(doc_path, {}) @@ -500,10 +505,8 @@ def test_failure(self): class Test__parse_batch_get(unittest.TestCase): - @staticmethod - def _call_fut( - get_doc_response, reference_map, client=mock.sentinel.client): + def _call_fut(get_doc_response, reference_map, client=mock.sentinel.client): from google.cloud.firestore_v1beta1.client import _parse_batch_get return _parse_batch_get(get_doc_response, reference_map, client) @@ -512,11 +515,12 @@ def _call_fut( def _dummy_ref_string(): from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE - project = u'bazzzz' - collection_id = u'fizz' - document_id = u'buzz' - return u'projects/{}/databases/{}/documents/{}/{}'.format( - project, DEFAULT_DATABASE, collection_id, document_id) + project = u"bazzzz" + collection_id = u"fizz" + document_id = u"buzz" + return u"projects/{}/databases/{}/documents/{}/{}".format( + project, DEFAULT_DATABASE, collection_id, document_id + ) def test_found(self): from google.cloud.firestore_v1beta1.proto import document_pb2 @@ -533,22 +537,19 @@ def test_found(self): document_pb = document_pb2.Document( name=ref_string, fields={ - 'foo': document_pb2.Value(double_value=1.5), - 'bar': document_pb2.Value(string_value=u'skillz'), + "foo": document_pb2.Value(double_value=1.5), + "bar": document_pb2.Value(string_value=u"skillz"), }, create_time=create_time, update_time=update_time, ) - response_pb = _make_batch_response( - found=document_pb, - read_time=read_time, - ) + response_pb = _make_batch_response(found=document_pb, read_time=read_time) reference_map = {ref_string: mock.sentinel.reference} snapshot = self._call_fut(response_pb, reference_map) self.assertIsInstance(snapshot, DocumentSnapshot) self.assertIs(snapshot._reference, mock.sentinel.reference) - self.assertEqual(snapshot._data, {'foo': 1.5, 'bar': u'skillz'}) + self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"}) self.assertTrue(snapshot._exists) self.assertEqual(snapshot.read_time, read_time) self.assertEqual(snapshot.create_time, create_time) @@ -567,17 +568,16 @@ def test_unset_result_type(self): self._call_fut(response_pb, {}) def test_unknown_result_type(self): - response_pb = mock.Mock(spec=['WhichOneof']) - response_pb.WhichOneof.return_value = 'zoob_value' + response_pb = mock.Mock(spec=["WhichOneof"]) + response_pb.WhichOneof.return_value = "zoob_value" with self.assertRaises(ValueError): self._call_fut(response_pb, {}) - response_pb.WhichOneof.assert_called_once_with('result') + response_pb.WhichOneof.assert_called_once_with("result") class Test__get_doc_mask(unittest.TestCase): - @staticmethod def _call_fut(field_paths): from google.cloud.firestore_v1beta1.client import 
_get_doc_mask @@ -590,7 +590,7 @@ def test_none(self): def test_paths(self): from google.cloud.firestore_v1beta1.proto import common_pb2 - field_paths = ['a.b', 'c'] + field_paths = ["a.b", "c"] result = self._call_fut(field_paths) expected = common_pb2.DocumentMask(field_paths=field_paths) self.assertEqual(result, expected) diff --git a/firestore/tests/unit/test_collection.py b/firestore/tests/unit/test_collection.py index ab4da4ccee8f..6e0074239bb0 100644 --- a/firestore/tests/unit/test_collection.py +++ b/firestore/tests/unit/test_collection.py @@ -21,11 +21,9 @@ class TestCollectionReference(unittest.TestCase): - @staticmethod def _get_target_class(): - from google.cloud.firestore_v1beta1.collection import ( - CollectionReference) + from google.cloud.firestore_v1beta1.collection import CollectionReference return CollectionReference @@ -38,8 +36,8 @@ def _get_public_methods(klass): return set( name for name, value in six.iteritems(klass.__dict__) - if (not name.startswith('_') and - isinstance(value, types.FunctionType))) + if (not name.startswith("_") and isinstance(value, types.FunctionType)) + ) def test_query_method_matching(self): from google.cloud.firestore_v1beta1.query import Query @@ -52,13 +50,14 @@ def test_query_method_matching(self): self.assertLessEqual(query_methods, collection_methods) def test_constructor(self): - collection_id1 = 'rooms' - document_id = 'roomA' - collection_id2 = 'messages' + collection_id1 = "rooms" + document_id = "roomA" + collection_id2 = "messages" client = mock.sentinel.client collection = self._make_one( - collection_id1, document_id, collection_id2, client=client) + collection_id1, document_id, collection_id2, client=client + ) self.assertIs(collection._client, client) expected_path = (collection_id1, document_id, collection_id2) self.assertEqual(collection._path, expected_path) @@ -67,30 +66,31 @@ def test_constructor_invalid_path(self): with self.assertRaises(ValueError): self._make_one() with self.assertRaises(ValueError): - self._make_one(99, 'doc', 'bad-collection-id') + self._make_one(99, "doc", "bad-collection-id") with self.assertRaises(ValueError): - self._make_one('bad-document-ID', None, 'sub-collection') + self._make_one("bad-document-ID", None, "sub-collection") with self.assertRaises(ValueError): - self._make_one('Just', 'A-Document') + self._make_one("Just", "A-Document") def test_constructor_invalid_kwarg(self): with self.assertRaises(TypeError): - self._make_one('Coh-lek-shun', donut=True) + self._make_one("Coh-lek-shun", donut=True) def test_id_property(self): - collection_id = 'hi-bob' + collection_id = "hi-bob" collection = self._make_one(collection_id) self.assertEqual(collection.id, collection_id) def test_parent_property(self): from google.cloud.firestore_v1beta1.document import DocumentReference - collection_id1 = 'grocery-store' - document_id = 'market' - collection_id2 = 'darth' + collection_id1 = "grocery-store" + document_id = "market" + collection_id2 = "darth" client = _make_client() collection = self._make_one( - collection_id1, document_id, collection_id2, client=client) + collection_id1, document_id, collection_id2, client=client + ) parent = collection.parent self.assertIsInstance(parent, DocumentReference) @@ -98,67 +98,70 @@ def test_parent_property(self): self.assertEqual(parent._path, (collection_id1, document_id)) def test_parent_property_top_level(self): - collection = self._make_one('tahp-leh-vull') + collection = self._make_one("tahp-leh-vull") self.assertIsNone(collection.parent) def 
test_document_factory_explicit_id(self): from google.cloud.firestore_v1beta1.document import DocumentReference - collection_id = 'grocery-store' - document_id = 'market' + collection_id = "grocery-store" + document_id = "market" client = _make_client() collection = self._make_one(collection_id, client=client) child = collection.document(document_id) self.assertIsInstance(child, DocumentReference) self.assertIs(child._client, client) - self.assertEqual( - child._path, (collection_id, document_id)) + self.assertEqual(child._path, (collection_id, document_id)) - @mock.patch('google.cloud.firestore_v1beta1.collection._auto_id', - return_value='zorpzorpthreezorp012') + @mock.patch( + "google.cloud.firestore_v1beta1.collection._auto_id", + return_value="zorpzorpthreezorp012", + ) def test_document_factory_auto_id(self, mock_auto_id): from google.cloud.firestore_v1beta1.document import DocumentReference - collection_name = 'space-town' + collection_name = "space-town" client = _make_client() collection = self._make_one(collection_name, client=client) child = collection.document() self.assertIsInstance(child, DocumentReference) self.assertIs(child._client, client) - self.assertEqual( - child._path, (collection_name, mock_auto_id.return_value)) + self.assertEqual(child._path, (collection_name, mock_auto_id.return_value)) mock_auto_id.assert_called_once_with() def test__parent_info_top_level(self): client = _make_client() - collection_id = 'soap' + collection_id = "soap" collection = self._make_one(collection_id, client=client) parent_path, expected_prefix = collection._parent_info() - expected_path = 'projects/{}/databases/{}/documents'.format( - client.project, client._database) + expected_path = "projects/{}/databases/{}/documents".format( + client.project, client._database + ) self.assertEqual(parent_path, expected_path) - prefix = '{}/{}'.format(expected_path, collection_id) + prefix = "{}/{}".format(expected_path, collection_id) self.assertEqual(expected_prefix, prefix) def test__parent_info_nested(self): - collection_id1 = 'bar' - document_id = 'baz' - collection_id2 = 'chunk' + collection_id1 = "bar" + document_id = "baz" + collection_id2 = "chunk" client = _make_client() collection = self._make_one( - collection_id1, document_id, collection_id2, client=client) + collection_id1, document_id, collection_id2, client=client + ) parent_path, expected_prefix = collection._parent_info() - expected_path = 'projects/{}/databases/{}/documents/{}/{}'.format( - client.project, client._database, collection_id1, document_id) + expected_path = "projects/{}/databases/{}/documents/{}/{}".format( + client.project, client._database, collection_id1, document_id + ) self.assertEqual(parent_path, expected_path) - prefix = '{}/{}'.format(expected_path, collection_id2) + prefix = "{}/{}".format(expected_path, collection_id2) self.assertEqual(expected_prefix, prefix) def test_add_auto_assigned(self): @@ -167,28 +170,25 @@ def test_add_auto_assigned(self): from google.cloud.firestore_v1beta1.document import DocumentReference # Create a minimal fake GAPIC add attach it to a real client. - firestore_api = mock.Mock(spec=['create_document']) + firestore_api = mock.Mock(spec=["create_document"]) create_doc_response = document_pb2.Document() firestore_api.create_document.return_value = create_doc_response client = _make_client() client._firestore_api_internal = firestore_api # Actually make a collection. 
- collection = self._make_one( - 'grand-parent', 'parent', 'child', client=client) + collection = self._make_one("grand-parent", "parent", "child", client=client) # Add a dummy response for the fake GAPIC. parent_path = collection.parent._document_path - auto_assigned_id = 'cheezburger' - name = '{}/{}/{}'.format( - parent_path, collection.id, auto_assigned_id) + auto_assigned_id = "cheezburger" + name = "{}/{}/{}".format(parent_path, collection.id, auto_assigned_id) create_doc_response = document_pb2.Document(name=name) - create_doc_response.update_time.FromDatetime( - datetime.datetime.utcnow()) + create_doc_response.update_time.FromDatetime(datetime.datetime.utcnow()) firestore_api.create_document.return_value = create_doc_response # Actually call add() on our collection. - document_data = {'been': 'here'} + document_data = {"been": "here"} update_time, document_ref = collection.add(document_data) # Verify the response and the mocks. @@ -199,11 +199,16 @@ def test_add_auto_assigned(self): self.assertEqual(document_ref._path, expected_path) expected_document_pb = document_pb2.Document( - fields=_helpers.encode_dict(document_data)) + fields=_helpers.encode_dict(document_data) + ) firestore_api.create_document.assert_called_once_with( - parent_path, collection_id=collection.id, document_id=None, - document=expected_document_pb, mask=None, - metadata=client._rpc_metadata) + parent_path, + collection_id=collection.id, + document_id=None, + document=expected_document_pb, + mask=None, + metadata=client._rpc_metadata, + ) @staticmethod def _write_pb_for_create(document_path, document_data): @@ -214,8 +219,7 @@ def _write_pb_for_create(document_path, document_data): return write_pb2.Write( update=document_pb2.Document( - name=document_path, - fields=_helpers.encode_dict(document_data), + name=document_path, fields=_helpers.encode_dict(document_data) ), current_document=common_pb2.Precondition(exists=False), ) @@ -224,11 +228,13 @@ def test_add_explicit_id(self): from google.cloud.firestore_v1beta1.document import DocumentReference # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) write_result = mock.Mock( - update_time=mock.sentinel.update_time, spec=['update_time']) + update_time=mock.sentinel.update_time, spec=["update_time"] + ) commit_response = mock.Mock( - write_results=[write_result], spec=['write_results']) + write_results=[write_result], spec=["write_results"] + ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. @@ -236,14 +242,10 @@ def test_add_explicit_id(self): client._firestore_api_internal = firestore_api # Actually make a collection and call add(). - collection = self._make_one('parent', client=client) - document_data = { - 'zorp': 208.75, - 'i-did-not': b'know that', - } - doc_id = 'child' - update_time, document_ref = collection.add( - document_data, document_id=doc_id) + collection = self._make_one("parent", client=client) + document_data = {"zorp": 208.75, "i-did-not": b"know that"} + doc_id = "child" + update_time, document_ref = collection.add(document_data, document_id=doc_id) # Verify the response and the mocks. 
self.assertIs(update_time, mock.sentinel.update_time) @@ -251,23 +253,26 @@ def test_add_explicit_id(self): self.assertIs(document_ref._client, client) self.assertEqual(document_ref._path, (collection.id, doc_id)) - write_pb = self._write_pb_for_create( - document_ref._document_path, document_data) + write_pb = self._write_pb_for_create(document_ref._document_path, document_data) firestore_api.commit.assert_called_once_with( - client._database_string, [write_pb], transaction=None, - metadata=client._rpc_metadata) + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) def test_select(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') - field_paths = ['a', 'b'] + collection = self._make_one("collection") + field_paths = ["a", "b"] query = collection.select(field_paths) self.assertIsInstance(query, Query) self.assertIs(query._parent, collection) - projection_paths = [field_ref.field_path - for field_ref in query._projection.fields] + projection_paths = [ + field_ref.field_path for field_ref in query._projection.fields + ] self.assertEqual(projection_paths, field_paths) @staticmethod @@ -277,9 +282,7 @@ def _make_field_filter_pb(field_path, op_string, value): from google.cloud.firestore_v1beta1.query import _enum_from_op_string return query_pb2.StructuredQuery.FieldFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), op=_enum_from_op_string(op_string), value=_helpers.encode_value(value), ) @@ -287,9 +290,9 @@ def _make_field_filter_pb(field_path, op_string, value): def test_where(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') - field_path = 'foo' - op_string = '==' + collection = self._make_one("collection") + field_path = "foo" + op_string = "==" value = 45 query = collection.where(field_path, op_string, value) @@ -298,8 +301,8 @@ def test_where(self): self.assertEqual(len(query._field_filters), 1) field_filter_pb = query._field_filters[0] self.assertEqual( - field_filter_pb, - self._make_field_filter_pb(field_path, op_string, value)) + field_filter_pb, self._make_field_filter_pb(field_path, op_string, value) + ) @staticmethod def _make_order_pb(field_path, direction): @@ -307,17 +310,15 @@ def _make_order_pb(field_path, direction): from google.cloud.firestore_v1beta1.query import _enum_from_direction return query_pb2.StructuredQuery.Order( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), direction=_enum_from_direction(direction), ) def test_order_by(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') - field_path = 'foo' + collection = self._make_one("collection") + field_path = "foo" direction = Query.DESCENDING query = collection.order_by(field_path, direction=direction) @@ -325,13 +326,12 @@ def test_order_by(self): self.assertIs(query._parent, collection) self.assertEqual(len(query._orders), 1) order_pb = query._orders[0] - self.assertEqual( - order_pb, self._make_order_pb(field_path, direction)) + self.assertEqual(order_pb, self._make_order_pb(field_path, direction)) def test_limit(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') + collection = self._make_one("collection") limit = 15 query = collection.limit(limit) 
@@ -342,7 +342,7 @@ def test_limit(self): def test_offset(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') + collection = self._make_one("collection") offset = 113 query = collection.offset(offset) @@ -353,8 +353,8 @@ def test_offset(self): def test_start_at(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') - doc_fields = {'a': 'b'} + collection = self._make_one("collection") + doc_fields = {"a": "b"} query = collection.start_at(doc_fields) self.assertIsInstance(query, Query) @@ -364,8 +364,8 @@ def test_start_at(self): def test_start_after(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') - doc_fields = {'d': 'foo', 'e': 10} + collection = self._make_one("collection") + doc_fields = {"d": "foo", "e": 10} query = collection.start_after(doc_fields) self.assertIsInstance(query, Query) @@ -375,8 +375,8 @@ def test_start_after(self): def test_end_before(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') - doc_fields = {'bar': 10.5} + collection = self._make_one("collection") + doc_fields = {"bar": 10.5} query = collection.end_before(doc_fields) self.assertIsInstance(query, Query) @@ -386,17 +386,17 @@ def test_end_before(self): def test_end_at(self): from google.cloud.firestore_v1beta1.query import Query - collection = self._make_one('collection') - doc_fields = {'opportunity': True, 'reason': 9} + collection = self._make_one("collection") + doc_fields = {"opportunity": True, "reason": 9} query = collection.end_at(doc_fields) self.assertIsInstance(query, Query) self.assertIs(query._parent, collection) self.assertEqual(query._end_at, (doc_fields, False)) - @mock.patch('google.cloud.firestore_v1beta1.query.Query', autospec=True) + @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True) def test_get(self, query_class): - collection = self._make_one('collection') + collection = self._make_one("collection") get_response = collection.get() query_class.assert_called_once_with(collection) @@ -404,9 +404,9 @@ def test_get(self, query_class): self.assertIs(get_response, query_instance.get.return_value) query_instance.get.assert_called_once_with(transaction=None) - @mock.patch('google.cloud.firestore_v1beta1.query.Query', autospec=True) + @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True) def test_get_with_transaction(self, query_class): - collection = self._make_one('collection') + collection = self._make_one("collection") transaction = mock.sentinel.txn get_response = collection.get(transaction=transaction) @@ -415,27 +415,25 @@ def test_get_with_transaction(self, query_class): self.assertIs(get_response, query_instance.get.return_value) query_instance.get.assert_called_once_with(transaction=transaction) - @mock.patch('google.cloud.firestore_v1beta1.collection.Watch', - autospec=True) + @mock.patch("google.cloud.firestore_v1beta1.collection.Watch", autospec=True) def test_on_snapshot(self, watch): - collection = self._make_one('collection') + collection = self._make_one("collection") collection.on_snapshot(None) watch.for_query.assert_called_once() class Test__auto_id(unittest.TestCase): - @staticmethod def _call_fut(): from google.cloud.firestore_v1beta1.collection import _auto_id return _auto_id() - @mock.patch('random.choice') + @mock.patch("random.choice") def test_it(self, mock_rand_choice): from google.cloud.firestore_v1beta1.collection 
import _AUTO_ID_CHARS - mock_result = '0123456789abcdefghij' + mock_result = "0123456789abcdefghij" mock_rand_choice.side_effect = list(mock_result) result = self._call_fut() self.assertEqual(result, mock_result) @@ -454,4 +452,4 @@ def _make_client(): from google.cloud.firestore_v1beta1.client import Client credentials = _make_credentials() - return Client(project='project-project', credentials=credentials) + return Client(project="project-project", credentials=credentials) diff --git a/firestore/tests/unit/test_cross_language.py b/firestore/tests/unit/test_cross_language.py index 5190eadc6c4f..4d999d5c8435 100644 --- a/firestore/tests/unit/test_cross_language.py +++ b/firestore/tests/unit/test_cross_language.py @@ -28,55 +28,67 @@ def _load_testproto(filename): - with open(filename, 'r') as tp_file: + with open(filename, "r") as tp_file: tp_text = tp_file.read() test_proto = test_pb2.Test() text_format.Merge(tp_text, test_proto) shortname = os.path.split(filename)[-1] - test_proto.description = ( - test_proto.description + ' (%s)' % shortname - ) + test_proto.description = test_proto.description + " (%s)" % shortname return test_proto ALL_TESTPROTOS = [ - _load_testproto(filename) for filename in sorted( - glob.glob('tests/unit/testdata/*.textproto')) + _load_testproto(filename) + for filename in sorted(glob.glob("tests/unit/testdata/*.textproto")) ] _CREATE_TESTPROTOS = [ - test_proto for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof('test') == 'create'] + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "create" +] _GET_TESTPROTOS = [ - test_proto for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof('test') == 'get'] + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "get" +] _SET_TESTPROTOS = [ - test_proto for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof('test') == 'set'] + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "set" +] _UPDATE_TESTPROTOS = [ - test_proto for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof('test') == 'update'] + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "update" +] _UPDATE_PATHS_TESTPROTOS = [ - test_proto for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof('test') == 'update_paths'] + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "update_paths" +] _DELETE_TESTPROTOS = [ - test_proto for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof('test') == 'delete'] + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "delete" +] _LISTEN_TESTPROTOS = [ - test_proto for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof('test') == 'listen'] + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "listen" +] def _mock_firestore_api(): - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) commit_response = firestore_pb2.CommitResponse( - write_results=[write_pb2.WriteResult()], + write_results=[write_pb2.WriteResult()] ) firestore_api.commit.return_value = commit_response return firestore_api @@ -87,7 +99,7 @@ def _make_client_document(firestore_api, testcase): from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE import google.auth.credentials - _, project, _, database, _, doc_path = testcase.doc_ref_path.split('/', 5) + _, project, _, database, _, doc_path = testcase.doc_ref_path.split("/", 5) assert database == 
DEFAULT_DATABASE # Attach the fake GAPIC to a real client. @@ -98,7 +110,7 @@ def _make_client_document(firestore_api, testcase): def _run_testcase(testcase, call, firestore_api, client): - if getattr(testcase, 'is_error', False): + if getattr(testcase, "is_error", False): # TODO: is there a subclass of Exception we can check for? with pytest.raises(Exception): call() @@ -108,10 +120,11 @@ def _run_testcase(testcase, call, firestore_api, client): client._database_string, list(testcase.request.writes), transaction=None, - metadata=client._rpc_metadata) + metadata=client._rpc_metadata, + ) -@pytest.mark.parametrize('test_proto', _CREATE_TESTPROTOS) +@pytest.mark.parametrize("test_proto", _CREATE_TESTPROTOS) def test_create_testprotos(test_proto): testcase = test_proto.create firestore_api = _mock_firestore_api() @@ -121,10 +134,10 @@ def test_create_testprotos(test_proto): _run_testcase(testcase, call, firestore_api, client) -@pytest.mark.parametrize('test_proto', _GET_TESTPROTOS) +@pytest.mark.parametrize("test_proto", _GET_TESTPROTOS) def test_get_testprotos(test_proto): testcase = test_proto.get - firestore_api = mock.Mock(spec=['get_document']) + firestore_api = mock.Mock(spec=["get_document"]) response = document_pb2.Document() firestore_api.get_document.return_value = response client, document = _make_client_document(firestore_api, testcase) @@ -135,10 +148,11 @@ def test_get_testprotos(test_proto): document._document_path, mask=None, transaction=None, - metadata=client._rpc_metadata) + metadata=client._rpc_metadata, + ) -@pytest.mark.parametrize('test_proto', _SET_TESTPROTOS) +@pytest.mark.parametrize("test_proto", _SET_TESTPROTOS) def test_set_testprotos(test_proto): testcase = test_proto.set firestore_api = _mock_firestore_api() @@ -152,7 +166,7 @@ def test_set_testprotos(test_proto): _run_testcase(testcase, call, firestore_api, client) -@pytest.mark.parametrize('test_proto', _UPDATE_TESTPROTOS) +@pytest.mark.parametrize("test_proto", _UPDATE_TESTPROTOS) def test_update_testprotos(test_proto): testcase = test_proto.update firestore_api = _mock_firestore_api() @@ -166,14 +180,13 @@ def test_update_testprotos(test_proto): _run_testcase(testcase, call, firestore_api, client) -@pytest.mark.skip( - reason="Python has no way to call update with a list of field paths.") -@pytest.mark.parametrize('test_proto', _UPDATE_PATHS_TESTPROTOS) +@pytest.mark.skip(reason="Python has no way to call update with a list of field paths.") +@pytest.mark.parametrize("test_proto", _UPDATE_PATHS_TESTPROTOS) def test_update_paths_testprotos(test_proto): # pragma: NO COVER pass -@pytest.mark.parametrize('test_proto', _DELETE_TESTPROTOS) +@pytest.mark.parametrize("test_proto", _DELETE_TESTPROTOS) def test_delete_testprotos(test_proto): testcase = test_proto.delete firestore_api = _mock_firestore_api() @@ -187,7 +200,7 @@ def test_delete_testprotos(test_proto): @pytest.mark.skip(reason="Watch aka listen not yet implemented in Python.") -@pytest.mark.parametrize('test_proto', _LISTEN_TESTPROTOS) +@pytest.mark.parametrize("test_proto", _LISTEN_TESTPROTOS) def test_listen_paths_testprotos(test_proto): # pragma: NO COVER pass @@ -200,14 +213,14 @@ def convert_data(v): from google.cloud.firestore_v1beta1 import DELETE_FIELD from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP - if v == 'ServerTimestamp': + if v == "ServerTimestamp": return SERVER_TIMESTAMP - elif v == 'Delete': + elif v == "Delete": return DELETE_FIELD elif isinstance(v, list): - if v[0] == 'ArrayRemove': + if v[0] == "ArrayRemove": return 
ArrayRemove([convert_data(e) for e in v[1:]]) - if v[0] == 'ArrayUnion': + if v[0] == "ArrayUnion": return ArrayUnion([convert_data(e) for e in v[1:]]) return [convert_data(e) for e in v] elif isinstance(v, dict): @@ -221,8 +234,7 @@ def convert_set_option(option): if option.fields: return [ - _helpers.FieldPath(*field.field).to_api_repr() - for field in option.fields + _helpers.FieldPath(*field.field).to_api_repr() for field in option.fields ] assert option.all @@ -232,8 +244,8 @@ def convert_set_option(option): def convert_precondition(precond): from google.cloud.firestore_v1beta1 import Client - if precond.HasField('exists'): + if precond.HasField("exists"): return Client.write_option(exists=precond.exists) - assert precond.HasField('update_time') + assert precond.HasField("update_time") return Client.write_option(last_update_time=precond.update_time) diff --git a/firestore/tests/unit/test_document.py b/firestore/tests/unit/test_document.py index 0145372a75e0..408d90b4ae05 100644 --- a/firestore/tests/unit/test_document.py +++ b/firestore/tests/unit/test_document.py @@ -19,7 +19,6 @@ class TestDocumentReference(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1.document import DocumentReference @@ -31,37 +30,36 @@ def _make_one(self, *args, **kwargs): return klass(*args, **kwargs) def test_constructor(self): - collection_id1 = 'users' - document_id1 = 'alovelace' - collection_id2 = 'platform' - document_id2 = '*nix' + collection_id1 = "users" + document_id1 = "alovelace" + collection_id2 = "platform" + document_id2 = "*nix" client = mock.sentinel.client document = self._make_one( - collection_id1, document_id1, - collection_id2, document_id2, client=client) + collection_id1, document_id1, collection_id2, document_id2, client=client + ) self.assertIs(document._client, client) - expected_path = ( - collection_id1, document_id1, collection_id2, document_id2) + expected_path = (collection_id1, document_id1, collection_id2, document_id2) self.assertEqual(document._path, expected_path) def test_constructor_invalid_path(self): with self.assertRaises(ValueError): self._make_one() with self.assertRaises(ValueError): - self._make_one(None, 'before', 'bad-collection-id', 'fifteen') + self._make_one(None, "before", "bad-collection-id", "fifteen") with self.assertRaises(ValueError): - self._make_one('bad-document-ID', None) + self._make_one("bad-document-ID", None) with self.assertRaises(ValueError): - self._make_one('Just', 'A-Collection', 'Sub') + self._make_one("Just", "A-Collection", "Sub") def test_constructor_invalid_kwarg(self): with self.assertRaises(TypeError): - self._make_one('Coh-lek-shun', 'Dahk-yu-mehnt', burger=18.75) + self._make_one("Coh-lek-shun", "Dahk-yu-mehnt", burger=18.75) def test___copy__(self): - client = _make_client('rain') - document = self._make_one('a', 'b', client=client) + client = _make_client("rain") + document = self._make_one("a", "b", client=client) # Access the document path so it is copied. 
doc_path = document._document_path self.assertEqual(doc_path, document._document_path_internal) @@ -71,14 +69,13 @@ def test___copy__(self): self.assertIs(new_document._client, document._client) self.assertEqual(new_document._path, document._path) self.assertEqual( - new_document._document_path_internal, - document._document_path_internal) + new_document._document_path_internal, document._document_path_internal + ) def test___deepcopy__calls_copy(self): client = mock.sentinel.client - document = self._make_one('a', 'b', client=client) - document.__copy__ = mock.Mock( - return_value=mock.sentinel.new_doc, spec=[]) + document = self._make_one("a", "b", client=client) + document.__copy__ = mock.Mock(return_value=mock.sentinel.new_doc, spec=[]) unused_memo = {} new_document = document.__deepcopy__(unused_memo) @@ -86,16 +83,12 @@ def test___deepcopy__calls_copy(self): document.__copy__.assert_called_once_with() def test__eq__same_type(self): - document1 = self._make_one('X', 'YY', client=mock.sentinel.client) - document2 = self._make_one('X', 'ZZ', client=mock.sentinel.client) - document3 = self._make_one('X', 'YY', client=mock.sentinel.client2) - document4 = self._make_one('X', 'YY', client=mock.sentinel.client) - - pairs = ( - (document1, document2), - (document1, document3), - (document2, document3), - ) + document1 = self._make_one("X", "YY", client=mock.sentinel.client) + document2 = self._make_one("X", "ZZ", client=mock.sentinel.client) + document3 = self._make_one("X", "YY", client=mock.sentinel.client2) + document4 = self._make_one("X", "YY", client=mock.sentinel.client) + + pairs = ((document1, document2), (document1, document3), (document2, document3)) for candidate1, candidate2 in pairs: # We use == explicitly since assertNotEqual would use !=. 
equality_val = candidate1 == candidate2 @@ -106,17 +99,17 @@ def test__eq__same_type(self): self.assertIsNot(document1, document4) def test__eq__other_type(self): - document = self._make_one('X', 'YY', client=mock.sentinel.client) + document = self._make_one("X", "YY", client=mock.sentinel.client) other = object() equality_val = document == other self.assertFalse(equality_val) self.assertIs(document.__eq__(other), NotImplemented) def test__ne__same_type(self): - document1 = self._make_one('X', 'YY', client=mock.sentinel.client) - document2 = self._make_one('X', 'ZZ', client=mock.sentinel.client) - document3 = self._make_one('X', 'YY', client=mock.sentinel.client2) - document4 = self._make_one('X', 'YY', client=mock.sentinel.client) + document1 = self._make_one("X", "YY", client=mock.sentinel.client) + document2 = self._make_one("X", "ZZ", client=mock.sentinel.client) + document3 = self._make_one("X", "YY", client=mock.sentinel.client2) + document4 = self._make_one("X", "YY", client=mock.sentinel.client) self.assertNotEqual(document1, document2) self.assertNotEqual(document1, document3) @@ -128,21 +121,22 @@ def test__ne__same_type(self): self.assertIsNot(document1, document4) def test__ne__other_type(self): - document = self._make_one('X', 'YY', client=mock.sentinel.client) + document = self._make_one("X", "YY", client=mock.sentinel.client) other = object() self.assertNotEqual(document, other) self.assertIs(document.__ne__(other), NotImplemented) def test__document_path_property(self): - project = 'hi-its-me-ok-bye' + project = "hi-its-me-ok-bye" client = _make_client(project=project) - collection_id = 'then' - document_id = '090909iii' + collection_id = "then" + document_id = "090909iii" document = self._make_one(collection_id, document_id, client=client) doc_path = document._document_path - expected = 'projects/{}/databases/{}/documents/{}/{}'.format( - project, client._database, collection_id, document_id) + expected = "projects/{}/databases/{}/documents/{}/{}".format( + project, client._database, collection_id, document_id + ) self.assertEqual(doc_path, expected) self.assertIs(document._document_path_internal, doc_path) @@ -151,24 +145,23 @@ def test__document_path_property(self): self.assertIs(document._document_path, mock.sentinel.cached) def test__document_path_property_no_client(self): - document = self._make_one('hi', 'bye') + document = self._make_one("hi", "bye") self.assertIsNone(document._client) with self.assertRaises(ValueError): - getattr(document, '_document_path') + getattr(document, "_document_path") self.assertIsNone(document._document_path_internal) def test_id_property(self): - document_id = '867-5309' - document = self._make_one('Co-lek-shun', document_id) + document_id = "867-5309" + document = self._make_one("Co-lek-shun", document_id) self.assertEqual(document.id, document_id) def test_parent_property(self): - from google.cloud.firestore_v1beta1.collection import ( - CollectionReference) + from google.cloud.firestore_v1beta1.collection import CollectionReference - collection_id = 'grocery-store' - document_id = 'market' + collection_id = "grocery-store" + document_id = "market" client = _make_client() document = self._make_one(collection_id, document_id, client=client) @@ -178,21 +171,18 @@ def test_parent_property(self): self.assertEqual(parent._path, (collection_id,)) def test_collection_factory(self): - from google.cloud.firestore_v1beta1.collection import ( - CollectionReference) + from google.cloud.firestore_v1beta1.collection import CollectionReference - 
collection_id = 'grocery-store' - document_id = 'market' - new_collection = 'fruits' + collection_id = "grocery-store" + document_id = "market" + new_collection = "fruits" client = _make_client() - document = self._make_one( - collection_id, document_id, client=client) + document = self._make_one(collection_id, document_id, client=client) child = document.collection(new_collection) self.assertIsInstance(child, CollectionReference) self.assertIs(child._client, client) - self.assertEqual( - child._path, (collection_id, document_id, new_collection)) + self.assertEqual(child._path, (collection_id, document_id, new_collection)) @staticmethod def _write_pb_for_create(document_path, document_data): @@ -203,63 +193,61 @@ def _write_pb_for_create(document_path, document_data): return write_pb2.Write( update=document_pb2.Document( - name=document_path, - fields=_helpers.encode_dict(document_data), + name=document_path, fields=_helpers.encode_dict(document_data) ), current_document=common_pb2.Precondition(exists=False), ) def test_create(self): # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) commit_response = mock.Mock( - write_results=[mock.sentinel.write_result], - spec=['write_results']) + write_results=[mock.sentinel.write_result], spec=["write_results"] + ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. - client = _make_client('dignity') + client = _make_client("dignity") client._firestore_api_internal = firestore_api # Actually make a document and call create(). - document = self._make_one('foo', 'twelve', client=client) - document_data = { - 'hello': 'goodbye', - 'count': 99, - } + document = self._make_one("foo", "twelve", client=client) + document_data = {"hello": "goodbye", "count": 99} write_result = document.create(document_data) # Verify the response and the mocks. self.assertIs(write_result, mock.sentinel.write_result) - write_pb = self._write_pb_for_create( - document._document_path, document_data) + write_pb = self._write_pb_for_create(document._document_path, document_data) firestore_api.commit.assert_called_once_with( - client._database_string, [write_pb], transaction=None, - metadata=client._rpc_metadata) + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) def test_create_empty(self): # Create a minimal fake GAPIC with a dummy response. from google.cloud.firestore_v1beta1.document import DocumentReference from google.cloud.firestore_v1beta1.document import DocumentSnapshot - firestore_api = mock.Mock(spec=['commit']) + + firestore_api = mock.Mock(spec=["commit"]) document_reference = mock.create_autospec(DocumentReference) snapshot = mock.create_autospec(DocumentSnapshot) snapshot.exists = True document_reference.get.return_value = snapshot commit_response = mock.Mock( - write_results=[document_reference], - get=[snapshot], - spec=['write_results']) + write_results=[document_reference], get=[snapshot], spec=["write_results"] + ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. - client = _make_client('dignity') + client = _make_client("dignity") client._firestore_api_internal = firestore_api client.get_all = mock.MagicMock() client.get_all.exists.return_value = True # Actually make a document and call create(). 
- document = self._make_one('foo', 'twelve', client=client) + document = self._make_one("foo", "twelve", client=client) document_data = {} write_result = document.create(document_data) self.assertTrue(write_result.get().exists) @@ -270,16 +258,18 @@ def _write_pb_for_set(document_path, document_data, merge): from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import write_pb2 from google.cloud.firestore_v1beta1 import _helpers + write_pbs = write_pb2.Write( update=document_pb2.Document( - name=document_path, - fields=_helpers.encode_dict(document_data), - ), + name=document_path, fields=_helpers.encode_dict(document_data) + ) ) if merge: field_paths = [ - field_path for field_path, value in _helpers.extract_fields( - document_data, _helpers.FieldPath()) + field_path + for field_path, value in _helpers.extract_fields( + document_data, _helpers.FieldPath() + ) ] field_paths = [ field_path.to_api_repr() for field_path in sorted(field_paths) @@ -290,32 +280,31 @@ def _write_pb_for_set(document_path, document_data, merge): def _set_helper(self, merge=False, **option_kwargs): # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) commit_response = mock.Mock( - write_results=[mock.sentinel.write_result], - spec=['write_results']) + write_results=[mock.sentinel.write_result], spec=["write_results"] + ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. - client = _make_client('db-dee-bee') + client = _make_client("db-dee-bee") client._firestore_api_internal = firestore_api # Actually make a document and call create(). - document = self._make_one('User', 'Interface', client=client) - document_data = { - 'And': 500, - 'Now': b'\xba\xaa\xaa \xba\xaa\xaa', - } + document = self._make_one("User", "Interface", client=client) + document_data = {"And": 500, "Now": b"\xba\xaa\xaa \xba\xaa\xaa"} write_result = document.set(document_data, merge) # Verify the response and the mocks. self.assertIs(write_result, mock.sentinel.write_result) - write_pb = self._write_pb_for_set( - document._document_path, document_data, merge) + write_pb = self._write_pb_for_set(document._document_path, document_data, merge) firestore_api.commit.assert_called_once_with( - client._database_string, [write_pb], transaction=None, - metadata=client._rpc_metadata) + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) def test_set(self): self._set_helper() @@ -332,8 +321,7 @@ def _write_pb_for_update(document_path, update_values, field_paths): return write_pb2.Write( update=document_pb2.Document( - name=document_path, - fields=_helpers.encode_dict(update_values), + name=document_path, fields=_helpers.encode_dict(update_values) ), update_mask=common_pb2.DocumentMask(field_paths=field_paths), current_document=common_pb2.Precondition(exists=True), @@ -343,24 +331,22 @@ def _update_helper(self, **option_kwargs): from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) commit_response = mock.Mock( - write_results=[mock.sentinel.write_result], - spec=['write_results']) + write_results=[mock.sentinel.write_result], spec=["write_results"] + ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. 
- client = _make_client('potato-chip') + client = _make_client("potato-chip") client._firestore_api_internal = firestore_api # Actually make a document and call create(). - document = self._make_one('baked', 'Alaska', client=client) + document = self._make_one("baked", "Alaska", client=client) # "Cheat" and use OrderedDict-s so that iteritems() is deterministic. - field_updates = collections.OrderedDict(( - ('hello', 1), - ('then.do', False), - ('goodbye', DELETE_FIELD), - )) + field_updates = collections.OrderedDict( + (("hello", 1), ("then.do", False), ("goodbye", DELETE_FIELD)) + ) if option_kwargs: option = client.write_option(**option_kwargs) write_result = document.update(field_updates, option=option) @@ -371,19 +357,21 @@ def _update_helper(self, **option_kwargs): # Verify the response and the mocks. self.assertIs(write_result, mock.sentinel.write_result) update_values = { - 'hello': field_updates['hello'], - 'then': { - 'do': field_updates['then.do'], - } + "hello": field_updates["hello"], + "then": {"do": field_updates["then.do"]}, } field_paths = list(field_updates.keys()) write_pb = self._write_pb_for_update( - document._document_path, update_values, sorted(field_paths)) + document._document_path, update_values, sorted(field_paths) + ) if option is not None: option.modify_write(write_pb) firestore_api.commit.assert_called_once_with( - client._database_string, [write_pb], transaction=None, - metadata=client._rpc_metadata) + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) def test_update_with_exists(self): with self.assertRaises(ValueError): @@ -395,26 +383,23 @@ def test_update(self): def test_update_with_precondition(self): from google.protobuf import timestamp_pb2 - timestamp = timestamp_pb2.Timestamp( - seconds=1058655101, - nanos=100022244, - ) + timestamp = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244) self._update_helper(last_update_time=timestamp) def test_empty_update(self): # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) commit_response = mock.Mock( - write_results=[mock.sentinel.write_result], - spec=['write_results']) + write_results=[mock.sentinel.write_result], spec=["write_results"] + ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. - client = _make_client('potato-chip') + client = _make_client("potato-chip") client._firestore_api_internal = firestore_api # Actually make a document and call create(). - document = self._make_one('baked', 'Alaska', client=client) + document = self._make_one("baked", "Alaska", client=client) # "Cheat" and use OrderedDict-s so that iteritems() is deterministic. field_updates = {} with self.assertRaises(ValueError): @@ -424,17 +409,18 @@ def _delete_helper(self, **option_kwargs): from google.cloud.firestore_v1beta1.proto import write_pb2 # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['commit']) + firestore_api = mock.Mock(spec=["commit"]) commit_response = mock.Mock( - commit_time=mock.sentinel.commit_time, spec=['commit_time']) + commit_time=mock.sentinel.commit_time, spec=["commit_time"] + ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. - client = _make_client('donut-base') + client = _make_client("donut-base") client._firestore_api_internal = firestore_api # Actually make a document and call delete(). 
- document = self._make_one('where', 'we-are', client=client) + document = self._make_one("where", "we-are", client=client) if option_kwargs: option = client.write_option(**option_kwargs) delete_time = document.delete(option=option) @@ -448,8 +434,11 @@ def _delete_helper(self, **option_kwargs): if option is not None: option.modify_write(write_pb) firestore_api.commit.assert_called_once_with( - client._database_string, [write_pb], transaction=None, - metadata=client._rpc_metadata) + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) def test_delete(self): self._delete_helper() @@ -457,14 +446,10 @@ def test_delete(self): def test_delete_with_option(self): from google.protobuf import timestamp_pb2 - timestamp_pb = timestamp_pb2.Timestamp( - seconds=1058655101, - nanos=100022244, - ) + timestamp_pb = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244) self._delete_helper(last_update_time=timestamp_pb) - def _get_helper( - self, field_paths=None, use_transaction=False, not_found=False): + def _get_helper(self, field_paths=None, use_transaction=False, not_found=False): from google.api_core.exceptions import NotFound from google.cloud.firestore_v1beta1.proto import common_pb2 from google.cloud.firestore_v1beta1.proto import document_pb2 @@ -473,30 +458,29 @@ def _get_helper( # Create a minimal fake GAPIC with a dummy response. create_time = 123 update_time = 234 - firestore_api = mock.Mock(spec=['get_document']) + firestore_api = mock.Mock(spec=["get_document"]) response = mock.create_autospec(document_pb2.Document) response.fields = {} response.create_time = create_time response.update_time = update_time if not_found: - firestore_api.get_document.side_effect = NotFound('testing') + firestore_api.get_document.side_effect = NotFound("testing") else: firestore_api.get_document.return_value = response - client = _make_client('donut-base') + client = _make_client("donut-base") client._firestore_api_internal = firestore_api - document = self._make_one('where', 'we-are', client=client) + document = self._make_one("where", "we-are", client=client) if use_transaction: transaction = Transaction(client) - transaction_id = transaction._id = b'asking-me-2' + transaction_id = transaction._id = b"asking-me-2" else: transaction = None - snapshot = document.get( - field_paths=field_paths, transaction=transaction) + snapshot = document.get(field_paths=field_paths, transaction=transaction) self.assertIs(snapshot.reference, document) if not_found: @@ -527,7 +511,8 @@ def _get_helper( document._document_path, mask=mask, transaction=expected_transaction_id, - metadata=client._rpc_metadata) + metadata=client._rpc_metadata, + ) def test_get_not_found(self): self._get_helper(not_found=True) @@ -537,13 +522,13 @@ def test_get_default(self): def test_get_w_string_field_path(self): with self.assertRaises(ValueError): - self._get_helper(field_paths='foo') + self._get_helper(field_paths="foo") def test_get_with_field_path(self): - self._get_helper(field_paths=['foo']) + self._get_helper(field_paths=["foo"]) def test_get_with_multiple_field_paths(self): - self._get_helper(field_paths=['foo', 'bar.baz']) + self._get_helper(field_paths=["foo", "bar.baz"]) def test_get_with_transaction(self): self._get_helper(use_transaction=True) @@ -551,13 +536,12 @@ def test_get_with_transaction(self): def _collections_helper(self, page_size=None): from google.api_core.page_iterator import Iterator from google.api_core.page_iterator import Page - from 
google.cloud.firestore_v1beta1.collection import ( - CollectionReference) + from google.cloud.firestore_v1beta1.collection import CollectionReference from google.cloud.firestore_v1beta1.gapic.firestore_client import ( - FirestoreClient) + FirestoreClient, + ) class _Iterator(Iterator): - def __init__(self, pages): super(_Iterator, self).__init__(client=None) self._pages = pages @@ -567,7 +551,7 @@ def _next_page(self): page, self._pages = self._pages[0], self._pages[1:] return Page(self, page, self.item_to_value) - collection_ids = ['coll-1', 'coll-2'] + collection_ids = ["coll-1", "coll-2"] iterator = _Iterator(pages=[collection_ids]) api_client = mock.create_autospec(FirestoreClient) api_client.list_collection_ids.return_value = iterator @@ -576,7 +560,7 @@ def _next_page(self): client._firestore_api_internal = api_client # Actually make a document and call delete(). - document = self._make_one('where', 'we-are', client=client) + document = self._make_one("where", "we-are", client=client) if page_size is not None: collections = list(document.collections(page_size=page_size)) else: @@ -590,9 +574,7 @@ def _next_page(self): self.assertEqual(collection.id, collection_id) api_client.list_collection_ids.assert_called_once_with( - document._document_path, - page_size=page_size, - metadata=client._rpc_metadata, + document._document_path, page_size=page_size, metadata=client._rpc_metadata ) def test_collections_wo_page_size(self): @@ -601,18 +583,15 @@ def test_collections_wo_page_size(self): def test_collections_w_page_size(self): self._collections_helper(page_size=10) - @mock.patch('google.cloud.firestore_v1beta1.document.Watch', autospec=True) + @mock.patch("google.cloud.firestore_v1beta1.document.Watch", autospec=True) def test_on_snapshot(self, watch): - client = mock.Mock( - _database_string='sprinklez', - spec=['_database_string']) - document = self._make_one('yellow', 'mellow', client=client) + client = mock.Mock(_database_string="sprinklez", spec=["_database_string"]) + document = self._make_one("yellow", "mellow", client=client) document.on_snapshot(None) watch.for_document.assert_called_once() class TestDocumentSnapshot(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1.document import DocumentSnapshot @@ -629,12 +608,16 @@ def _make_reference(self, *args, **kwargs): return DocumentReference(*args, **kwargs) def test_constructor(self): - reference = self._make_reference( - 'hi', 'bye', client=mock.sentinel.client) - data = {'zoop': 83} + reference = self._make_reference("hi", "bye", client=mock.sentinel.client) + data = {"zoop": 83} snapshot = self._make_one( - reference, data, True, mock.sentinel.read_time, - mock.sentinel.create_time, mock.sentinel.update_time) + reference, + data, + True, + mock.sentinel.read_time, + mock.sentinel.create_time, + mock.sentinel.update_time, + ) self.assertIs(snapshot._reference, reference) self.assertEqual(snapshot._data, data) self.assertIsNot(snapshot._data, data) # Make sure copied. 
@@ -645,7 +628,8 @@ def test_constructor(self): def test__client_property(self): reference = self._make_reference( - 'ok', 'fine', 'now', 'fore', client=mock.sentinel.client) + "ok", "fine", "now", "fore", client=mock.sentinel.client + ) snapshot = self._make_one(reference, {}, False, None, None, None) self.assertIs(snapshot._client, mock.sentinel.client) @@ -658,48 +642,44 @@ def test_exists_property(self): self.assertTrue(snapshot2.exists) def test_id_property(self): - document_id = 'around' + document_id = "around" reference = self._make_reference( - 'look', document_id, client=mock.sentinel.client) + "look", document_id, client=mock.sentinel.client + ) snapshot = self._make_one(reference, {}, True, None, None, None) self.assertEqual(snapshot.id, document_id) self.assertEqual(reference.id, document_id) def test_reference_property(self): - snapshot = self._make_one( - mock.sentinel.reference, {}, True, None, None, None) + snapshot = self._make_one(mock.sentinel.reference, {}, True, None, None, None) self.assertIs(snapshot.reference, mock.sentinel.reference) def test_get(self): - data = {'one': {'bold': 'move'}} + data = {"one": {"bold": "move"}} snapshot = self._make_one(None, data, True, None, None, None) - first_read = snapshot.get('one') - second_read = snapshot.get('one') - self.assertEqual(first_read, data.get('one')) - self.assertIsNot(first_read, data.get('one')) + first_read = snapshot.get("one") + second_read = snapshot.get("one") + self.assertEqual(first_read, data.get("one")) + self.assertIsNot(first_read, data.get("one")) self.assertEqual(first_read, second_read) self.assertIsNot(first_read, second_read) with self.assertRaises(KeyError): - snapshot.get('two') + snapshot.get("two") def test_nonexistent_snapshot(self): snapshot = self._make_one(None, None, False, None, None, None) - self.assertIsNone(snapshot.get('one')) + self.assertIsNone(snapshot.get("one")) def test_to_dict(self): - data = { - 'a': 10, - 'b': ['definitely', 'mutable'], - 'c': {'45': 50}, - } + data = {"a": 10, "b": ["definitely", "mutable"], "c": {"45": 50}} snapshot = self._make_one(None, data, True, None, None, None) as_dict = snapshot.to_dict() self.assertEqual(as_dict, data) self.assertIsNot(as_dict, data) # Check that the data remains unchanged. 
- as_dict['b'].append('hi') + as_dict["b"].append("hi") self.assertEqual(data, snapshot.to_dict()) self.assertNotEqual(data, as_dict) @@ -710,7 +690,6 @@ def test_non_existent(self): class Test__get_document_path(unittest.TestCase): - @staticmethod def _call_fut(client, path): from google.cloud.firestore_v1beta1.document import _get_document_path @@ -718,18 +697,18 @@ def _call_fut(client, path): return _get_document_path(client, path) def test_it(self): - project = 'prah-jekt' + project = "prah-jekt" client = _make_client(project=project) - path = ('Some', 'Document', 'Child', 'Shockument') + path = ("Some", "Document", "Child", "Shockument") document_path = self._call_fut(client, path) - expected = 'projects/{}/databases/{}/documents/{}'.format( - project, client._database, '/'.join(path)) + expected = "projects/{}/databases/{}/documents/{}".format( + project, client._database, "/".join(path) + ) self.assertEqual(document_path, expected) class Test__consume_single_get(unittest.TestCase): - @staticmethod def _call_fut(response_iterator): from google.cloud.firestore_v1beta1.document import _consume_single_get @@ -753,7 +732,6 @@ def test_failure_too_many(self): class Test__first_write_result(unittest.TestCase): - @staticmethod def _call_fut(write_results): from google.cloud.firestore_v1beta1.document import _first_write_result @@ -765,10 +743,7 @@ def test_success(self): from google.cloud.firestore_v1beta1.proto import write_pb2 single_result = write_pb2.WriteResult( - update_time=timestamp_pb2.Timestamp( - seconds=1368767504, - nanos=458000123, - ), + update_time=timestamp_pb2.Timestamp(seconds=1368767504, nanos=458000123) ) write_results = [single_result] result = self._call_fut(write_results) @@ -795,7 +770,7 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_client(project='project-project'): +def _make_client(project="project-project"): from google.cloud.firestore_v1beta1.client import Client credentials = _make_credentials() diff --git a/firestore/tests/unit/test_order.py b/firestore/tests/unit/test_order.py index 71f411e2c2d7..a68f3ae1b250 100644 --- a/firestore/tests/unit/test_order.py +++ b/firestore/tests/unit/test_order.py @@ -46,8 +46,8 @@ def test_order(self): int_max_value = 2 ** 31 - 1 int_min_value = -(2 ** 31) float_min_value = 1.175494351 ** -38 - float_nan = float('nan') - inf = float('inf') + float_nan = float("nan") + inf = float("inf") groups = [None] * 65 @@ -66,8 +66,12 @@ def test_order(self): groups[8] = [_int_value(-1), _double_value(-1.0)] groups[9] = [_double_value(-float_min_value)] # zeros all compare the same. 
- groups[10] = [_int_value(0), _double_value(-0.0), - _double_value(0.0), _double_value(+0.0)] + groups[10] = [ + _int_value(0), + _double_value(-0.0), + _double_value(0.0), + _double_value(+0.0), + ] groups[11] = [_double_value(float_min_value)] groups[12] = [_int_value(1), _double_value(1.0)] groups[13] = [_double_value(1.1)] @@ -92,33 +96,26 @@ def test_order(self): groups[27] = [_string_value("\u00e9a")] # blobs - groups[28] = [_blob_value(b'')] - groups[29] = [_blob_value(b'\x00')] - groups[30] = [_blob_value(b'\x00\x01\x02\x03\x04')] - groups[31] = [_blob_value(b'\x00\x01\x02\x04\x03')] - groups[32] = [_blob_value(b'\x7f')] + groups[28] = [_blob_value(b"")] + groups[29] = [_blob_value(b"\x00")] + groups[30] = [_blob_value(b"\x00\x01\x02\x03\x04")] + groups[31] = [_blob_value(b"\x00\x01\x02\x04\x03")] + groups[32] = [_blob_value(b"\x7f")] # resource names - groups[33] = [ - _reference_value("projects/p1/databases/d1/documents/c1/doc1")] - groups[34] = [ - _reference_value("projects/p1/databases/d1/documents/c1/doc2")] + groups[33] = [_reference_value("projects/p1/databases/d1/documents/c1/doc1")] + groups[34] = [_reference_value("projects/p1/databases/d1/documents/c1/doc2")] groups[35] = [ - _reference_value( - "projects/p1/databases/d1/documents/c1/doc2/c2/doc1")] + _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc1") + ] groups[36] = [ - _reference_value( - "projects/p1/databases/d1/documents/c1/doc2/c2/doc2")] - groups[37] = [ - _reference_value("projects/p1/databases/d1/documents/c10/doc1")] - groups[38] = [ - _reference_value("projects/p1/databases/d1/documents/c2/doc1")] - groups[39] = [ - _reference_value("projects/p2/databases/d2/documents/c1/doc1")] - groups[40] = [ - _reference_value("projects/p2/databases/d2/documents/c1-/doc1")] - groups[41] = [ - _reference_value("projects/p2/databases/d3/documents/c1-/doc1")] + _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc2") + ] + groups[37] = [_reference_value("projects/p1/databases/d1/documents/c10/doc1")] + groups[38] = [_reference_value("projects/p1/databases/d1/documents/c2/doc1")] + groups[39] = [_reference_value("projects/p2/databases/d2/documents/c1/doc1")] + groups[40] = [_reference_value("projects/p2/databases/d2/documents/c1-/doc1")] + groups[41] = [_reference_value("projects/p2/databases/d3/documents/c1-/doc1")] # geo points groups[42] = [_geoPoint_value(-90, -180)] @@ -144,10 +141,7 @@ def test_order(self): # objects groups[60] = [_object_value({"bar": 0})] - groups[61] = [_object_value({ - "bar": 0, - "foo": 1 - })] + groups[61] = [_object_value({"bar": 0, "foo": 1})] groups[62] = [_object_value({"bar": 1})] groups[63] = [_object_value({"bar": 2})] groups[64] = [_object_value({"bar": "0"})] @@ -161,17 +155,20 @@ def test_order(self): expected = Order._compare_to(i, j) self.assertEqual( - target.compare(left, right), expected, + target.compare(left, right), + expected, "comparing L->R {} ({}) to {} ({})".format( - i, left, j, right) + i, left, j, right + ), ) expected = Order._compare_to(j, i) self.assertEqual( - target.compare(right, left), expected, + target.compare(right, left), + expected, "comparing R->L {} ({}) to {} ({})".format( - j, right, i, left) - + j, right, i, left + ), ) def test_typeorder_type_failure(self): @@ -189,11 +186,9 @@ def test_failure_to_find_type(self): right = mock.Mock() # Patch from value to get to the deep compare. Since left is a bad type # expect this to fail with value error. 
- with mock.patch.object(TypeOrder, 'from_value',) as to: + with mock.patch.object(TypeOrder, "from_value") as to: to.value = None - with self.assertRaisesRegex( - ValueError, "'Unknown ``value_type``" - ): + with self.assertRaisesRegex(ValueError, "'Unknown ``value_type``"): target.compare(left, right) def test_compare_objects_different_keys(self): @@ -236,7 +231,8 @@ def nullValue(): def _timestamp_value(seconds, nanos): return document_pb2.Value( - timestamp_value=timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)) + timestamp_value=timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos) + ) def _geoPoint_value(latitude, longitude): diff --git a/firestore/tests/unit/test_query.py b/firestore/tests/unit/test_query.py index 31d7a6eba7ab..2a71f3ec7391 100644 --- a/firestore/tests/unit/test_query.py +++ b/firestore/tests/unit/test_query.py @@ -48,13 +48,13 @@ def test_constructor_defaults(self): def _make_one_all_fields(self, limit=9876, offset=12, skip_fields=()): kwargs = { - 'projection': mock.sentinel.projection, - 'field_filters': mock.sentinel.filters, - 'orders': mock.sentinel.orders, - 'limit': limit, - 'offset': offset, - 'start_at': mock.sentinel.start_at, - 'end_at': mock.sentinel.end_at, + "projection": mock.sentinel.projection, + "field_filters": mock.sentinel.filters, + "orders": mock.sentinel.orders, + "limit": limit, + "offset": offset, + "start_at": mock.sentinel.start_at, + "end_at": mock.sentinel.end_at, } for field in skip_fields: kwargs.pop(field) @@ -74,7 +74,7 @@ def test_constructor_explicit(self): self.assertIs(query._end_at, mock.sentinel.end_at) def test__client_property(self): - parent = mock.Mock(_client=mock.sentinel.client, spec=['_client']) + parent = mock.Mock(_client=mock.sentinel.client, spec=["_client"]) query = self._make_one(parent) self.assertIs(query._client, mock.sentinel.client) @@ -98,50 +98,50 @@ def _make_projection_for_select(field_paths): fields=[ query_pb2.StructuredQuery.FieldReference(field_path=field_path) for field_path in field_paths - ], + ] ) def test_select_invalid_path(self): query = self._make_one(mock.sentinel.parent) with self.assertRaises(ValueError): - query.select(['*']) + query.select(["*"]) def test_select(self): query1 = self._make_one_all_fields() - field_paths2 = ['foo', 'bar'] + field_paths2 = ["foo", "bar"] query2 = query1.select(field_paths2) self.assertIsNot(query2, query1) self.assertIsInstance(query2, self._get_target_class()) self.assertEqual( - query2._projection, - self._make_projection_for_select(field_paths2)) - self._compare_queries(query1, query2, '_projection') + query2._projection, self._make_projection_for_select(field_paths2) + ) + self._compare_queries(query1, query2, "_projection") # Make sure it overrides. 
- field_paths3 = ['foo.baz'] + field_paths3 = ["foo.baz"] query3 = query2.select(field_paths3) self.assertIsNot(query3, query2) self.assertIsInstance(query3, self._get_target_class()) self.assertEqual( - query3._projection, - self._make_projection_for_select(field_paths3)) - self._compare_queries(query2, query3, '_projection') + query3._projection, self._make_projection_for_select(field_paths3) + ) + self._compare_queries(query2, query3, "_projection") def test_where_invalid_path(self): query = self._make_one(mock.sentinel.parent) with self.assertRaises(ValueError): - query.where('*', '==', 1) + query.where("*", "==", 1) def test_where(self): from google.cloud.firestore_v1beta1.gapic import enums from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import query_pb2 - query = self._make_one_all_fields(skip_fields=('field_filters',)) - new_query = query.where('power.level', '>', 9000) + query = self._make_one_all_fields(skip_fields=("field_filters",)) + new_query = query.where("power.level", ">", 9000) self.assertIsNot(query, new_query) self.assertIsInstance(new_query, self._get_target_class()) @@ -149,20 +149,18 @@ def test_where(self): field_pb = new_query._field_filters[0] expected_pb = query_pb2.StructuredQuery.FieldFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path='power.level', - ), + field=query_pb2.StructuredQuery.FieldReference(field_path="power.level"), op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, value=document_pb2.Value(integer_value=9000), ) self.assertEqual(field_pb, expected_pb) - self._compare_queries(query, new_query, '_field_filters') + self._compare_queries(query, new_query, "_field_filters") - def _where_unary_helper(self, value, op_enum, op_string='=='): + def _where_unary_helper(self, value, op_enum, op_string="=="): from google.cloud.firestore_v1beta1.proto import query_pb2 - query = self._make_one_all_fields(skip_fields=('field_filters',)) - field_path = 'feeeld' + query = self._make_one_all_fields(skip_fields=("field_filters",)) + field_path = "feeeld" new_query = query.where(field_path, op_string, value) self.assertIsNot(query, new_query) @@ -171,13 +169,11 @@ def _where_unary_helper(self, value, op_enum, op_string='=='): field_pb = new_query._field_filters[0] expected_pb = query_pb2.StructuredQuery.UnaryFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), op=op_enum, ) self.assertEqual(field_pb, expected_pb) - self._compare_queries(query, new_query, '_field_filters') + self._compare_queries(query, new_query, "_field_filters") def test_where_eq_null(self): from google.cloud.firestore_v1beta1.gapic import enums @@ -187,18 +183,17 @@ def test_where_eq_null(self): def test_where_gt_null(self): with self.assertRaises(ValueError): - self._where_unary_helper(None, 0, op_string='>') + self._where_unary_helper(None, 0, op_string=">") def test_where_eq_nan(self): from google.cloud.firestore_v1beta1.gapic import enums op_enum = enums.StructuredQuery.UnaryFilter.Operator.IS_NAN - self._where_unary_helper(float('nan'), op_enum) + self._where_unary_helper(float("nan"), op_enum) def test_where_le_nan(self): with self.assertRaises(ValueError): - self._where_unary_helper(float('nan'), 0, op_string='<=') - + self._where_unary_helper(float("nan"), 0, op_string="<=") def test_where_w_delete(self): from google.cloud.firestore_v1beta1 import DELETE_FIELD @@ -228,33 +223,34 @@ def 
test_order_by_invalid_path(self): query = self._make_one(mock.sentinel.parent) with self.assertRaises(ValueError): - query.order_by('*') - + query.order_by("*") def test_order_by(self): from google.cloud.firestore_v1beta1.gapic import enums klass = self._get_target_class() - query1 = self._make_one_all_fields(skip_fields=('orders',)) + query1 = self._make_one_all_fields(skip_fields=("orders",)) - field_path2 = 'a' + field_path2 = "a" query2 = query1.order_by(field_path2) self.assertIsNot(query2, query1) self.assertIsInstance(query2, klass) order_pb2 = _make_order_pb( - field_path2, enums.StructuredQuery.Direction.ASCENDING) + field_path2, enums.StructuredQuery.Direction.ASCENDING + ) self.assertEqual(query2._orders, (order_pb2,)) - self._compare_queries(query1, query2, '_orders') + self._compare_queries(query1, query2, "_orders") # Make sure it appends to the orders. - field_path3 = 'b' + field_path3 = "b" query3 = query2.order_by(field_path3, direction=klass.DESCENDING) self.assertIsNot(query3, query2) self.assertIsInstance(query3, klass) order_pb3 = _make_order_pb( - field_path3, enums.StructuredQuery.Direction.DESCENDING) + field_path3, enums.StructuredQuery.Direction.DESCENDING + ) self.assertEqual(query3._orders, (order_pb2, order_pb3)) - self._compare_queries(query2, query3, '_orders') + self._compare_queries(query2, query3, "_orders") def test_limit(self): query1 = self._make_one_all_fields() @@ -264,7 +260,7 @@ def test_limit(self): self.assertIsNot(query2, query1) self.assertIsInstance(query2, self._get_target_class()) self.assertEqual(query2._limit, limit2) - self._compare_queries(query1, query2, '_limit') + self._compare_queries(query1, query2, "_limit") # Make sure it overrides. limit3 = 10 @@ -272,7 +268,7 @@ def test_limit(self): self.assertIsNot(query3, query2) self.assertIsInstance(query3, self._get_target_class()) self.assertEqual(query3._limit, limit3) - self._compare_queries(query2, query3, '_limit') + self._compare_queries(query2, query3, "_limit") def test_offset(self): query1 = self._make_one_all_fields() @@ -282,7 +278,7 @@ def test_offset(self): self.assertIsNot(query2, query1) self.assertIsInstance(query2, self._get_target_class()) self.assertEqual(query2._offset, offset2) - self._compare_queries(query1, query2, '_offset') + self._compare_queries(query1, query2, "_offset") # Make sure it overrides. 
offset3 = 35 @@ -290,7 +286,7 @@ def test_offset(self): self.assertIsNot(query3, query2) self.assertIsInstance(query3, self._get_target_class()) self.assertEqual(query3._offset, offset3) - self._compare_queries(query2, query3, '_offset') + self._compare_queries(query2, query3, "_offset") @staticmethod def _make_snapshot(values): @@ -299,7 +295,7 @@ def _make_snapshot(values): return DocumentSnapshot(None, values, True, None, None, None) def test__cursor_helper_w_dict(self): - values = {'a': 7, 'b': 'foo'} + values = {"a": 7, "b": "foo"} query1 = self._make_one(mock.sentinel.parent) query2 = query1._cursor_helper(values, True, True) @@ -317,7 +313,7 @@ def test__cursor_helper_w_dict(self): self.assertTrue(before) def test__cursor_helper_w_tuple(self): - values = (7, 'foo') + values = (7, "foo") query1 = self._make_one(mock.sentinel.parent) query2 = query1._cursor_helper(values, False, True) @@ -335,7 +331,7 @@ def test__cursor_helper_w_tuple(self): self.assertFalse(before) def test__cursor_helper_w_list(self): - values = [7, 'foo'] + values = [7, "foo"] query1 = self._make_one(mock.sentinel.parent) query2 = query1._cursor_helper(values, True, False) @@ -355,7 +351,7 @@ def test__cursor_helper_w_list(self): def test__cursor_helper_w_snapshot(self): - values = {'a': 7, 'b': 'foo'} + values = {"a": 7, "b": "foo"} snapshot = self._make_snapshot(values) query1 = self._make_one(mock.sentinel.parent) @@ -375,88 +371,88 @@ def test__cursor_helper_w_snapshot(self): self.assertFalse(before) def test_start_at(self): - query1 = self._make_one_all_fields(skip_fields=('orders',)) - query2 = query1.order_by('hi') + query1 = self._make_one_all_fields(skip_fields=("orders",)) + query2 = query1.order_by("hi") - document_fields3 = {'hi': 'mom'} + document_fields3 = {"hi": "mom"} query3 = query2.start_at(document_fields3) self.assertIsNot(query3, query2) self.assertIsInstance(query3, self._get_target_class()) self.assertEqual(query3._start_at, (document_fields3, True)) - self._compare_queries(query2, query3, '_start_at') + self._compare_queries(query2, query3, "_start_at") # Make sure it overrides. - query4 = query3.order_by('bye') - values5 = {'hi': 'zap', 'bye': 88} + query4 = query3.order_by("bye") + values5 = {"hi": "zap", "bye": 88} document_fields5 = self._make_snapshot(values5) query5 = query4.start_at(document_fields5) self.assertIsNot(query5, query4) self.assertIsInstance(query5, self._get_target_class()) self.assertEqual(query5._start_at, (values5, True)) - self._compare_queries(query4, query5, '_start_at') + self._compare_queries(query4, query5, "_start_at") def test_start_after(self): - query1 = self._make_one_all_fields(skip_fields=('orders',)) - query2 = query1.order_by('down') + query1 = self._make_one_all_fields(skip_fields=("orders",)) + query2 = query1.order_by("down") - document_fields3 = {'down': 99.75} + document_fields3 = {"down": 99.75} query3 = query2.start_after(document_fields3) self.assertIsNot(query3, query2) self.assertIsInstance(query3, self._get_target_class()) self.assertEqual(query3._start_at, (document_fields3, False)) - self._compare_queries(query2, query3, '_start_at') + self._compare_queries(query2, query3, "_start_at") # Make sure it overrides. 
- query4 = query3.order_by('out') - values5 = {'down': 100.25, 'out': b'\x00\x01'} + query4 = query3.order_by("out") + values5 = {"down": 100.25, "out": b"\x00\x01"} document_fields5 = self._make_snapshot(values5) query5 = query4.start_after(document_fields5) self.assertIsNot(query5, query4) self.assertIsInstance(query5, self._get_target_class()) self.assertEqual(query5._start_at, (values5, False)) - self._compare_queries(query4, query5, '_start_at') + self._compare_queries(query4, query5, "_start_at") def test_end_before(self): - query1 = self._make_one_all_fields(skip_fields=('orders',)) - query2 = query1.order_by('down') + query1 = self._make_one_all_fields(skip_fields=("orders",)) + query2 = query1.order_by("down") - document_fields3 = {'down': 99.75} + document_fields3 = {"down": 99.75} query3 = query2.end_before(document_fields3) self.assertIsNot(query3, query2) self.assertIsInstance(query3, self._get_target_class()) self.assertEqual(query3._end_at, (document_fields3, True)) - self._compare_queries(query2, query3, '_end_at') + self._compare_queries(query2, query3, "_end_at") # Make sure it overrides. - query4 = query3.order_by('out') - values5 = {'down': 100.25, 'out': b'\x00\x01'} + query4 = query3.order_by("out") + values5 = {"down": 100.25, "out": b"\x00\x01"} document_fields5 = self._make_snapshot(values5) query5 = query4.end_before(document_fields5) self.assertIsNot(query5, query4) self.assertIsInstance(query5, self._get_target_class()) self.assertEqual(query5._end_at, (values5, True)) - self._compare_queries(query4, query5, '_end_at') + self._compare_queries(query4, query5, "_end_at") def test_end_at(self): - query1 = self._make_one_all_fields(skip_fields=('orders',)) - query2 = query1.order_by('hi') + query1 = self._make_one_all_fields(skip_fields=("orders",)) + query2 = query1.order_by("hi") - document_fields3 = {'hi': 'mom'} + document_fields3 = {"hi": "mom"} query3 = query2.end_at(document_fields3) self.assertIsNot(query3, query2) self.assertIsInstance(query3, self._get_target_class()) self.assertEqual(query3._end_at, (document_fields3, False)) - self._compare_queries(query2, query3, '_end_at') + self._compare_queries(query2, query3, "_end_at") # Make sure it overrides. 
- query4 = query3.order_by('bye') - values5 = {'hi': 'zap', 'bye': 88} + query4 = query3.order_by("bye") + values5 = {"hi": "zap", "bye": 88} document_fields5 = self._make_snapshot(values5) query5 = query4.end_at(document_fields5) self.assertIsNot(query5, query4) self.assertIsInstance(query5, self._get_target_class()) self.assertEqual(query5._end_at, (values5, False)) - self._compare_queries(query4, query5, '_end_at') + self._compare_queries(query4, query5, "_end_at") def test__filters_pb_empty(self): query = self._make_one(mock.sentinel.parent) @@ -469,16 +465,14 @@ def test__filters_pb_single(self): from google.cloud.firestore_v1beta1.proto import query_pb2 query1 = self._make_one(mock.sentinel.parent) - query2 = query1.where('x.y', '>', 50.5) + query2 = query1.where("x.y", ">", 50.5) filter_pb = query2._filters_pb() expected_pb = query_pb2.StructuredQuery.Filter( field_filter=query_pb2.StructuredQuery.FieldFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path='x.y', - ), + field=query_pb2.StructuredQuery.FieldReference(field_path="x.y"), op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, value=document_pb2.Value(double_value=50.5), - ), + ) ) self.assertEqual(filter_pb, expected_pb) @@ -488,8 +482,8 @@ def test__filters_pb_multi(self): from google.cloud.firestore_v1beta1.proto import query_pb2 query1 = self._make_one(mock.sentinel.parent) - query2 = query1.where('x.y', '>', 50.5) - query3 = query2.where('ABC', '==', 123) + query2 = query1.where("x.y", ">", 50.5) + query3 = query2.where("ABC", "==", 123) filter_pb = query3._filters_pb() op_class = enums.StructuredQuery.FieldFilter.Operator @@ -500,23 +494,23 @@ def test__filters_pb_multi(self): query_pb2.StructuredQuery.Filter( field_filter=query_pb2.StructuredQuery.FieldFilter( field=query_pb2.StructuredQuery.FieldReference( - field_path='x.y', + field_path="x.y" ), op=op_class.GREATER_THAN, value=document_pb2.Value(double_value=50.5), - ), + ) ), query_pb2.StructuredQuery.Filter( field_filter=query_pb2.StructuredQuery.FieldFilter( field=query_pb2.StructuredQuery.FieldReference( - field_path='ABC', + field_path="ABC" ), op=op_class.EQUAL, value=document_pb2.Value(integer_value=123), - ), + ) ), ], - ), + ) ) self.assertEqual(filter_pb, expected_pb) @@ -528,12 +522,11 @@ def test__normalize_projection_empty(self): projection = self._make_projection_for_select([]) query = self._make_one(mock.sentinel.parent) normalized = query._normalize_projection(projection) - field_paths = [ - field_ref.field_path for field_ref in normalized.fields] - self.assertEqual(field_paths, ['__name__']) + field_paths = [field_ref.field_path for field_ref in normalized.fields] + self.assertEqual(field_paths, ["__name__"]) def test__normalize_projection_non_empty(self): - projection = self._make_projection_for_select(['a', 'b']) + projection = self._make_projection_for_select(["a", "b"]) query = self._make_one(mock.sentinel.parent) self.assertIs(query._normalize_projection(projection), projection) @@ -550,16 +543,14 @@ def test__normalize_cursor_no_order(self): def test__normalize_cursor_as_list_mismatched_order(self): cursor = ([1, 2], True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") with self.assertRaises(ValueError): query._normalize_cursor(cursor, query._orders) def test__normalize_cursor_as_dict_mismatched_order(self): - cursor = ({'a': 1}, True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + 
cursor = ({"a": 1}, True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") with self.assertRaises(ValueError): query._normalize_cursor(cursor, query._orders) @@ -568,8 +559,7 @@ def test__normalize_cursor_w_delete(self): from google.cloud.firestore_v1beta1 import DELETE_FIELD cursor = ([DELETE_FIELD], True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") with self.assertRaises(ValueError): query._normalize_cursor(cursor, query._orders) @@ -578,8 +568,7 @@ def test__normalize_cursor_w_server_timestamp(self): from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP cursor = ([SERVER_TIMESTAMP], True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") with self.assertRaises(ValueError): query._normalize_cursor(cursor, query._orders) @@ -588,8 +577,7 @@ def test__normalize_cursor_w_array_remove(self): from google.cloud.firestore_v1beta1 import ArrayRemove cursor = ([ArrayRemove([1, 3, 5])], True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") with self.assertRaises(ValueError): query._normalize_cursor(cursor, query._orders) @@ -598,27 +586,22 @@ def test__normalize_cursor_w_array_union(self): from google.cloud.firestore_v1beta1 import ArrayUnion cursor = ([ArrayUnion([2, 4, 8])], True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") with self.assertRaises(ValueError): query._normalize_cursor(cursor, query._orders) def test__normalize_cursor_as_list_hit(self): cursor = ([1], True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - self.assertEqual( - query._normalize_cursor(cursor, query._orders), ([1], True)) + self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) def test__normalize_cursor_as_dict_hit(self): - cursor = ({'b': 1}, True) - query = self._make_one( - mock.sentinel.parent).order_by('b', 'ASCENDING') + cursor = ({"b": 1}, True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - self.assertEqual( - query._normalize_cursor(cursor, query._orders), ([1], True)) + self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) def test__to_protobuf_all_fields(self): from google.protobuf import wrappers_pb2 @@ -626,59 +609,43 @@ def test__to_protobuf_all_fields(self): from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='cat', spec=['id']) + parent = mock.Mock(id="cat", spec=["id"]) query1 = self._make_one(parent) - query2 = query1.select(['X', 'Y', 'Z']) - query3 = query2.where('Y', '>', 2.5) - query4 = query3.order_by('X') + query2 = query1.select(["X", "Y", "Z"]) + query3 = query2.where("Y", ">", 2.5) + query4 = query3.order_by("X") query5 = query4.limit(17) query6 = query5.offset(3) - query7 = query6.start_at({'X': 10}) - query8 = query7.end_at({'X': 25}) + query7 = query6.start_at({"X": 10}) + query8 = query7.end_at({"X": 25}) structured_query_pb = query8._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + 
query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'select': query_pb2.StructuredQuery.Projection( + "select": query_pb2.StructuredQuery.Projection( fields=[ - query_pb2.StructuredQuery.FieldReference( - field_path=field_path - ) - for field_path in ['X', 'Y', 'Z'] - ], + query_pb2.StructuredQuery.FieldReference(field_path=field_path) + for field_path in ["X", "Y", "Z"] + ] ), - 'where': query_pb2.StructuredQuery.Filter( + "where": query_pb2.StructuredQuery.Filter( field_filter=query_pb2.StructuredQuery.FieldFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path='Y', - ), + field=query_pb2.StructuredQuery.FieldReference(field_path="Y"), op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, value=document_pb2.Value(double_value=2.5), - ), + ) ), - 'order_by': [ - _make_order_pb( - 'X', - enums.StructuredQuery.Direction.ASCENDING, - ), + "order_by": [ + _make_order_pb("X", enums.StructuredQuery.Direction.ASCENDING) ], - 'start_at': query_pb2.Cursor( - values=[ - document_pb2.Value(integer_value=10), - ], - before=True, + "start_at": query_pb2.Cursor( + values=[document_pb2.Value(integer_value=10)], before=True ), - 'end_at': query_pb2.Cursor( - values=[ - document_pb2.Value(integer_value=25), - ], - ), - 'offset': 3, - 'limit': wrappers_pb2.Int32Value(value=17), + "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=25)]), + "offset": 3, + "limit": wrappers_pb2.Int32Value(value=17), } expected_pb = query_pb2.StructuredQuery(**query_kwargs) self.assertEqual(structured_query_pb, expected_pb) @@ -686,25 +653,21 @@ def test__to_protobuf_all_fields(self): def test__to_protobuf_select_only(self): from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='cat', spec=['id']) + parent = mock.Mock(id="cat", spec=["id"]) query1 = self._make_one(parent) - field_paths = ['a.b', 'a.c', 'd'] + field_paths = ["a.b", "a.c", "d"] query2 = query1.select(field_paths) structured_query_pb = query2._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'select': query_pb2.StructuredQuery.Projection( + "select": query_pb2.StructuredQuery.Projection( fields=[ - query_pb2.StructuredQuery.FieldReference( - field_path=field_path - ) + query_pb2.StructuredQuery.FieldReference(field_path=field_path) for field_path in field_paths - ], + ] ), } expected_pb = query_pb2.StructuredQuery(**query_kwargs) @@ -715,25 +678,21 @@ def test__to_protobuf_where_only(self): from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='dog', spec=['id']) + parent = mock.Mock(id="dog", spec=["id"]) query1 = self._make_one(parent) - query2 = query1.where('a', '==', u'b') + query2 = query1.where("a", "==", u"b") structured_query_pb = query2._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'where': query_pb2.StructuredQuery.Filter( + "where": query_pb2.StructuredQuery.Filter( field_filter=query_pb2.StructuredQuery.FieldFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path='a', - ), + field=query_pb2.StructuredQuery.FieldReference(field_path="a"), op=enums.StructuredQuery.FieldFilter.Operator.EQUAL, - 
value=document_pb2.Value(string_value=u'b'), - ), + value=document_pb2.Value(string_value=u"b"), + ) ), } expected_pb = query_pb2.StructuredQuery(**query_kwargs) @@ -743,22 +702,17 @@ def test__to_protobuf_order_by_only(self): from google.cloud.firestore_v1beta1.gapic import enums from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='fish', spec=['id']) + parent = mock.Mock(id="fish", spec=["id"]) query1 = self._make_one(parent) - query2 = query1.order_by('abc') + query2 = query1.order_by("abc") structured_query_pb = query2._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'order_by': [ - _make_order_pb( - 'abc', - enums.StructuredQuery.Direction.ASCENDING, - ), + "order_by": [ + _make_order_pb("abc", enums.StructuredQuery.Direction.ASCENDING) ], } expected_pb = query_pb2.StructuredQuery(**query_kwargs) @@ -770,27 +724,19 @@ def test__to_protobuf_start_at_only(self): from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='phish', spec=['id']) - query = self._make_one( - parent).order_by('X.Y').start_after({'X': {'Y': u'Z'}}) + parent = mock.Mock(id="phish", spec=["id"]) + query = self._make_one(parent).order_by("X.Y").start_after({"X": {"Y": u"Z"}}) structured_query_pb = query._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'order_by': [ - _make_order_pb( - 'X.Y', - enums.StructuredQuery.Direction.ASCENDING, - ), + "order_by": [ + _make_order_pb("X.Y", enums.StructuredQuery.Direction.ASCENDING) ], - 'start_at': query_pb2.Cursor( - values=[ - document_pb2.Value(string_value=u'Z'), - ], + "start_at": query_pb2.Cursor( + values=[document_pb2.Value(string_value=u"Z")] ), } expected_pb = query_pb2.StructuredQuery(**query_kwargs) @@ -802,28 +748,18 @@ def test__to_protobuf_end_at_only(self): from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='ghoti', spec=['id']) - query = self._make_one( - parent).order_by('a').end_at({'a': 88}) + parent = mock.Mock(id="ghoti", spec=["id"]) + query = self._make_one(parent).order_by("a").end_at({"a": 88}) structured_query_pb = query._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'order_by': [ - _make_order_pb( - 'a', - enums.StructuredQuery.Direction.ASCENDING, - ), + "order_by": [ + _make_order_pb("a", enums.StructuredQuery.Direction.ASCENDING) ], - 'end_at': query_pb2.Cursor( - values=[ - document_pb2.Value(integer_value=88), - ], - ), + "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=88)]), } expected_pb = query_pb2.StructuredQuery(**query_kwargs) self.assertEqual(structured_query_pb, expected_pb) @@ -831,19 +767,17 @@ def test__to_protobuf_end_at_only(self): def test__to_protobuf_offset_only(self): from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='cartt', spec=['id']) + parent = mock.Mock(id="cartt", spec=["id"]) query1 = self._make_one(parent) offset = 14 query2 = query1.offset(offset) structured_query_pb = 
query2._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'offset': offset, + "offset": offset, } expected_pb = query_pb2.StructuredQuery(**query_kwargs) self.assertEqual(structured_query_pb, expected_pb) @@ -852,19 +786,17 @@ def test__to_protobuf_limit_only(self): from google.protobuf import wrappers_pb2 from google.cloud.firestore_v1beta1.proto import query_pb2 - parent = mock.Mock(id='donut', spec=['id']) + parent = mock.Mock(id="donut", spec=["id"]) query1 = self._make_one(parent) limit = 31 query2 = query1.limit(limit) structured_query_pb = query2._to_protobuf() query_kwargs = { - 'from': [ - query_pb2.StructuredQuery.CollectionSelector( - collection_id=parent.id, - ), + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) ], - 'limit': wrappers_pb2.Int32Value(value=limit), + "limit": wrappers_pb2.Int32Value(value=limit), } expected_pb = query_pb2.StructuredQuery(**query_kwargs) @@ -872,19 +804,19 @@ def test__to_protobuf_limit_only(self): def test_get_simple(self): # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=['run_query']) + firestore_api = mock.Mock(spec=["run_query"]) # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api # Make a **real** collection reference as parent. - parent = client.collection('dee') + parent = client.collection("dee") # Add a dummy response to the minimal fake GAPIC. _, expected_prefix = parent._parent_info() - name = '{}/sleep'.format(expected_prefix) - data = {'snooze': 10} + name = "{}/sleep".format(expected_prefix) + data = {"snooze": 10} response_pb = _make_query_response(name=name, data=data) firestore_api.run_query.return_value = iter([response_pb]) @@ -895,18 +827,21 @@ def test_get_simple(self): returned = list(get_response) self.assertEqual(len(returned), 1) snapshot = returned[0] - self.assertEqual(snapshot.reference._path, ('dee', 'sleep')) + self.assertEqual(snapshot.reference._path, ("dee", "sleep")) self.assertEqual(snapshot.to_dict(), data) # Verify the mock call. parent_path, _ = parent._parent_info() firestore_api.run_query.assert_called_once_with( - parent_path, query._to_protobuf(), transaction=None, - metadata=client._rpc_metadata) + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) def test_get_with_transaction(self): # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=['run_query']) + firestore_api = mock.Mock(spec=["run_query"]) # Attach the fake GAPIC to a real client. client = _make_client() @@ -914,16 +849,16 @@ def test_get_with_transaction(self): # Create a real-ish transaction for this client. transaction = client.transaction() - txn_id = b'\x00\x00\x01-work-\xf2' + txn_id = b"\x00\x00\x01-work-\xf2" transaction._id = txn_id # Make a **real** collection reference as parent. - parent = client.collection('declaration') + parent = client.collection("declaration") # Add a dummy response to the minimal fake GAPIC. 
parent_path, expected_prefix = parent._parent_info() - name = '{}/burger'.format(expected_prefix) - data = {'lettuce': b'\xee\x87'} + name = "{}/burger".format(expected_prefix) + data = {"lettuce": b"\xee\x87"} response_pb = _make_query_response(name=name, data=data) firestore_api.run_query.return_value = iter([response_pb]) @@ -934,17 +869,20 @@ def test_get_with_transaction(self): returned = list(get_response) self.assertEqual(len(returned), 1) snapshot = returned[0] - self.assertEqual(snapshot.reference._path, ('declaration', 'burger')) + self.assertEqual(snapshot.reference._path, ("declaration", "burger")) self.assertEqual(snapshot.to_dict(), data) # Verify the mock call. firestore_api.run_query.assert_called_once_with( - parent_path, query._to_protobuf(), transaction=txn_id, - metadata=client._rpc_metadata) + parent_path, + query._to_protobuf(), + transaction=txn_id, + metadata=client._rpc_metadata, + ) def test_get_no_results(self): # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['run_query']) + firestore_api = mock.Mock(spec=["run_query"]) empty_response = _make_query_response() run_query_response = iter([empty_response]) firestore_api.run_query.return_value = run_query_response @@ -954,7 +892,7 @@ def test_get_no_results(self): client._firestore_api_internal = firestore_api # Make a **real** collection reference as parent. - parent = client.collection('dah', 'dah', 'dum') + parent = client.collection("dah", "dah", "dum") query = self._make_one(parent) get_response = query.get() @@ -964,12 +902,15 @@ def test_get_no_results(self): # Verify the mock call. parent_path, _ = parent._parent_info() firestore_api.run_query.assert_called_once_with( - parent_path, query._to_protobuf(), transaction=None, - metadata=client._rpc_metadata) + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) def test_get_second_response_in_empty_stream(self): # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=['run_query']) + firestore_api = mock.Mock(spec=["run_query"]) empty_response1 = _make_query_response() empty_response2 = _make_query_response() run_query_response = iter([empty_response1, empty_response2]) @@ -980,7 +921,7 @@ def test_get_second_response_in_empty_stream(self): client._firestore_api_internal = firestore_api # Make a **real** collection reference as parent. - parent = client.collection('dah', 'dah', 'dum') + parent = client.collection("dah", "dah", "dum") query = self._make_one(parent) get_response = query.get() @@ -996,28 +937,30 @@ def test_get_second_response_in_empty_stream(self): # Verify the mock call. parent_path, _ = parent._parent_info() firestore_api.run_query.assert_called_once_with( - parent_path, query._to_protobuf(), transaction=None, - metadata=client._rpc_metadata) + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) def test_get_with_skipped_results(self): # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=['run_query']) + firestore_api = mock.Mock(spec=["run_query"]) # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api # Make a **real** collection reference as parent. - parent = client.collection('talk', 'and', 'chew-gum') + parent = client.collection("talk", "and", "chew-gum") # Add two dummy responses to the minimal fake GAPIC. 
_, expected_prefix = parent._parent_info() response_pb1 = _make_query_response(skipped_results=1) - name = '{}/clock'.format(expected_prefix) - data = {'noon': 12, 'nested': {'bird': 10.5}} + name = "{}/clock".format(expected_prefix) + data = {"noon": 12, "nested": {"bird": 10.5}} response_pb2 = _make_query_response(name=name, data=data) - firestore_api.run_query.return_value = iter( - [response_pb1, response_pb2]) + firestore_api.run_query.return_value = iter([response_pb1, response_pb2]) # Execute the query and check the response. query = self._make_one(parent) @@ -1026,37 +969,38 @@ def test_get_with_skipped_results(self): returned = list(get_response) self.assertEqual(len(returned), 1) snapshot = returned[0] - self.assertEqual( - snapshot.reference._path, ('talk', 'and', 'chew-gum', 'clock')) + self.assertEqual(snapshot.reference._path, ("talk", "and", "chew-gum", "clock")) self.assertEqual(snapshot.to_dict(), data) # Verify the mock call. parent_path, _ = parent._parent_info() firestore_api.run_query.assert_called_once_with( - parent_path, query._to_protobuf(), transaction=None, - metadata=client._rpc_metadata) + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) def test_get_empty_after_first_response(self): from google.cloud.firestore_v1beta1.query import _EMPTY_DOC_TEMPLATE # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=['run_query']) + firestore_api = mock.Mock(spec=["run_query"]) # Attach the fake GAPIC to a real client. client = _make_client() client._firestore_api_internal = firestore_api # Make a **real** collection reference as parent. - parent = client.collection('charles') + parent = client.collection("charles") # Add two dummy responses to the minimal fake GAPIC. _, expected_prefix = parent._parent_info() - name = '{}/bark'.format(expected_prefix) - data = {'lee': 'hoop'} + name = "{}/bark".format(expected_prefix) + data = {"lee": "hoop"} response_pb1 = _make_query_response(name=name, data=data) response_pb2 = _make_query_response() - firestore_api.run_query.return_value = iter( - [response_pb1, response_pb2]) + firestore_api.run_query.return_value = iter([response_pb1, response_pb2]) # Execute the query and check the response. query = self._make_one(parent) @@ -1073,10 +1017,13 @@ def test_get_empty_after_first_response(self): # Verify the mock call. 
parent_path, _ = parent._parent_info() firestore_api.run_query.assert_called_once_with( - parent_path, query._to_protobuf(), transaction=None, - metadata=client._rpc_metadata) + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) - @mock.patch('google.cloud.firestore_v1beta1.query.Watch', autospec=True) + @mock.patch("google.cloud.firestore_v1beta1.query.Watch", autospec=True) def test_on_snapshot(self, watch): query = self._make_one(mock.sentinel.parent) query.on_snapshot(None) @@ -1086,10 +1033,10 @@ def test_comparator_no_ordering(self): query = self._make_one(mock.sentinel.parent) query._orders = [] doc1 = mock.Mock() - doc1.reference._path = ('col', 'adocument1') + doc1.reference._path = ("col", "adocument1") doc2 = mock.Mock() - doc2.reference._path = ('col', 'adocument2') + doc2.reference._path = ("col", "adocument2") sort = query._comparator(doc1, doc2) self.assertEqual(sort, -1) @@ -1098,10 +1045,10 @@ def test_comparator_no_ordering_same_id(self): query = self._make_one(mock.sentinel.parent) query._orders = [] doc1 = mock.Mock() - doc1.reference._path = ('col', 'adocument1') + doc1.reference._path = ("col", "adocument1") doc2 = mock.Mock() - doc2.reference._path = ('col', 'adocument1') + doc2.reference._path = ("col", "adocument1") sort = query._comparator(doc1, doc2) self.assertEqual(sort, 0) @@ -1109,18 +1056,22 @@ def test_comparator_no_ordering_same_id(self): def test_comparator_ordering(self): query = self._make_one(mock.sentinel.parent) orderByMock = mock.Mock() - orderByMock.field.field_path = 'last' + orderByMock.field.field_path = "last" orderByMock.direction = 1 # ascending query._orders = [orderByMock] doc1 = mock.Mock() - doc1.reference._path = ('col', 'adocument1') - doc1._data = {'first': {'stringValue': 'Ada'}, - 'last': {'stringValue': 'secondlovelace'}} + doc1.reference._path = ("col", "adocument1") + doc1._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "secondlovelace"}, + } doc2 = mock.Mock() - doc2.reference._path = ('col', 'adocument2') - doc2._data = {'first': {'stringValue': 'Ada'}, - 'last': {'stringValue': 'lovelace'}} + doc2.reference._path = ("col", "adocument2") + doc2._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "lovelace"}, + } sort = query._comparator(doc1, doc2) self.assertEqual(sort, 1) @@ -1128,18 +1079,22 @@ def test_comparator_ordering(self): def test_comparator_ordering_descending(self): query = self._make_one(mock.sentinel.parent) orderByMock = mock.Mock() - orderByMock.field.field_path = 'last' + orderByMock.field.field_path = "last" orderByMock.direction = -1 # descending query._orders = [orderByMock] doc1 = mock.Mock() - doc1.reference._path = ('col', 'adocument1') - doc1._data = {'first': {'stringValue': 'Ada'}, - 'last': {'stringValue': 'secondlovelace'}} + doc1.reference._path = ("col", "adocument1") + doc1._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "secondlovelace"}, + } doc2 = mock.Mock() - doc2.reference._path = ('col', 'adocument2') - doc2._data = {'first': {'stringValue': 'Ada'}, - 'last': {'stringValue': 'lovelace'}} + doc2.reference._path = ("col", "adocument2") + doc2._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "lovelace"}, + } sort = query._comparator(doc1, doc2) self.assertEqual(sort, -1) @@ -1147,24 +1102,25 @@ def test_comparator_ordering_descending(self): def test_comparator_missing_order_by_field_in_data_raises(self): query = self._make_one(mock.sentinel.parent) orderByMock = 
mock.Mock() - orderByMock.field.field_path = 'last' + orderByMock.field.field_path = "last" orderByMock.direction = 1 # ascending query._orders = [orderByMock] doc1 = mock.Mock() - doc1.reference._path = ('col', 'adocument1') + doc1.reference._path = ("col", "adocument1") doc1._data = {} doc2 = mock.Mock() - doc2.reference._path = ('col', 'adocument2') - doc2._data = {'first': {'stringValue': 'Ada'}, - 'last': {'stringValue': 'lovelace'}} + doc2.reference._path = ("col", "adocument2") + doc2._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "lovelace"}, + } with self.assertRaisesRegex(ValueError, "Can only compare fields "): query._comparator(doc1, doc2) class Test__enum_from_op_string(unittest.TestCase): - @staticmethod def _call_fut(op_string): from google.cloud.firestore_v1beta1.query import _enum_from_op_string @@ -1175,21 +1131,19 @@ def test_success(self): from google.cloud.firestore_v1beta1.gapic import enums op_class = enums.StructuredQuery.FieldFilter.Operator - self.assertEqual(self._call_fut('<'), op_class.LESS_THAN) - self.assertEqual(self._call_fut('<='), op_class.LESS_THAN_OR_EQUAL) - self.assertEqual(self._call_fut('=='), op_class.EQUAL) - self.assertEqual(self._call_fut('>='), op_class.GREATER_THAN_OR_EQUAL) - self.assertEqual(self._call_fut('>'), op_class.GREATER_THAN) - self.assertEqual( - self._call_fut('array_contains'), op_class.ARRAY_CONTAINS) + self.assertEqual(self._call_fut("<"), op_class.LESS_THAN) + self.assertEqual(self._call_fut("<="), op_class.LESS_THAN_OR_EQUAL) + self.assertEqual(self._call_fut("=="), op_class.EQUAL) + self.assertEqual(self._call_fut(">="), op_class.GREATER_THAN_OR_EQUAL) + self.assertEqual(self._call_fut(">"), op_class.GREATER_THAN) + self.assertEqual(self._call_fut("array_contains"), op_class.ARRAY_CONTAINS) def test_failure(self): with self.assertRaises(ValueError): - self._call_fut('?') + self._call_fut("?") class Test__isnan(unittest.TestCase): - @staticmethod def _call_fut(value): from google.cloud.firestore_v1beta1.query import _isnan @@ -1197,18 +1151,17 @@ def _call_fut(value): return _isnan(value) def test_valid(self): - self.assertTrue(self._call_fut(float('nan'))) + self.assertTrue(self._call_fut(float("nan"))) def test_invalid(self): self.assertFalse(self._call_fut(51.5)) self.assertFalse(self._call_fut(None)) - self.assertFalse(self._call_fut('str')) + self.assertFalse(self._call_fut("str")) self.assertFalse(self._call_fut(int)) self.assertFalse(self._call_fut(1.0 + 1.0j)) class Test__enum_from_direction(unittest.TestCase): - @staticmethod def _call_fut(direction): from google.cloud.firestore_v1beta1.query import _enum_from_direction @@ -1220,18 +1173,15 @@ def test_success(self): from google.cloud.firestore_v1beta1.query import Query dir_class = enums.StructuredQuery.Direction - self.assertEqual( - self._call_fut(Query.ASCENDING), dir_class.ASCENDING) - self.assertEqual( - self._call_fut(Query.DESCENDING), dir_class.DESCENDING) + self.assertEqual(self._call_fut(Query.ASCENDING), dir_class.ASCENDING) + self.assertEqual(self._call_fut(Query.DESCENDING), dir_class.DESCENDING) def test_failure(self): with self.assertRaises(ValueError): - self._call_fut('neither-ASCENDING-nor-DESCENDING') + self._call_fut("neither-ASCENDING-nor-DESCENDING") class Test__filter_pb(unittest.TestCase): - @staticmethod def _call_fut(field_or_unary): from google.cloud.firestore_v1beta1.query import _filter_pb @@ -1243,14 +1193,11 @@ def test_unary(self): from google.cloud.firestore_v1beta1.proto import query_pb2 unary_pb = 
query_pb2.StructuredQuery.UnaryFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path='a.b.c', - ), + field=query_pb2.StructuredQuery.FieldReference(field_path="a.b.c"), op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL, ) filter_pb = self._call_fut(unary_pb) - expected_pb = query_pb2.StructuredQuery.Filter( - unary_filter=unary_pb) + expected_pb = query_pb2.StructuredQuery.Filter(unary_filter=unary_pb) self.assertEqual(filter_pb, expected_pb) def test_field(self): @@ -1259,15 +1206,12 @@ def test_field(self): from google.cloud.firestore_v1beta1.proto import query_pb2 field_filter_pb = query_pb2.StructuredQuery.FieldFilter( - field=query_pb2.StructuredQuery.FieldReference( - field_path='XYZ', - ), + field=query_pb2.StructuredQuery.FieldReference(field_path="XYZ"), op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, value=document_pb2.Value(double_value=90.75), ) filter_pb = self._call_fut(field_filter_pb) - expected_pb = query_pb2.StructuredQuery.Filter( - field_filter=field_filter_pb) + expected_pb = query_pb2.StructuredQuery.Filter(field_filter=field_filter_pb) self.assertEqual(filter_pb, expected_pb) def test_bad_type(self): @@ -1276,7 +1220,6 @@ def test_bad_type(self): class Test__cursor_pb(unittest.TestCase): - @staticmethod def _call_fut(cursor_pair): from google.cloud.firestore_v1beta1.query import _cursor_pb @@ -1296,23 +1239,17 @@ def test_success(self): cursor_pb = self._call_fut(cursor_pair) expected_pb = query_pb2.Cursor( - values=[ - _helpers.encode_value(value) for value in data - ], - before=True, + values=[_helpers.encode_value(value) for value in data], before=True ) self.assertEqual(cursor_pb, expected_pb) class Test__query_response_to_snapshot(unittest.TestCase): - @staticmethod def _call_fut(response_pb, collection, expected_prefix): - from google.cloud.firestore_v1beta1.query import ( - _query_response_to_snapshot) + from google.cloud.firestore_v1beta1.query import _query_response_to_snapshot - return _query_response_to_snapshot( - response_pb, collection, expected_prefix) + return _query_response_to_snapshot(response_pb, collection, expected_prefix) def test_empty(self): response_pb = _make_query_response() @@ -1331,17 +1268,18 @@ def test_response(self): from google.cloud.firestore_v1beta1.document import DocumentSnapshot client = _make_client() - collection = client.collection('a', 'b', 'c') + collection = client.collection("a", "b", "c") _, expected_prefix = collection._parent_info() # Create name for the protobuf. 
- doc_id = 'gigantic' - name = '{}/{}'.format(expected_prefix, doc_id) - data = {'a': 901, 'b': True} + doc_id = "gigantic" + name = "{}/{}".format(expected_prefix, doc_id) + data = {"a": 901, "b": True} response_pb = _make_query_response(name=name, data=data) snapshot, skipped_results = self._call_fut( - response_pb, collection, expected_prefix) + response_pb, collection, expected_prefix + ) self.assertEqual(skipped_results, 0) self.assertIsInstance(snapshot, DocumentSnapshot) expected_path = collection._path + (doc_id,) @@ -1349,10 +1287,8 @@ def test_response(self): self.assertEqual(snapshot.to_dict(), data) self.assertTrue(snapshot.exists) self.assertEqual(snapshot.read_time, response_pb.read_time) - self.assertEqual( - snapshot.create_time, response_pb.document.create_time) - self.assertEqual( - snapshot.update_time, response_pb.document.update_time) + self.assertEqual(snapshot.create_time, response_pb.document.create_time) + self.assertEqual(snapshot.update_time, response_pb.document.update_time) def _make_credentials(): @@ -1361,7 +1297,7 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_client(project='project-project'): +def _make_client(project="project-project"): from google.cloud.firestore_v1beta1.client import Client credentials = _make_credentials() @@ -1372,9 +1308,7 @@ def _make_order_pb(field_path, direction): from google.cloud.firestore_v1beta1.proto import query_pb2 return query_pb2.StructuredQuery.Order( - field=query_pb2.StructuredQuery.FieldReference( - field_path=field_path, - ), + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), direction=direction, ) @@ -1388,14 +1322,13 @@ def _make_query_response(**kwargs): now = datetime.datetime.utcnow() read_time = _datetime_to_pb_timestamp(now) - kwargs['read_time'] = read_time + kwargs["read_time"] = read_time - name = kwargs.pop('name', None) - data = kwargs.pop('data', None) + name = kwargs.pop("name", None) + data = kwargs.pop("data", None) if name is not None and data is not None: document_pb = document_pb2.Document( - name=name, - fields=_helpers.encode_dict(data), + name=name, fields=_helpers.encode_dict(data) ) delta = datetime.timedelta(seconds=100) update_time = _datetime_to_pb_timestamp(now - delta) @@ -1403,6 +1336,6 @@ def _make_query_response(**kwargs): document_pb.update_time.CopyFrom(update_time) document_pb.create_time.CopyFrom(create_time) - kwargs['document'] = document_pb + kwargs["document"] = document_pb return firestore_pb2.RunQueryResponse(**kwargs) diff --git a/firestore/tests/unit/test_transaction.py b/firestore/tests/unit/test_transaction.py index f6139d9b8991..3259e3e227e3 100644 --- a/firestore/tests/unit/test_transaction.py +++ b/firestore/tests/unit/test_transaction.py @@ -18,7 +18,6 @@ class TestTransaction(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1.transaction import Transaction @@ -41,7 +40,8 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): transaction = self._make_one( - mock.sentinel.client, max_attempts=10, read_only=True) + mock.sentinel.client, max_attempts=10, read_only=True + ) self.assertIs(transaction._client, mock.sentinel.client) self.assertEqual(transaction._write_pbs, []) self.assertEqual(transaction._max_attempts, 10) @@ -71,15 +71,15 @@ def test__options_protobuf_read_only(self): transaction = self._make_one(mock.sentinel.client, read_only=True) options_pb = transaction._options_protobuf(None) expected_pb = 
common_pb2.TransactionOptions( - read_only=common_pb2.TransactionOptions.ReadOnly()) + read_only=common_pb2.TransactionOptions.ReadOnly() + ) self.assertEqual(options_pb, expected_pb) def test__options_protobuf_read_only_retry(self): - from google.cloud.firestore_v1beta1.transaction import ( - _CANT_RETRY_READ_ONLY) + from google.cloud.firestore_v1beta1.transaction import _CANT_RETRY_READ_ONLY transaction = self._make_one(mock.sentinel.client, read_only=True) - retry_id = b'illuminate' + retry_id = b"illuminate" with self.assertRaises(ValueError) as exc_info: transaction._options_protobuf(retry_id) @@ -95,11 +95,11 @@ def test__options_protobuf_on_retry(self): from google.cloud.firestore_v1beta1.proto import common_pb2 transaction = self._make_one(mock.sentinel.client) - retry_id = b'hocus-pocus' + retry_id = b"hocus-pocus" options_pb = transaction._options_protobuf(retry_id) expected_pb = common_pb2.TransactionOptions( read_write=common_pb2.TransactionOptions.ReadWrite( - retry_transaction=retry_id, + retry_transaction=retry_id ) ) self.assertEqual(options_pb, expected_pb) @@ -107,7 +107,7 @@ def test__options_protobuf_on_retry(self): def test_in_progress_property(self): transaction = self._make_one(mock.sentinel.client) self.assertFalse(transaction.in_progress) - transaction._id = b'not-none-bites' + transaction._id = b"not-none-bites" self.assertTrue(transaction.in_progress) def test_id_property(self): @@ -121,10 +121,10 @@ def test__begin(self): # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) - txn_id = b'to-begin' - response = firestore_pb2.BeginTransactionResponse( - transaction=txn_id) + firestore_client.FirestoreClient, instance=True + ) + txn_id = b"to-begin" + response = firestore_pb2.BeginTransactionResponse(transaction=txn_id) firestore_api.begin_transaction.return_value = response # Attach the fake GAPIC to a real client. @@ -141,15 +141,15 @@ def test__begin(self): # Verify the called mock. firestore_api.begin_transaction.assert_called_once_with( - client._database_string, options_=None, - metadata=client._rpc_metadata) + client._database_string, options_=None, metadata=client._rpc_metadata + ) def test__begin_failure(self): from google.cloud.firestore_v1beta1.transaction import _CANT_BEGIN client = _make_client() transaction = self._make_one(client) - transaction._id = b'not-none' + transaction._id = b"not-none" with self.assertRaises(ValueError) as exc_info: transaction._begin() @@ -160,8 +160,9 @@ def test__begin_failure(self): def test__clean_up(self): transaction = self._make_one(mock.sentinel.client) transaction._write_pbs.extend( - [mock.sentinel.write_pb1, mock.sentinel.write_pb2]) - transaction._id = b'not-this-time-my-friend' + [mock.sentinel.write_pb1, mock.sentinel.write_pb2] + ) + transaction._id = b"not-this-time-my-friend" ret_val = transaction._clean_up() self.assertIsNone(ret_val) @@ -175,7 +176,8 @@ def test__rollback(self): # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) + firestore_client.FirestoreClient, instance=True + ) firestore_api.rollback.return_value = empty_pb2.Empty() # Attach the fake GAPIC to a real client. @@ -184,7 +186,7 @@ def test__rollback(self): # Actually make a transaction and roll it back. 
transaction = self._make_one(client) - txn_id = b'to-be-r\x00lled' + txn_id = b"to-be-r\x00lled" transaction._id = txn_id ret_val = transaction._rollback() self.assertIsNone(ret_val) @@ -192,7 +194,8 @@ def test__rollback(self): # Verify the called mock. firestore_api.rollback.assert_called_once_with( - client._database_string, txn_id, metadata=client._rpc_metadata) + client._database_string, txn_id, metadata=client._rpc_metadata + ) def test__rollback_not_allowed(self): from google.cloud.firestore_v1beta1.transaction import _CANT_ROLLBACK @@ -212,8 +215,9 @@ def test__rollback_failure(self): # Create a minimal fake GAPIC with a dummy failure. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) - exc = exceptions.InternalServerError('Fire during rollback.') + firestore_client.FirestoreClient, instance=True + ) + exc = exceptions.InternalServerError("Fire during rollback.") firestore_api.rollback.side_effect = exc # Attach the fake GAPIC to a real client. @@ -222,7 +226,7 @@ def test__rollback_failure(self): # Actually make a transaction and roll it back. transaction = self._make_one(client) - txn_id = b'roll-bad-server' + txn_id = b"roll-bad-server" transaction._id = txn_id with self.assertRaises(exceptions.InternalServerError) as exc_info: @@ -234,7 +238,8 @@ def test__rollback_failure(self): # Verify the called mock. firestore_api.rollback.assert_called_once_with( - client._database_string, txn_id, metadata=client._rpc_metadata) + client._database_string, txn_id, metadata=client._rpc_metadata + ) def test__commit(self): from google.cloud.firestore_v1beta1.gapic import firestore_client @@ -243,24 +248,23 @@ def test__commit(self): # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) + firestore_client.FirestoreClient, instance=True + ) commit_response = firestore_pb2.CommitResponse( - write_results=[ - write_pb2.WriteResult(), - ], + write_results=[write_pb2.WriteResult()] ) firestore_api.commit.return_value = commit_response # Attach the fake GAPIC to a real client. - client = _make_client('phone-joe') + client = _make_client("phone-joe") client._firestore_api_internal = firestore_api # Actually make a transaction with some mutations and call _commit(). transaction = self._make_one(client) - txn_id = b'under-over-thru-woods' + txn_id = b"under-over-thru-woods" transaction._id = txn_id - document = client.document('zap', 'galaxy', 'ship', 'space') - transaction.set(document, {'apple': 4.5}) + document = client.document("zap", "galaxy", "ship", "space") + transaction.set(document, {"apple": 4.5}) write_pbs = transaction._write_pbs[::] write_results = transaction._commit() @@ -271,8 +275,11 @@ def test__commit(self): # Verify the mocks. firestore_api.commit.assert_called_once_with( - client._database_string, write_pbs, transaction=txn_id, - metadata=client._rpc_metadata) + client._database_string, + write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) def test__commit_not_allowed(self): from google.cloud.firestore_v1beta1.transaction import _CANT_COMMIT @@ -290,8 +297,9 @@ def test__commit_failure(self): # Create a minimal fake GAPIC with a dummy failure. 
firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) - exc = exceptions.InternalServerError('Fire during commit.') + firestore_client.FirestoreClient, instance=True + ) + exc = exceptions.InternalServerError("Fire during commit.") firestore_api.commit.side_effect = exc # Attach the fake GAPIC to a real client. @@ -300,10 +308,10 @@ def test__commit_failure(self): # Actually make a transaction with some mutations and call _commit(). transaction = self._make_one(client) - txn_id = b'beep-fail-commit' + txn_id = b"beep-fail-commit" transaction._id = txn_id - transaction.create(client.document('up', 'down'), {'water': 1.0}) - transaction.delete(client.document('up', 'left')) + transaction.create(client.document("up", "down"), {"water": 1.0}) + transaction.delete(client.document("up", "left")) write_pbs = transaction._write_pbs[::] with self.assertRaises(exceptions.InternalServerError) as exc_info: @@ -315,12 +323,14 @@ def test__commit_failure(self): # Verify the called mock. firestore_api.commit.assert_called_once_with( - client._database_string, write_pbs, transaction=txn_id, - metadata=client._rpc_metadata) + client._database_string, + write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) class Test_Transactional(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1.transaction import _Transactional @@ -339,8 +349,8 @@ def test_constructor(self): def test__reset(self): wrapped = self._make_one(mock.sentinel.callable_) - wrapped.current_id = b'not-none' - wrapped.retry_id = b'also-not' + wrapped.current_id = b"not-none" + wrapped.retry_id = b"also-not" ret_val = wrapped._reset() self.assertIsNone(ret_val) @@ -352,9 +362,9 @@ def test__pre_commit_success(self): to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = self._make_one(to_wrap) - txn_id = b'totes-began' + txn_id = b"totes-began" transaction = _make_transaction(txn_id) - result = wrapped._pre_commit(transaction, 'pos', key='word') + result = wrapped._pre_commit(transaction, "pos", key="word") self.assertIs(result, mock.sentinel.result) self.assertEqual(transaction._id, txn_id) @@ -362,11 +372,13 @@ def test__pre_commit_success(self): self.assertEqual(wrapped.retry_id, txn_id) # Verify mocks. 
- to_wrap.assert_called_once_with(transaction, 'pos', key='word') + to_wrap.assert_called_once_with(transaction, "pos", key="word") firestore_api = transaction._client._firestore_api firestore_api.begin_transaction.assert_called_once_with( - transaction._client._database_string, options_=None, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) firestore_api.rollback.assert_not_called() firestore_api.commit.assert_not_called() @@ -375,10 +387,10 @@ def test__pre_commit_retry_id_already_set_success(self): to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = self._make_one(to_wrap) - txn_id1 = b'already-set' + txn_id1 = b"already-set" wrapped.retry_id = txn_id1 - txn_id2 = b'ok-here-too' + txn_id2 = b"ok-here-too" transaction = _make_transaction(txn_id2) result = wrapped._pre_commit(transaction) self.assertIs(result, mock.sentinel.result) @@ -392,21 +404,23 @@ def test__pre_commit_retry_id_already_set_success(self): firestore_api = transaction._client._firestore_api options_ = common_pb2.TransactionOptions( read_write=common_pb2.TransactionOptions.ReadWrite( - retry_transaction=txn_id1, - ), + retry_transaction=txn_id1 + ) ) firestore_api.begin_transaction.assert_called_once_with( - transaction._client._database_string, options_=options_, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + options_=options_, + metadata=transaction._client._rpc_metadata, + ) firestore_api.rollback.assert_not_called() firestore_api.commit.assert_not_called() def test__pre_commit_failure(self): - exc = RuntimeError('Nope not today.') + exc = RuntimeError("Nope not today.") to_wrap = mock.Mock(side_effect=exc, spec=[]) wrapped = self._make_one(to_wrap) - txn_id = b'gotta-fail' + txn_id = b"gotta-fail" transaction = _make_transaction(txn_id) with self.assertRaises(RuntimeError) as exc_info: wrapped._pre_commit(transaction, 10, 20) @@ -420,30 +434,34 @@ def test__pre_commit_failure(self): to_wrap.assert_called_once_with(transaction, 10, 20) firestore_api = transaction._client._firestore_api firestore_api.begin_transaction.assert_called_once_with( - transaction._client._database_string, options_=None, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) firestore_api.rollback.assert_called_once_with( - transaction._client._database_string, txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + txn_id, + metadata=transaction._client._rpc_metadata, + ) firestore_api.commit.assert_not_called() def test__pre_commit_failure_with_rollback_failure(self): from google.api_core import exceptions - exc1 = ValueError('I will not be only failure.') + exc1 = ValueError("I will not be only failure.") to_wrap = mock.Mock(side_effect=exc1, spec=[]) wrapped = self._make_one(to_wrap) - txn_id = b'both-will-fail' + txn_id = b"both-will-fail" transaction = _make_transaction(txn_id) # Actually force the ``rollback`` to fail as well. 
- exc2 = exceptions.InternalServerError('Rollback blues.') + exc2 = exceptions.InternalServerError("Rollback blues.") firestore_api = transaction._client._firestore_api firestore_api.rollback.side_effect = exc2 # Try to ``_pre_commit`` with self.assertRaises(exceptions.InternalServerError) as exc_info: - wrapped._pre_commit(transaction, a='b', c='zebra') + wrapped._pre_commit(transaction, a="b", c="zebra") self.assertIs(exc_info.exception, exc2) self.assertIsNone(transaction._id) @@ -451,19 +469,23 @@ def test__pre_commit_failure_with_rollback_failure(self): self.assertEqual(wrapped.retry_id, txn_id) # Verify mocks. - to_wrap.assert_called_once_with(transaction, a='b', c='zebra') + to_wrap.assert_called_once_with(transaction, a="b", c="zebra") firestore_api.begin_transaction.assert_called_once_with( - transaction._client._database_string, options_=None, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) firestore_api.rollback.assert_called_once_with( - transaction._client._database_string, txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + txn_id, + metadata=transaction._client._rpc_metadata, + ) firestore_api.commit.assert_not_called() def test__maybe_commit_success(self): wrapped = self._make_one(mock.sentinel.callable_) - txn_id = b'nyet' + txn_id = b"nyet" transaction = _make_transaction(txn_id) transaction._id = txn_id # We won't call ``begin()``. succeeded = wrapped._maybe_commit(transaction) @@ -477,15 +499,18 @@ def test__maybe_commit_success(self): firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( - transaction._client._database_string, [], transaction=txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) def test__maybe_commit_failure_read_only(self): from google.api_core import exceptions wrapped = self._make_one(mock.sentinel.callable_) - txn_id = b'failed' + txn_id = b"failed" transaction = _make_transaction(txn_id, read_only=True) transaction._id = txn_id # We won't call ``begin()``. wrapped.current_id = txn_id # We won't call ``_pre_commit()``. @@ -493,7 +518,7 @@ def test__maybe_commit_failure_read_only(self): # Actually force the ``commit`` to fail (use ABORTED, but cannot # retry since read-only). - exc = exceptions.Aborted('Read-only did a bad.') + exc = exceptions.Aborted("Read-only did a bad.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc @@ -509,22 +534,25 @@ def test__maybe_commit_failure_read_only(self): firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( - transaction._client._database_string, [], transaction=txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) def test__maybe_commit_failure_can_retry(self): from google.api_core import exceptions wrapped = self._make_one(mock.sentinel.callable_) - txn_id = b'failed-but-retry' + txn_id = b"failed-but-retry" transaction = _make_transaction(txn_id) transaction._id = txn_id # We won't call ``begin()``. wrapped.current_id = txn_id # We won't call ``_pre_commit()``. 
wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. # Actually force the ``commit`` to fail. - exc = exceptions.Aborted('Read-write did a bad.') + exc = exceptions.Aborted("Read-write did a bad.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc @@ -539,22 +567,25 @@ def test__maybe_commit_failure_can_retry(self): firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( - transaction._client._database_string, [], transaction=txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) def test__maybe_commit_failure_cannot_retry(self): from google.api_core import exceptions wrapped = self._make_one(mock.sentinel.callable_) - txn_id = b'failed-but-not-retryable' + txn_id = b"failed-but-not-retryable" transaction = _make_transaction(txn_id) transaction._id = txn_id # We won't call ``begin()``. wrapped.current_id = txn_id # We won't call ``_pre_commit()``. wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. # Actually force the ``commit`` to fail. - exc = exceptions.InternalServerError('Real bad thing') + exc = exceptions.InternalServerError("Real bad thing") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc @@ -570,16 +601,19 @@ def test__maybe_commit_failure_cannot_retry(self): firestore_api.begin_transaction.assert_not_called() firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( - transaction._client._database_string, [], transaction=txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) def test___call__success_first_attempt(self): to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = self._make_one(to_wrap) - txn_id = b'whole-enchilada' + txn_id = b"whole-enchilada" transaction = _make_transaction(txn_id) - result = wrapped(transaction, 'a', b='c') + result = wrapped(transaction, "a", b="c") self.assertIs(result, mock.sentinel.result) self.assertIsNone(transaction._id) @@ -587,15 +621,20 @@ def test___call__success_first_attempt(self): self.assertEqual(wrapped.retry_id, txn_id) # Verify mocks. 
- to_wrap.assert_called_once_with(transaction, 'a', b='c') + to_wrap.assert_called_once_with(transaction, "a", b="c") firestore_api = transaction._client._firestore_api firestore_api.begin_transaction.assert_called_once_with( - transaction._client._database_string, options_=None, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) firestore_api.rollback.assert_not_called() firestore_api.commit.assert_called_once_with( - transaction._client._database_string, [], transaction=txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) def test___call__success_second_attempt(self): from google.api_core import exceptions @@ -606,23 +645,19 @@ def test___call__success_second_attempt(self): to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = self._make_one(to_wrap) - txn_id = b'whole-enchilada' + txn_id = b"whole-enchilada" transaction = _make_transaction(txn_id) # Actually force the ``commit`` to fail on first / succeed on second. - exc = exceptions.Aborted('Contention junction.') + exc = exceptions.Aborted("Contention junction.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = [ exc, - firestore_pb2.CommitResponse( - write_results=[ - write_pb2.WriteResult(), - ], - ), + firestore_pb2.CommitResponse(write_results=[write_pb2.WriteResult()]), ] # Call the __call__-able ``wrapped``. - result = wrapped(transaction, 'a', b='c') + result = wrapped(transaction, "a", b="c") self.assertIs(result, mock.sentinel.result) self.assertIsNone(transaction._id) @@ -630,55 +665,50 @@ def test___call__success_second_attempt(self): self.assertEqual(wrapped.retry_id, txn_id) # Verify mocks. 
- wrapped_call = mock.call(transaction, 'a', b='c') - self.assertEqual( - to_wrap.mock_calls, - [wrapped_call, wrapped_call]) + wrapped_call = mock.call(transaction, "a", b="c") + self.assertEqual(to_wrap.mock_calls, [wrapped_call, wrapped_call]) firestore_api = transaction._client._firestore_api db_str = transaction._client._database_string options_ = common_pb2.TransactionOptions( - read_write=common_pb2.TransactionOptions.ReadWrite( - retry_transaction=txn_id, - ), + read_write=common_pb2.TransactionOptions.ReadWrite(retry_transaction=txn_id) ) self.assertEqual( firestore_api.begin_transaction.mock_calls, [ mock.call( - db_str, options_=None, - metadata=transaction._client._rpc_metadata), + db_str, options_=None, metadata=transaction._client._rpc_metadata + ), mock.call( - db_str, options_=options_, - metadata=transaction._client._rpc_metadata), + db_str, + options_=options_, + metadata=transaction._client._rpc_metadata, + ), ], ) firestore_api.rollback.assert_not_called() commit_call = mock.call( - db_str, [], transaction=txn_id, - metadata=transaction._client._rpc_metadata) - self.assertEqual( - firestore_api.commit.mock_calls, - [commit_call, commit_call]) + db_str, [], transaction=txn_id, metadata=transaction._client._rpc_metadata + ) + self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call]) def test___call__failure(self): from google.api_core import exceptions - from google.cloud.firestore_v1beta1.transaction import ( - _EXCEED_ATTEMPTS_TEMPLATE) + from google.cloud.firestore_v1beta1.transaction import _EXCEED_ATTEMPTS_TEMPLATE to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) wrapped = self._make_one(to_wrap) - txn_id = b'only-one-shot' + txn_id = b"only-one-shot" transaction = _make_transaction(txn_id, max_attempts=1) # Actually force the ``commit`` to fail. - exc = exceptions.Aborted('Contention just once.') + exc = exceptions.Aborted("Contention just once.") firestore_api = transaction._client._firestore_api firestore_api.commit.side_effect = exc # Call the __call__-able ``wrapped``. with self.assertRaises(ValueError) as exc_info: - wrapped(transaction, 'here', there=1.5) + wrapped(transaction, "here", there=1.5) err_msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts) self.assertEqual(exc_info.exception.args, (err_msg,)) @@ -688,20 +718,26 @@ def test___call__failure(self): self.assertEqual(wrapped.retry_id, txn_id) # Verify mocks. 
- to_wrap.assert_called_once_with(transaction, 'here', there=1.5) + to_wrap.assert_called_once_with(transaction, "here", there=1.5) firestore_api.begin_transaction.assert_called_once_with( - transaction._client._database_string, options_=None, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) firestore_api.rollback.assert_called_once_with( - transaction._client._database_string, txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + txn_id, + metadata=transaction._client._rpc_metadata, + ) firestore_api.commit.assert_called_once_with( - transaction._client._database_string, [], transaction=txn_id, - metadata=transaction._client._rpc_metadata) + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) class Test_transactional(unittest.TestCase): - @staticmethod def _call_fut(to_wrap): from google.cloud.firestore_v1beta1.transaction import transactional @@ -717,62 +753,64 @@ def test_it(self): class Test__commit_with_retry(unittest.TestCase): - @staticmethod def _call_fut(client, write_pbs, transaction_id): - from google.cloud.firestore_v1beta1.transaction import ( - _commit_with_retry) + from google.cloud.firestore_v1beta1.transaction import _commit_with_retry return _commit_with_retry(client, write_pbs, transaction_id) - @mock.patch('google.cloud.firestore_v1beta1.transaction._sleep') + @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep") def test_success_first_attempt(self, _sleep): from google.cloud.firestore_v1beta1.gapic import firestore_client # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) + firestore_client.FirestoreClient, instance=True + ) # Attach the fake GAPIC to a real client. - client = _make_client('summer') + client = _make_client("summer") client._firestore_api_internal = firestore_api # Call function and check result. - txn_id = b'cheeeeeez' - commit_response = self._call_fut( - client, mock.sentinel.write_pbs, txn_id) + txn_id = b"cheeeeeez" + commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id) self.assertIs(commit_response, firestore_api.commit.return_value) # Verify mocks used. _sleep.assert_not_called() firestore_api.commit.assert_called_once_with( - client._database_string, mock.sentinel.write_pbs, - transaction=txn_id, metadata=client._rpc_metadata) + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) - @mock.patch('google.cloud.firestore_v1beta1.transaction._sleep', - side_effect=[2.0, 4.0]) + @mock.patch( + "google.cloud.firestore_v1beta1.transaction._sleep", side_effect=[2.0, 4.0] + ) def test_success_third_attempt(self, _sleep): from google.api_core import exceptions from google.cloud.firestore_v1beta1.gapic import firestore_client # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) + firestore_client.FirestoreClient, instance=True + ) # Make sure the first two requests fail and the third succeeds. 
firestore_api.commit.side_effect = [ - exceptions.ServiceUnavailable('Server sleepy.'), - exceptions.ServiceUnavailable('Server groggy.'), + exceptions.ServiceUnavailable("Server sleepy."), + exceptions.ServiceUnavailable("Server groggy."), mock.sentinel.commit_response, ] # Attach the fake GAPIC to a real client. - client = _make_client('outside') + client = _make_client("outside") client._firestore_api_internal = firestore_api # Call function and check result. - txn_id = b'the-world\x00' - commit_response = self._call_fut( - client, mock.sentinel.write_pbs, txn_id) + txn_id = b"the-world\x00" + commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id) self.assertIs(commit_response, mock.sentinel.commit_response) # Verify mocks used. @@ -781,66 +819,71 @@ def test_success_third_attempt(self, _sleep): _sleep.assert_any_call(2.0) # commit() called same way 3 times. commit_call = mock.call( - client._database_string, mock.sentinel.write_pbs, - transaction=txn_id, metadata=client._rpc_metadata) + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) self.assertEqual( - firestore_api.commit.mock_calls, - [commit_call, commit_call, commit_call]) + firestore_api.commit.mock_calls, [commit_call, commit_call, commit_call] + ) - @mock.patch('google.cloud.firestore_v1beta1.transaction._sleep') + @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep") def test_failure_first_attempt(self, _sleep): from google.api_core import exceptions from google.cloud.firestore_v1beta1.gapic import firestore_client # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) + firestore_client.FirestoreClient, instance=True + ) # Make sure the first request fails with an un-retryable error. - exc = exceptions.ResourceExhausted('We ran out of fries.') + exc = exceptions.ResourceExhausted("We ran out of fries.") firestore_api.commit.side_effect = exc # Attach the fake GAPIC to a real client. - client = _make_client('peanut-butter') + client = _make_client("peanut-butter") client._firestore_api_internal = firestore_api # Call function and check result. - txn_id = b'\x08\x06\x07\x05\x03\x00\x09-jenny' + txn_id = b"\x08\x06\x07\x05\x03\x00\x09-jenny" with self.assertRaises(exceptions.ResourceExhausted) as exc_info: - self._call_fut( - client, mock.sentinel.write_pbs, txn_id) + self._call_fut(client, mock.sentinel.write_pbs, txn_id) self.assertIs(exc_info.exception, exc) # Verify mocks used. _sleep.assert_not_called() firestore_api.commit.assert_called_once_with( - client._database_string, mock.sentinel.write_pbs, - transaction=txn_id, metadata=client._rpc_metadata) + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) - @mock.patch('google.cloud.firestore_v1beta1.transaction._sleep', - return_value=2.0) + @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep", return_value=2.0) def test_failure_second_attempt(self, _sleep): from google.api_core import exceptions from google.cloud.firestore_v1beta1.gapic import firestore_client # Create a minimal fake GAPIC with a dummy result. firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) + firestore_client.FirestoreClient, instance=True + ) # Make sure the first request fails retry-able and second # fails non-retryable. 
- exc1 = exceptions.ServiceUnavailable('Come back next time.') - exc2 = exceptions.InternalServerError('Server on fritz.') + exc1 = exceptions.ServiceUnavailable("Come back next time.") + exc2 = exceptions.InternalServerError("Server on fritz.") firestore_api.commit.side_effect = [exc1, exc2] # Attach the fake GAPIC to a real client. - client = _make_client('peanut-butter') + client = _make_client("peanut-butter") client._firestore_api_internal = firestore_api # Call function and check result. - txn_id = b'the-journey-when-and-where-well-go' + txn_id = b"the-journey-when-and-where-well-go" with self.assertRaises(exceptions.InternalServerError) as exc_info: - self._call_fut( - client, mock.sentinel.write_pbs, txn_id) + self._call_fut(client, mock.sentinel.write_pbs, txn_id) self.assertIs(exc_info.exception, exc2) @@ -848,22 +891,23 @@ def test_failure_second_attempt(self, _sleep): _sleep.assert_called_once_with(1.0) # commit() called same way 2 times. commit_call = mock.call( - client._database_string, mock.sentinel.write_pbs, - transaction=txn_id, metadata=client._rpc_metadata) - self.assertEqual( - firestore_api.commit.mock_calls, [commit_call, commit_call]) + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call]) class Test__sleep(unittest.TestCase): - @staticmethod def _call_fut(current_sleep, **kwargs): from google.cloud.firestore_v1beta1.transaction import _sleep return _sleep(current_sleep, **kwargs) - @mock.patch('random.uniform', return_value=5.5) - @mock.patch('time.sleep', return_value=None) + @mock.patch("random.uniform", return_value=5.5) + @mock.patch("time.sleep", return_value=None) def test_defaults(self, sleep, uniform): curr_sleep = 10.0 self.assertLessEqual(uniform.return_value, curr_sleep) @@ -874,29 +918,27 @@ def test_defaults(self, sleep, uniform): uniform.assert_called_once_with(0.0, curr_sleep) sleep.assert_called_once_with(uniform.return_value) - @mock.patch('random.uniform', return_value=10.5) - @mock.patch('time.sleep', return_value=None) + @mock.patch("random.uniform", return_value=10.5) + @mock.patch("time.sleep", return_value=None) def test_explicit(self, sleep, uniform): curr_sleep = 12.25 self.assertLessEqual(uniform.return_value, curr_sleep) multiplier = 1.5 - new_sleep = self._call_fut( - curr_sleep, max_sleep=100.0, multiplier=multiplier) + new_sleep = self._call_fut(curr_sleep, max_sleep=100.0, multiplier=multiplier) self.assertEqual(new_sleep, multiplier * curr_sleep) uniform.assert_called_once_with(0.0, curr_sleep) sleep.assert_called_once_with(uniform.return_value) - @mock.patch('random.uniform', return_value=6.75) - @mock.patch('time.sleep', return_value=None) + @mock.patch("random.uniform", return_value=6.75) + @mock.patch("time.sleep", return_value=None) def test_exceeds_max(self, sleep, uniform): curr_sleep = 20.0 self.assertLessEqual(uniform.return_value, curr_sleep) max_sleep = 38.5 - new_sleep = self._call_fut( - curr_sleep, max_sleep=max_sleep, multiplier=2.0) + new_sleep = self._call_fut(curr_sleep, max_sleep=max_sleep, multiplier=2.0) self.assertEqual(new_sleep, max_sleep) uniform.assert_called_once_with(0.0, curr_sleep) @@ -909,7 +951,7 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_client(project='feral-tom-cat'): +def _make_client(project="feral-tom-cat"): from google.cloud.firestore_v1beta1.client import Client credentials = _make_credentials() @@ 
-925,18 +967,16 @@ def _make_transaction(txn_id, **txn_kwargs): # Create a fake GAPIC ... firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True) + firestore_client.FirestoreClient, instance=True + ) # ... with a dummy ``BeginTransactionResponse`` result ... - begin_response = firestore_pb2.BeginTransactionResponse( - transaction=txn_id) + begin_response = firestore_pb2.BeginTransactionResponse(transaction=txn_id) firestore_api.begin_transaction.return_value = begin_response # ... and a dummy ``Rollback`` result ... firestore_api.rollback.return_value = empty_pb2.Empty() # ... and a dummy ``Commit`` result. commit_response = firestore_pb2.CommitResponse( - write_results=[ - write_pb2.WriteResult(), - ], + write_results=[write_pb2.WriteResult()] ) firestore_api.commit.return_value = commit_response diff --git a/firestore/tests/unit/test_transforms.py b/firestore/tests/unit/test_transforms.py index 8833848833ae..1a825ba06ecb 100644 --- a/firestore/tests/unit/test_transforms.py +++ b/firestore/tests/unit/test_transforms.py @@ -16,7 +16,6 @@ class Test_ValueList(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.firestore_v1beta1.transforms import _ValueList @@ -27,14 +26,7 @@ def _make_one(self, values): return self._get_target_class()(values) def test_ctor_w_non_list_non_tuple(self): - invalid_values = ( - None, - u'phred', - b'DEADBEEF', - 123, - {}, - object(), - ) + invalid_values = (None, u"phred", b"DEADBEEF", 123, {}, object()) for invalid_value in invalid_values: with self.assertRaises(ValueError): self._make_one(invalid_value) @@ -44,11 +36,11 @@ def test_ctor_w_empty(self): self._make_one([]) def test_ctor_w_non_empty_list(self): - values = ['phred', 'bharney'] + values = ["phred", "bharney"] union = self._make_one(values) self.assertEqual(union.values, values) def test_ctor_w_non_empty_tuple(self): - values = ('phred', 'bharney') + values = ("phred", "bharney") union = self._make_one(values) self.assertEqual(union.values, list(values)) diff --git a/firestore/tests/unit/test_watch.py b/firestore/tests/unit/test_watch.py index b04a68ee9acf..d0ce9d8ecc6c 100644 --- a/firestore/tests/unit/test_watch.py +++ b/firestore/tests/unit/test_watch.py @@ -7,80 +7,85 @@ class TestWatchDocTree(unittest.TestCase): def _makeOne(self): from google.cloud.firestore_v1beta1.watch import WatchDocTree + return WatchDocTree() def test_insert_and_keys(self): inst = self._makeOne() - inst = inst.insert('b', 1) - inst = inst.insert('a', 2) - self.assertEqual(sorted(inst.keys()), ['a', 'b']) + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + self.assertEqual(sorted(inst.keys()), ["a", "b"]) def test_remove_and_keys(self): inst = self._makeOne() - inst = inst.insert('b', 1) - inst = inst.insert('a', 2) - inst = inst.remove('a') - self.assertEqual(sorted(inst.keys()), ['b']) + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + inst = inst.remove("a") + self.assertEqual(sorted(inst.keys()), ["b"]) def test_insert_and_find(self): inst = self._makeOne() - inst = inst.insert('b', 1) - inst = inst.insert('a', 2) - val = inst.find('a') + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + val = inst.find("a") self.assertEqual(val.value, 2) def test___len__(self): inst = self._makeOne() - inst = inst.insert('b', 1) - inst = inst.insert('a', 2) + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) self.assertEqual(len(inst), 2) def test___iter__(self): inst = self._makeOne() - inst = inst.insert('b', 1) - inst = inst.insert('a', 2) - 
self.assertEqual(sorted(list(inst)), ['a', 'b']) + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + self.assertEqual(sorted(list(inst)), ["a", "b"]) def test___contains__(self): inst = self._makeOne() - inst = inst.insert('b', 1) - self.assertTrue('b' in inst) - self.assertFalse('a' in inst) + inst = inst.insert("b", 1) + self.assertTrue("b" in inst) + self.assertFalse("a" in inst) class TestDocumentChange(unittest.TestCase): def _makeOne(self, type, document, old_index, new_index): from google.cloud.firestore_v1beta1.watch import DocumentChange + return DocumentChange(type, document, old_index, new_index) def test_ctor(self): - inst = self._makeOne('type', 'document', 'old_index', 'new_index') - self.assertEqual(inst.type, 'type') - self.assertEqual(inst.document, 'document') - self.assertEqual(inst.old_index, 'old_index') - self.assertEqual(inst.new_index, 'new_index') + inst = self._makeOne("type", "document", "old_index", "new_index") + self.assertEqual(inst.type, "type") + self.assertEqual(inst.document, "document") + self.assertEqual(inst.old_index, "old_index") + self.assertEqual(inst.new_index, "new_index") class TestWatchResult(unittest.TestCase): def _makeOne(self, snapshot, name, change_type): from google.cloud.firestore_v1beta1.watch import WatchResult + return WatchResult(snapshot, name, change_type) def test_ctor(self): - inst = self._makeOne('snapshot', 'name', 'change_type') - self.assertEqual(inst.snapshot, 'snapshot') - self.assertEqual(inst.name, 'name') - self.assertEqual(inst.change_type, 'change_type') + inst = self._makeOne("snapshot", "name", "change_type") + self.assertEqual(inst.snapshot, "snapshot") + self.assertEqual(inst.name, "name") + self.assertEqual(inst.change_type, "change_type") class Test_maybe_wrap_exception(unittest.TestCase): def _callFUT(self, exc): from google.cloud.firestore_v1beta1.watch import _maybe_wrap_exception + return _maybe_wrap_exception(exc) def test_is_grpc_error(self): import grpc from google.api_core.exceptions import GoogleAPICallError + exc = grpc.RpcError() result = self._callFUT(exc) self.assertEqual(result.__class__, GoogleAPICallError) @@ -93,9 +98,8 @@ def test_is_not_grpc_error(self): class Test_document_watch_comparator(unittest.TestCase): def _callFUT(self, doc1, doc2): - from google.cloud.firestore_v1beta1.watch import ( - document_watch_comparator, - ) + from google.cloud.firestore_v1beta1.watch import document_watch_comparator + return document_watch_comparator(doc1, doc2) def test_same_doc(self): @@ -108,27 +112,24 @@ def test_diff_doc(self): class TestWatch(unittest.TestCase): def _makeOne( - self, - document_reference=None, - firestore=None, - target=None, - comparator=None, - snapshot_callback=None, - snapshot_class=None, - reference_class=None - ): # pragma: NO COVER + self, + document_reference=None, + firestore=None, + target=None, + comparator=None, + snapshot_callback=None, + snapshot_class=None, + reference_class=None, + ): # pragma: NO COVER from google.cloud.firestore_v1beta1.watch import Watch + if document_reference is None: document_reference = DummyDocumentReference() if firestore is None: firestore = DummyFirestore() if target is None: WATCH_TARGET_ID = 0x5079 # "Py" - target = { - 'documents': { - 'documents': ['/']}, - 'target_id': WATCH_TARGET_ID - } + target = {"documents": {"documents": ["/"]}, "target_id": WATCH_TARGET_ID} if comparator is None: comparator = self._document_watch_comparator if snapshot_callback is None: @@ -147,7 +148,7 @@ def _makeOne( reference_class, 
BackgroundConsumer=DummyBackgroundConsumer, ResumableBidiRpc=DummyRpc, - ) + ) return inst def setUp(self): @@ -167,12 +168,10 @@ def test_ctor(self): def test__on_rpc_done(self): inst = self._makeOne() threading = DummyThreading() - with mock.patch( - 'google.cloud.firestore_v1beta1.watch.threading', - threading - ): + with mock.patch("google.cloud.firestore_v1beta1.watch.threading", threading): inst._on_rpc_done(True) from google.cloud.firestore_v1beta1.watch import _RPC_ERROR_THREAD_NAME + self.assertTrue(threading.threads[_RPC_ERROR_THREAD_NAME].started) def test_close(self): @@ -203,57 +202,48 @@ def test_unsubscribe(self): def test_for_document(self): from google.cloud.firestore_v1beta1.watch import Watch + docref = DummyDocumentReference() snapshot_callback = self._snapshot_callback snapshot_class_instance = DummyDocumentSnapshot document_reference_class_instance = DummyDocumentReference - modulename = 'google.cloud.firestore_v1beta1.watch' - with mock.patch( - '%s.Watch.ResumableBidiRpc' % modulename, - DummyRpc, - ): + modulename = "google.cloud.firestore_v1beta1.watch" + with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc): with mock.patch( - '%s.Watch.BackgroundConsumer' % modulename, - DummyBackgroundConsumer, - ): + "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer + ): inst = Watch.for_document( docref, snapshot_callback, snapshot_class_instance, - document_reference_class_instance + document_reference_class_instance, ) self.assertTrue(inst._consumer.started) self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) def test_for_query(self): from google.cloud.firestore_v1beta1.watch import Watch + snapshot_callback = self._snapshot_callback snapshot_class_instance = DummyDocumentSnapshot document_reference_class_instance = DummyDocumentReference - modulename = 'google.cloud.firestore_v1beta1.watch' + modulename = "google.cloud.firestore_v1beta1.watch" pb2 = DummyPb2() - with mock.patch( - '%s.firestore_pb2' % modulename, - pb2, - ): - with mock.patch( - '%s.Watch.ResumableBidiRpc' % modulename, - DummyRpc, - ): + with mock.patch("%s.firestore_pb2" % modulename, pb2): + with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc): with mock.patch( - '%s.Watch.BackgroundConsumer' % modulename, - DummyBackgroundConsumer, - ): + "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer + ): query = DummyQuery() inst = Watch.for_query( query, snapshot_callback, snapshot_class_instance, - document_reference_class_instance + document_reference_class_instance, ) self.assertTrue(inst._consumer.started) self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) - self.assertEqual(inst._targets['query'], 'dummy query target') + self.assertEqual(inst._targets["query"], "dummy query target") def test_on_snapshot_target_no_change_no_target_ids_not_current(self): inst = self._makeOne() @@ -282,10 +272,7 @@ def test_on_snapshot_target_add(self): proto.target_change.target_ids = [1] # not "Py" with self.assertRaises(Exception) as exc: inst.on_snapshot(proto) - self.assertEqual( - str(exc.exception), - 'Unexpected target ID sent by server' - ) + self.assertEqual(str(exc.exception), "Unexpected target ID sent by server") def test_on_snapshot_target_remove(self): inst = self._makeOne() @@ -294,7 +281,7 @@ def test_on_snapshot_target_remove(self): target_change.target_change_type = firestore_pb2.TargetChange.REMOVE with self.assertRaises(Exception) as exc: inst.on_snapshot(proto) - self.assertEqual(str(exc.exception), 'Error 1: hi') + 
self.assertEqual(str(exc.exception), "Error 1: hi") def test_on_snapshot_target_remove_nocause(self): inst = self._makeOne() @@ -304,7 +291,7 @@ def test_on_snapshot_target_remove_nocause(self): target_change.target_change_type = firestore_pb2.TargetChange.REMOVE with self.assertRaises(Exception) as exc: inst.on_snapshot(proto) - self.assertEqual(str(exc.exception), 'Error 13: internal error') + self.assertEqual(str(exc.exception), "Error 13: internal error") def test_on_snapshot_target_reset(self): inst = self._makeOne() @@ -331,77 +318,73 @@ def test_on_snapshot_target_current(self): def test_on_snapshot_target_unknown(self): inst = self._makeOne() proto = DummyProto() - proto.target_change.target_change_type = 'unknown' + proto.target_change.target_change_type = "unknown" with self.assertRaises(Exception) as exc: inst.on_snapshot(proto) self.assertTrue(inst._consumer is None) self.assertTrue(inst._rpc is None) - self.assertEqual( - str(exc.exception), - 'Unknown target change type: unknown ' - ) + self.assertEqual(str(exc.exception), "Unknown target change type: unknown ") def test_on_snapshot_document_change_removed(self): - from google.cloud.firestore_v1beta1.watch import ( - WATCH_TARGET_ID, - ChangeType, - ) + from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID, ChangeType + inst = self._makeOne() proto = DummyProto() - proto.target_change = '' + proto.target_change = "" proto.document_change.removed_target_ids = [WATCH_TARGET_ID] class DummyDocument: - name = 'fred' + name = "fred" proto.document_change.document = DummyDocument() inst.on_snapshot(proto) - self.assertTrue(inst.change_map['fred'] is ChangeType.REMOVED) + self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED) def test_on_snapshot_document_change_changed(self): from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID + inst = self._makeOne() proto = DummyProto() - proto.target_change = '' + proto.target_change = "" proto.document_change.target_ids = [WATCH_TARGET_ID] class DummyDocument: - name = 'fred' + name = "fred" fields = {} create_time = None update_time = None proto.document_change.document = DummyDocument() inst.on_snapshot(proto) - self.assertEqual(inst.change_map['fred'].data, {}) + self.assertEqual(inst.change_map["fred"].data, {}) def test_on_snapshot_document_change_changed_docname_db_prefix(self): # TODO: Verify the current behavior. The change map currently contains # the db-prefixed document name and not the bare document name. 
from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID + inst = self._makeOne() proto = DummyProto() - proto.target_change = '' + proto.target_change = "" proto.document_change.target_ids = [WATCH_TARGET_ID] class DummyDocument: - name = 'abc://foo/documents/fred' + name = "abc://foo/documents/fred" fields = {} create_time = None update_time = None proto.document_change.document = DummyDocument() - inst._firestore._database_string = 'abc://foo' + inst._firestore._database_string = "abc://foo" inst.on_snapshot(proto) - self.assertEqual(inst.change_map['abc://foo/documents/fred'].data, - {}) + self.assertEqual(inst.change_map["abc://foo/documents/fred"].data, {}) def test_on_snapshot_document_change_neither_changed_nor_removed(self): inst = self._makeOne() proto = DummyProto() - proto.target_change = '' + proto.target_change = "" proto.document_change.target_ids = [] inst.on_snapshot(proto) @@ -409,25 +392,26 @@ def test_on_snapshot_document_change_neither_changed_nor_removed(self): def test_on_snapshot_document_removed(self): from google.cloud.firestore_v1beta1.watch import ChangeType + inst = self._makeOne() proto = DummyProto() - proto.target_change = '' - proto.document_change = '' + proto.target_change = "" + proto.document_change = "" class DummyRemove(object): - document = 'fred' + document = "fred" remove = DummyRemove() proto.document_remove = remove proto.document_delete = None inst.on_snapshot(proto) - self.assertTrue(inst.change_map['fred'] is ChangeType.REMOVED) + self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED) def test_on_snapshot_filter_update(self): inst = self._makeOne() proto = DummyProto() - proto.target_change = '' - proto.document_change = '' + proto.target_change = "" + proto.document_change = "" proto.document_remove = None proto.document_delete = None @@ -446,8 +430,8 @@ def reset(): def test_on_snapshot_filter_update_no_size_change(self): inst = self._makeOne() proto = DummyProto() - proto.target_change = '' - proto.document_change = '' + proto.target_change = "" + proto.document_change = "" proto.document_remove = None proto.document_delete = None @@ -463,16 +447,16 @@ class DummyFilter(object): def test_on_snapshot_unknown_listen_type(self): inst = self._makeOne() proto = DummyProto() - proto.target_change = '' - proto.document_change = '' + proto.target_change = "" + proto.document_change = "" proto.document_remove = None proto.document_delete = None - proto.filter = '' + proto.filter = "" with self.assertRaises(Exception) as exc: inst.on_snapshot(proto) self.assertTrue( - str(exc.exception).startswith('Unknown listen response type'), - str(exc.exception) + str(exc.exception).startswith("Unknown listen response type"), + str(exc.exception), ) def test_push_callback_called_no_changes(self): @@ -482,30 +466,24 @@ class DummyReadTime(object): seconds = 1534858278 inst = self._makeOne() - inst.push(DummyReadTime, 'token') + inst.push(DummyReadTime, "token") self.assertEqual( self.snapshotted, - ( - [], - [], - datetime.datetime.fromtimestamp( - DummyReadTime.seconds, pytz.utc) - ), - ) + ([], [], datetime.datetime.fromtimestamp(DummyReadTime.seconds, pytz.utc)), + ) self.assertTrue(inst.has_pushed) - self.assertEqual(inst.resume_token, 'token') + self.assertEqual(inst.resume_token, "token") def test_push_already_pushed(self): class DummyReadTime(object): seconds = 1534858278 + inst = self._makeOne() inst.has_pushed = True - inst.push(DummyReadTime, 'token') - self.assertEqual( - self.snapshotted, - None) + inst.push(DummyReadTime, "token") + 
self.assertEqual(self.snapshotted, None) self.assertTrue(inst.has_pushed) - self.assertEqual(inst.resume_token, 'token') + self.assertEqual(inst.resume_token, "token") def test__current_size_empty(self): inst = self._makeOne() @@ -514,7 +492,7 @@ def test__current_size_empty(self): def test__current_size_docmap_has_one(self): inst = self._makeOne() - inst.doc_map['a'] = 1 + inst.doc_map["a"] = 1 result = inst._current_size() self.assertEqual(result, 1) @@ -532,16 +510,18 @@ def test__affects_target_current_id_not_in_target_ids(self): def test__extract_changes_doc_removed(self): from google.cloud.firestore_v1beta1.watch import ChangeType + inst = self._makeOne() - changes = {'name': ChangeType.REMOVED} - doc_map = {'name': True} + changes = {"name": ChangeType.REMOVED} + doc_map = {"name": True} results = inst._extract_changes(doc_map, changes, None) - self.assertEqual(results, (['name'], [], [])) + self.assertEqual(results, (["name"], [], [])) def test__extract_changes_doc_removed_docname_not_in_docmap(self): from google.cloud.firestore_v1beta1.watch import ChangeType + inst = self._makeOne() - changes = {'name': ChangeType.REMOVED} + changes = {"name": ChangeType.REMOVED} doc_map = {} results = inst._extract_changes(doc_map, changes, None) self.assertEqual(results, ([], [], [])) @@ -554,8 +534,8 @@ class Dummy(object): doc = Dummy() snapshot = Dummy() - changes = {'name': snapshot} - doc_map = {'name': doc} + changes = {"name": snapshot} + doc_map = {"name": doc} results = inst._extract_changes(doc_map, changes, 1) self.assertEqual(results, ([], [], [snapshot])) self.assertEqual(snapshot.read_time, 1) @@ -569,8 +549,8 @@ class Dummy(object): doc = Dummy() snapshot = Dummy() snapshot.read_time = None - changes = {'name': snapshot} - doc_map = {'name': doc} + changes = {"name": snapshot} + doc_map = {"name": doc} results = inst._extract_changes(doc_map, changes, None) self.assertEqual(results, ([], [], [snapshot])) self.assertEqual(snapshot.read_time, None) @@ -582,7 +562,7 @@ class Dummy(object): pass snapshot = Dummy() - changes = {'name': snapshot} + changes = {"name": snapshot} doc_map = {} results = inst._extract_changes(doc_map, changes, 1) self.assertEqual(results, ([], [snapshot], [])) @@ -596,7 +576,7 @@ class Dummy(object): snapshot = Dummy() snapshot.read_time = None - changes = {'name': snapshot} + changes = {"name": snapshot} doc_map = {} results = inst._extract_changes(doc_map, changes, None) self.assertEqual(results, ([], [snapshot], [])) @@ -607,12 +587,12 @@ def test__compute_snapshot_doctree_and_docmap_disagree_about_length(self): doc_tree = {} doc_map = {None: None} self.assertRaises( - AssertionError, - inst._compute_snapshot, doc_tree, doc_map, None, None, None, - ) + AssertionError, inst._compute_snapshot, doc_tree, doc_map, None, None, None + ) def test__compute_snapshot_operation_relative_ordering(self): from google.cloud.firestore_v1beta1.watch import WatchDocTree + doc_tree = WatchDocTree() class DummyDoc(object): @@ -620,38 +600,33 @@ class DummyDoc(object): deleted_doc = DummyDoc() added_doc = DummyDoc() - added_doc._document_path = '/added' + added_doc._document_path = "/added" updated_doc = DummyDoc() - updated_doc._document_path = '/updated' + updated_doc._document_path = "/updated" doc_tree = doc_tree.insert(deleted_doc, None) doc_tree = doc_tree.insert(updated_doc, None) - doc_map = {'/deleted': deleted_doc, '/updated': updated_doc} - added_snapshot = DummyDocumentSnapshot(added_doc, None, True, - None, None, None) + doc_map = {"/deleted": deleted_doc, 
"/updated": updated_doc} + added_snapshot = DummyDocumentSnapshot(added_doc, None, True, None, None, None) added_snapshot.reference = added_doc - updated_snapshot = DummyDocumentSnapshot(updated_doc, None, True, - None, None, None) + updated_snapshot = DummyDocumentSnapshot( + updated_doc, None, True, None, None, None + ) updated_snapshot.reference = updated_doc - delete_changes = ['/deleted'] + delete_changes = ["/deleted"] add_changes = [added_snapshot] update_changes = [updated_snapshot] inst = self._makeOne() updated_tree, updated_map, applied_changes = inst._compute_snapshot( - doc_tree, - doc_map, - delete_changes, - add_changes, - update_changes - ) + doc_tree, doc_map, delete_changes, add_changes, update_changes + ) # TODO: Verify that the assertion here is correct. - self.assertEqual(updated_map, - { - '/updated': updated_snapshot, - '/added': added_snapshot, - }) + self.assertEqual( + updated_map, {"/updated": updated_snapshot, "/added": added_snapshot} + ) def test__compute_snapshot_modify_docs_updated_doc_no_timechange(self): from google.cloud.firestore_v1beta1.watch import WatchDocTree + doc_tree = WatchDocTree() class DummyDoc(object): @@ -659,42 +634,41 @@ class DummyDoc(object): updated_doc_v1 = DummyDoc() updated_doc_v1.update_time = 1 - updated_doc_v1._document_path = '/updated' + updated_doc_v1._document_path = "/updated" updated_doc_v2 = DummyDoc() updated_doc_v2.update_time = 1 - updated_doc_v2._document_path = '/updated' - doc_tree = doc_tree.insert('/updated', updated_doc_v1) - doc_map = {'/updated': updated_doc_v1} - updated_snapshot = DummyDocumentSnapshot(updated_doc_v2, None, True, - None, None, 1) + updated_doc_v2._document_path = "/updated" + doc_tree = doc_tree.insert("/updated", updated_doc_v1) + doc_map = {"/updated": updated_doc_v1} + updated_snapshot = DummyDocumentSnapshot( + updated_doc_v2, None, True, None, None, 1 + ) delete_changes = [] add_changes = [] update_changes = [updated_snapshot] inst = self._makeOne() updated_tree, updated_map, applied_changes = inst._compute_snapshot( - doc_tree, - doc_map, - delete_changes, - add_changes, - update_changes - ) + doc_tree, doc_map, delete_changes, add_changes, update_changes + ) self.assertEqual(updated_map, doc_map) # no change def test__reset_docs(self): from google.cloud.firestore_v1beta1.watch import ChangeType + inst = self._makeOne() inst.change_map = {None: None} from google.cloud.firestore_v1beta1.watch import WatchDocTree + doc = DummyDocumentReference() - doc._document_path = '/doc' + doc._document_path = "/doc" doc_tree = WatchDocTree() - doc_tree = doc_tree.insert('/doc', doc) - doc_tree = doc_tree.insert('/doc', doc) + doc_tree = doc_tree.insert("/doc", doc) + doc_tree = doc_tree.insert("/doc", doc) snapshot = DummyDocumentSnapshot(doc, None, True, None, None, None) snapshot.reference = doc inst.doc_tree = doc_tree inst._reset_docs() - self.assertEqual(inst.change_map, {'/doc': ChangeType.REMOVED}) + self.assertEqual(inst.change_map, {"/doc": ChangeType.REMOVED}) self.assertEqual(inst.resume_token, None) self.assertFalse(inst.current) @@ -706,48 +680,46 @@ def Listen(self): # pragma: NO COVER class DummyFirestoreClient(object): def __init__(self): - self.transport = mock.Mock( - _stubs={'firestore_stub': DummyFirestoreStub()} - ) + self.transport = mock.Mock(_stubs={"firestore_stub": DummyFirestoreStub()}) class DummyDocumentReference(object): def __init__(self, *document_path, **kw): - if 'client' not in kw: + if "client" not in kw: self._client = DummyFirestore() else: - self._client = 
kw['client'] + self._client = kw["client"] self._path = document_path self.__dict__.update(kw) - _document_path = '/' + _document_path = "/" class DummyQuery(object): # pragma: NO COVER def __init__(self, **kw): - if 'client' not in kw: + if "client" not in kw: self._client = DummyFirestore() else: - self._client = kw['client'] + self._client = kw["client"] - if 'comparator' not in kw: + if "comparator" not in kw: # don't really do the comparison, just return 0 (equal) for all self._comparator = lambda x, y: 1 else: - self._comparator = kw['comparator'] + self._comparator = kw["comparator"] def _to_protobuf(self): - return '' + return "" class DummyFirestore(object): _firestore_api = DummyFirestoreClient() - _database_string = 'abc://bar/' + _database_string = "abc://bar/" def document(self, *document_path): # pragma: NO COVER if len(document_path) == 1: - path = document_path[0].split('/') + path = document_path[0].split("/") else: path = document_path @@ -757,8 +729,7 @@ def document(self, *document_path): # pragma: NO COVER class DummyDocumentSnapshot(object): # def __init__(self, **kw): # self.__dict__.update(kw) - def __init__(self, reference, data, exists, - read_time, create_time, update_time): + def __init__(self, reference, data, exists, read_time, create_time, update_time): self.reference = reference self.data = data self.exists = exists @@ -823,7 +794,7 @@ def close(self): class DummyCause(object): code = 1 - message = 'hi' + message = "hi" class DummyChange(object): @@ -845,7 +816,7 @@ def __init__(self): class DummyTarget(object): def QueryTarget(self, **kw): self.kw = kw - return 'dummy query target' + return "dummy query target" class DummyPb2(object): diff --git a/storage/docs/conf.py b/storage/docs/conf.py index 907aba8007a5..71abc5b3dfe3 100644 --- a/storage/docs/conf.py +++ b/storage/docs/conf.py @@ -18,57 +18,55 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) -__version__ = '0.90.4' +__version__ = "0.90.4" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.coverage', - 'sphinx.ext.napoleon', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", ] # autodoc/autosummary flags -autoclass_content = 'both' -autodoc_default_flags = ['members'] +autoclass_content = "both" +autodoc_default_flags = ["members"] autosummary_generate = True # Add any paths that contain templates here, relative to this directory. 
-templates_path = ['_templates'] +templates_path = ["_templates"] # Allow markdown includes (so releases.md can include CHANGLEOG.md) # http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = { - '.md': 'recommonmark.parser.CommonMarkParser', -} +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'google-cloud-storage' -copyright = u'2017, Google' -author = u'Google APIs' +project = u"google-cloud-storage" +copyright = u"2017, Google" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -77,7 +75,7 @@ # The full version, including alpha/beta/rc tags. release = __version__ # The short X.Y version. -version = '.'.join(release.split('.')[0:2]) +version = ".".join(release.split(".")[0:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -88,37 +86,37 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -127,31 +125,31 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'alabaster' +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". 
-#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -161,78 +159,75 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
-htmlhelp_basename = 'google-cloud-storage-doc' +htmlhelp_basename = "google-cloud-storage-doc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. #'preamble': '', - # Latex figure (float) alignment #'figure_align': 'htbp', } @@ -241,39 +236,51 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'google-cloud-storage.tex', - u'google-cloud-storage Documentation', author, 'manual'), + ( + master_doc, + "google-cloud-storage.tex", + u"google-cloud-storage Documentation", + author, + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, 'google-cloud-storage', - u'google-cloud-storage Documentation', [author], 1)] +man_pages = [ + ( + master_doc, + "google-cloud-storage", + u"google-cloud-storage Documentation", + [author], + 1, + ) +] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -281,27 +288,33 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'google-cloud-storage', - u'google-cloud-storage Documentation', author, 'google-cloud-storage', - 'GAPIC library for the {metadata.shortName} v1 service', 'APIs'), + ( + master_doc, + "google-cloud-storage", + u"google-cloud-storage Documentation", + author, + "google-cloud-storage", + "GAPIC library for the {metadata.shortName} v1 service", + "APIs", + ) ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'python': ('http://python.readthedocs.org/en/latest/', None), - 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), } # Napoleon settings diff --git a/storage/docs/snippets.py b/storage/docs/snippets.py index 2f2934d060fc..ed16c5279e30 100644 --- a/storage/docs/snippets.py +++ b/storage/docs/snippets.py @@ -36,13 +36,13 @@ def snippet(func): def storage_get_started(client, to_delete): # [START storage_get_started] client = storage.Client() - bucket = client.get_bucket('bucket-id-here') + bucket = client.get_bucket("bucket-id-here") # Then do other things... - blob = bucket.get_blob('/remote/path/to/file.txt') - assert blob.download_as_string() == 'My old contents!' - blob.upload_from_string('New contents!') - blob2 = bucket.blob('/remote/path/storage.txt') - blob2.upload_from_filename(filename='/local/path.txt') + blob = bucket.get_blob("/remote/path/to/file.txt") + assert blob.download_as_string() == "My old contents!" + blob.upload_from_string("New contents!") + blob2 = bucket.blob("/remote/path/storage.txt") + blob2.upload_from_filename(filename="/local/path.txt") # [END storage_get_started] to_delete.append(bucket) @@ -50,7 +50,7 @@ def storage_get_started(client, to_delete): @snippet def client_bucket_acl(client, to_delete): - bucket_name = 'system-test-bucket' + bucket_name = "system-test-bucket" bucket = client.bucket(bucket_name) bucket.create() @@ -62,7 +62,7 @@ def client_bucket_acl(client, to_delete): to_delete.append(bucket) # [START acl_user_settings] - acl.user('me@example.org').grant_read() + acl.user("me@example.org").grant_read() acl.all_authenticated().grant_write() # [END acl_user_settings] @@ -90,12 +90,12 @@ def download_to_file(client, to_delete): # [START download_to_file] from google.cloud.storage import Blob - client = storage.Client(project='my-project') - bucket = client.get_bucket('my-bucket') - encryption_key = 'c7f32af42e45e85b9848a6a14dd2a8f6' - blob = Blob('secure-data', bucket, encryption_key=encryption_key) - blob.upload_from_string('my secret message.') - with open('/tmp/my-secure-file', 'wb') as file_obj: + client = storage.Client(project="my-project") + bucket = client.get_bucket("my-bucket") + encryption_key = "c7f32af42e45e85b9848a6a14dd2a8f6" + blob = Blob("secure-data", bucket, encryption_key=encryption_key) + blob.upload_from_string("my secret message.") + with open("/tmp/my-secure-file", "wb") as file_obj: blob.download_to_file(file_obj) # [END download_to_file] @@ -107,11 +107,11 @@ def upload_from_file(client, to_delete): # [START upload_from_file] from google.cloud.storage import Blob - client = storage.Client(project='my-project') - bucket = client.get_bucket('my-bucket') - encryption_key = 'aa426195405adee2c8081bb9e7e74b19' - blob = Blob('secure-data', bucket, encryption_key=encryption_key) - with open('my-file', 'rb') as my_file: + client = storage.Client(project="my-project") + bucket = client.get_bucket("my-bucket") + encryption_key = "aa426195405adee2c8081bb9e7e74b19" + blob = Blob("secure-data", bucket, encryption_key=encryption_key) + with open("my-file", "rb") as my_file: blob.upload_from_file(my_file) # [END upload_from_file] @@ -121,12 +121,13 @@ def upload_from_file(client, to_delete): @snippet def get_blob(client, to_delete): from google.cloud.storage.blob import Blob + # [START get_blob] client = storage.Client() - bucket = client.get_bucket('my-bucket') - assert 
isinstance(bucket.get_blob('/path/to/blob.txt'), Blob) + bucket = client.get_bucket("my-bucket") + assert isinstance(bucket.get_blob("/path/to/blob.txt"), Blob) # - assert not bucket.get_blob('/does-not-exist.txt') + assert not bucket.get_blob("/does-not-exist.txt") # None # [END get_blob] @@ -137,14 +138,15 @@ def get_blob(client, to_delete): def delete_blob(client, to_delete): # [START delete_blob] from google.cloud.exceptions import NotFound + client = storage.Client() - bucket = client.get_bucket('my-bucket') + bucket = client.get_bucket("my-bucket") blobs = list(bucket.list_blobs()) assert len(blobs) > 0 # [] - bucket.delete_blob('my-file.txt') + bucket.delete_blob("my-file.txt") try: - bucket.delete_blob('doesnt-exist') + bucket.delete_blob("doesnt-exist") except NotFound: pass # [END delete_blob] @@ -159,11 +161,11 @@ def delete_blob(client, to_delete): @snippet def configure_website(client, to_delete): - bucket_name = 'test-bucket' + bucket_name = "test-bucket" # [START configure_website] client = storage.Client() bucket = client.get_bucket(bucket_name) - bucket.configure_website('index.html', '404.html') + bucket.configure_website("index.html", "404.html") # [END configure_website] # [START make_public] @@ -176,11 +178,12 @@ def configure_website(client, to_delete): @snippet def get_bucket(client, to_delete): import google + # [START get_bucket] try: - bucket = client.get_bucket('my-bucket') + bucket = client.get_bucket("my-bucket") except google.cloud.exceptions.NotFound: - print('Sorry, that bucket does not exist!') + print("Sorry, that bucket does not exist!") # [END get_bucket] to_delete.append(bucket) @@ -188,7 +191,7 @@ def get_bucket(client, to_delete): @snippet def add_lifecycle_delete_rule(client, to_delete): # [START add_lifecycle_delete_rule] - bucket = client.get_bucket('my-bucket') + bucket = client.get_bucket("my-bucket") bucket.add_lifecycle_rule_delete(age=2) bucket.patch() # [END add_lifecycle_delete_rule] @@ -198,9 +201,10 @@ def add_lifecycle_delete_rule(client, to_delete): @snippet def add_lifecycle_set_storage_class_rule(client, to_delete): # [START add_lifecycle_set_storage_class_rule] - bucket = client.get_bucket('my-bucket') + bucket = client.get_bucket("my-bucket") bucket.add_lifecycle_rule_set_storage_class( - 'COLD_LINE', matches_storage_class=['NEARLINE']) + "COLD_LINE", matches_storage_class=["NEARLINE"] + ) bucket.patch() # [END add_lifecycle_set_storage_class_rule] to_delete.append(bucket) @@ -209,11 +213,12 @@ def add_lifecycle_set_storage_class_rule(client, to_delete): @snippet def lookup_bucket(client, to_delete): from google.cloud.storage.bucket import Bucket + # [START lookup_bucket] - bucket = client.lookup_bucket('doesnt-exist') + bucket = client.lookup_bucket("doesnt-exist") assert not bucket # None - bucket = client.lookup_bucket('my-bucket') + bucket = client.lookup_bucket("my-bucket") assert isinstance(bucket, Bucket) # # [END lookup_bucket] @@ -224,8 +229,9 @@ def lookup_bucket(client, to_delete): @snippet def create_bucket(client, to_delete): from google.cloud.storage import Bucket + # [START create_bucket] - bucket = client.create_bucket('my-bucket') + bucket = client.create_bucket("my-bucket") assert isinstance(bucket, Bucket) # # [END create_bucket] @@ -248,17 +254,16 @@ def list_buckets(client, to_delete): def policy_document(client, to_delete): # pylint: disable=unused-argument # [START policy_document] - bucket = client.bucket('my-bucket') - conditions = [ - ['starts-with', '$key', ''], - {'acl': 'public-read'}] + bucket = 
client.bucket("my-bucket") + conditions = [["starts-with", "$key", ""], {"acl": "public-read"}] policy = bucket.generate_upload_policy(conditions) # Generate an upload form using the form fields. - policy_fields = ''.join( + policy_fields = "".join( ''.format( - key=key, value=value) + key=key, value=value + ) for key, value in policy.items() ) @@ -270,21 +275,21 @@ def policy_document(client, to_delete): '' '' '' - '{policy_fields}' - '').format(bucket_name=bucket.name, policy_fields=policy_fields) + "{policy_fields}" + "" + ).format(bucket_name=bucket.name, policy_fields=policy_fields) print(upload_form) # [END policy_document] def _line_no(func): - code = getattr(func, '__code__', None) or getattr(func, 'func_code') + code = getattr(func, "__code__", None) or getattr(func, "func_code") return code.co_firstlineno def _find_examples(): - funcs = [obj for obj in globals().values() - if getattr(obj, '_snippet', False)] + funcs = [obj for obj in globals().values() if getattr(obj, "_snippet", False)] for func in sorted(funcs, key=_line_no): yield func @@ -297,16 +302,16 @@ def main(): client = storage.Client() for example in _find_examples(): to_delete = [] - print('%-25s: %s' % _name_and_doc(example)) + print("%-25s: %s" % _name_and_doc(example)) try: example(client, to_delete) except AssertionError as failure: - print(' FAIL: %s' % (failure,)) + print(" FAIL: %s" % (failure,)) except Exception as error: # pylint: disable=broad-except - print(' ERROR: %r' % (error,)) + print(" ERROR: %r" % (error,)) for item in to_delete: item.delete() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/storage/google/__init__.py b/storage/google/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/storage/google/__init__.py +++ b/storage/google/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/storage/google/cloud/__init__.py b/storage/google/cloud/__init__.py index 9ee9bf4342ab..0e1bc5131ba6 100644 --- a/storage/google/cloud/__init__.py +++ b/storage/google/cloud/__init__.py @@ -14,7 +14,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/storage/google/cloud/storage/__init__.py b/storage/google/cloud/storage/__init__.py index db69236b416e..2b643fc80add 100644 --- a/storage/google/cloud/storage/__init__.py +++ b/storage/google/cloud/storage/__init__.py @@ -32,7 +32,8 @@ from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-storage').version + +__version__ = get_distribution("google-cloud-storage").version from google.cloud.storage.batch import Batch from google.cloud.storage.blob import Blob @@ -40,4 +41,4 @@ from google.cloud.storage.client import Client -__all__ = ['__version__', 'Batch', 'Blob', 'Bucket', 'Client'] +__all__ = ["__version__", "Batch", "Blob", "Bucket", "Client"] diff --git a/storage/google/cloud/storage/_helpers.py b/storage/google/cloud/storage/_helpers.py index 88740ef6cd19..11f2ad556ef1 100644 --- a/storage/google/cloud/storage/_helpers.py +++ b/storage/google/cloud/storage/_helpers.py @@ -35,8 +35,7 @@ def _validate_name(name): # The first and las characters must be alphanumeric. 
if not all([name[0].isalnum(), name[-1].isalnum()]): - raise ValueError( - 'Bucket names must start and end with a number or letter.') + raise ValueError("Bucket names must start and end with a number or letter.") return name @@ -100,12 +99,12 @@ def reload(self, client=None): client = self._require_client(client) # Pass only '?projection=noAcl' here because 'acl' and related # are handled via custom endpoints. - query_params = {'projection': 'noAcl'} + query_params = {"projection": "noAcl"} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project api_response = client._connection.api_request( - method='GET', path=self.path, query_params=query_params, - _target_object=self) + method="GET", path=self.path, query_params=query_params, _target_object=self + ) self._set_properties(api_response) def _patch_property(self, name, value): @@ -151,16 +150,19 @@ def patch(self, client=None): client = self._require_client(client) # Pass '?projection=full' here because 'PATCH' documented not # to work properly w/ 'noAcl'. - query_params = {'projection': 'full'} + query_params = {"projection": "full"} if self.user_project is not None: - query_params['userProject'] = self.user_project - update_properties = {key: self._properties[key] - for key in self._changes} + query_params["userProject"] = self.user_project + update_properties = {key: self._properties[key] for key in self._changes} # Make the API call. api_response = client._connection.api_request( - method='PATCH', path=self.path, data=update_properties, - query_params=query_params, _target_object=self) + method="PATCH", + path=self.path, + data=update_properties, + query_params=query_params, + _target_object=self, + ) self._set_properties(api_response) def update(self, client=None): @@ -176,18 +178,23 @@ def update(self, client=None): ``client`` stored on the current object. """ client = self._require_client(client) - query_params = {'projection': 'full'} + query_params = {"projection": "full"} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project api_response = client._connection.api_request( - method='PUT', path=self.path, data=self._properties, - query_params=query_params, _target_object=self) + method="PUT", + path=self.path, + data=self._properties, + query_params=query_params, + _target_object=self, + ) self._set_properties(api_response) def _scalar_property(fieldname): """Create a property descriptor around the :class:`_PropertyMixin` helpers. 
""" + def _getter(self): """Scalar property getter.""" return self._properties.get(fieldname) diff --git a/storage/google/cloud/storage/_http.py b/storage/google/cloud/storage/_http.py index 255f0601395d..9d05a8eb360c 100644 --- a/storage/google/cloud/storage/_http.py +++ b/storage/google/cloud/storage/_http.py @@ -32,12 +32,10 @@ class Connection(_http.JSONConnection): API_BASE_URL = _http.API_BASE_URL """The base of the API call URL.""" - API_VERSION = 'v1' + API_VERSION = "v1" """The version of the API, used in building the API call's URL.""" - API_URL_TEMPLATE = '{api_base_url}/storage/{api_version}{path}' + API_URL_TEMPLATE = "{api_base_url}/storage/{api_version}{path}" """A template for the URL of a particular API call.""" - _EXTRA_HEADERS = { - _http.CLIENT_INFO_HEADER: _CLIENT_INFO, - } + _EXTRA_HEADERS = {_http.CLIENT_INFO_HEADER: _CLIENT_INFO} diff --git a/storage/google/cloud/storage/_signing.py b/storage/google/cloud/storage/_signing.py index ce5dcee1799e..6f0bf4d5f39f 100644 --- a/storage/google/cloud/storage/_signing.py +++ b/storage/google/cloud/storage/_signing.py @@ -33,13 +33,17 @@ def ensure_signed_credentials(credentials): for signing text. """ if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' - 'core/auth.html?highlight=authentication#setting-up-' - 'a-service-account') - raise AttributeError('you need a private key to sign credentials.' - 'the credentials you are currently using %s ' - 'just contains a token. see %s for more ' - 'details.' % (type(credentials), auth_uri)) + auth_uri = ( + "https://google-cloud-python.readthedocs.io/en/latest/" + "core/auth.html?highlight=authentication#setting-up-" + "a-service-account" + ) + raise AttributeError( + "you need a private key to sign credentials." + "the credentials you are currently using %s " + "just contains a token. see %s for more " + "details." % (type(credentials), auth_uri) + ) def get_signed_query_params(credentials, expiration, string_to_sign): @@ -66,9 +70,9 @@ def get_signed_query_params(credentials, expiration, string_to_sign): signature = base64.b64encode(signature_bytes) service_account_name = credentials.signer_email return { - 'GoogleAccessId': service_account_name, - 'Expires': str(expiration), - 'Signature': signature, + "GoogleAccessId": service_account_name, + "Expires": str(expiration), + "Signature": signature, } @@ -91,19 +95,28 @@ def get_expiration_seconds(expiration): # If it's a datetime, convert to a timestamp. if isinstance(expiration, datetime.datetime): micros = _helpers._microseconds_from_datetime(expiration) - expiration = micros // 10**6 + expiration = micros // 10 ** 6 if not isinstance(expiration, six.integer_types): - raise TypeError('Expected an integer timestamp, datetime, or ' - 'timedelta. Got %s' % type(expiration)) + raise TypeError( + "Expected an integer timestamp, datetime, or " + "timedelta. Got %s" % type(expiration) + ) return expiration -def generate_signed_url(credentials, resource, expiration, - api_access_endpoint='', - method='GET', content_md5=None, - content_type=None, response_type=None, - response_disposition=None, generation=None): +def generate_signed_url( + credentials, + resource, + expiration, + api_access_endpoint="", + method="GET", + content_md5=None, + content_type=None, + response_type=None, + response_disposition=None, + generation=None, +): """Generate signed URL to provide query-string auth'n to a resource. .. 
note:: @@ -177,34 +190,36 @@ def generate_signed_url(credentials, resource, expiration, """ expiration = get_expiration_seconds(expiration) - if method == 'RESUMABLE': - method = 'POST' - canonicalized_resource = \ - 'x-goog-resumable:start\n{0}'.format(resource) + if method == "RESUMABLE": + method = "POST" + canonicalized_resource = "x-goog-resumable:start\n{0}".format(resource) else: - canonicalized_resource = '{0}'.format(resource) + canonicalized_resource = "{0}".format(resource) # Generate the string to sign. - string_to_sign = '\n'.join([ - method, - content_md5 or '', - content_type or '', - str(expiration), - canonicalized_resource, - ]) + string_to_sign = "\n".join( + [ + method, + content_md5 or "", + content_type or "", + str(expiration), + canonicalized_resource, + ] + ) # Set the right query parameters. - query_params = get_signed_query_params( - credentials, expiration, string_to_sign) + query_params = get_signed_query_params(credentials, expiration, string_to_sign) if response_type is not None: - query_params['response-content-type'] = response_type + query_params["response-content-type"] = response_type if response_disposition is not None: - query_params['response-content-disposition'] = response_disposition + query_params["response-content-disposition"] = response_disposition if generation is not None: - query_params['generation'] = generation + query_params["generation"] = generation # Return the built URL. - return '{endpoint}{resource}?{querystring}'.format( - endpoint=api_access_endpoint, resource=resource, - querystring=six.moves.urllib.parse.urlencode(query_params)) + return "{endpoint}{resource}?{querystring}".format( + endpoint=api_access_endpoint, + resource=resource, + querystring=six.moves.urllib.parse.urlencode(query_params), + ) diff --git a/storage/google/cloud/storage/acl.py b/storage/google/cloud/storage/acl.py index 227086eb2fce..9b1af1d87f2f 100644 --- a/storage/google/cloud/storage/acl.py +++ b/storage/google/cloud/storage/acl.py @@ -94,9 +94,9 @@ class _ACLEntity(object): entity types (like 'allUsers') this is optional. """ - READER_ROLE = 'READER' - WRITER_ROLE = 'WRITER' - OWNER_ROLE = 'OWNER' + READER_ROLE = "READER" + WRITER_ROLE = "WRITER" + OWNER_ROLE = "OWNER" def __init__(self, entity_type, identifier=None): self.identifier = identifier @@ -107,11 +107,12 @@ def __str__(self): if not self.identifier: return str(self.type) else: - return '{acl.type}-{acl.identifier}'.format(acl=self) + return "{acl.type}-{acl.identifier}".format(acl=self) def __repr__(self): - return ''.format( - acl=self, roles=', '.join(self.roles)) + return "".format( + acl=self, roles=", ".join(self.roles) + ) def get_roles(self): """Get the list of roles permitted by this entity. 
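For orientation, a minimal usage sketch of the ACL entity model reformatted above; it mirrors the client_bucket_acl snippet earlier in this patch, and the bucket name and e-mail address are hypothetical.

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket("my-bucket")          # hypothetical bucket
acl = bucket.acl
acl.reload()                                     # fetch the current grants first
acl.user("me@example.org").grant_read()          # entity string becomes "user-me@example.org"
acl.all_authenticated().grant_write()
acl.save()                                       # persists the grants back to the bucket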
@@ -166,28 +167,30 @@ def revoke_owner(self): class ACL(object): """Container class representing a list of access controls.""" - _URL_PATH_ELEM = 'acl' - _PREDEFINED_QUERY_PARAM = 'predefinedAcl' + _URL_PATH_ELEM = "acl" + _PREDEFINED_QUERY_PARAM = "predefinedAcl" PREDEFINED_XML_ACLS = { # XML API name -> JSON API name - 'project-private': 'projectPrivate', - 'public-read': 'publicRead', - 'public-read-write': 'publicReadWrite', - 'authenticated-read': 'authenticatedRead', - 'bucket-owner-read': 'bucketOwnerRead', - 'bucket-owner-full-control': 'bucketOwnerFullControl', + "project-private": "projectPrivate", + "public-read": "publicRead", + "public-read-write": "publicReadWrite", + "authenticated-read": "authenticatedRead", + "bucket-owner-read": "bucketOwnerRead", + "bucket-owner-full-control": "bucketOwnerFullControl", } - PREDEFINED_JSON_ACLS = frozenset([ - 'private', - 'projectPrivate', - 'publicRead', - 'publicReadWrite', - 'authenticatedRead', - 'bucketOwnerRead', - 'bucketOwnerFullControl', - ]) + PREDEFINED_JSON_ACLS = frozenset( + [ + "private", + "projectPrivate", + "publicRead", + "publicReadWrite", + "authenticatedRead", + "bucketOwnerRead", + "bucketOwnerFullControl", + ] + ) """See https://cloud.google.com/storage/docs/access-control/lists#predefined-acl """ @@ -236,7 +239,7 @@ def __iter__(self): for entity in self.entities.values(): for role in entity.get_roles(): if role: - yield {'entity': str(entity), 'role': role} + yield {"entity": str(entity), "role": role} def entity_from_dict(self, entity_dict): """Build an _ACLEntity object from a dictionary of data. @@ -251,22 +254,21 @@ def entity_from_dict(self, entity_dict): :rtype: :class:`_ACLEntity` :returns: An Entity constructed from the dictionary. """ - entity = entity_dict['entity'] - role = entity_dict['role'] + entity = entity_dict["entity"] + role = entity_dict["role"] - if entity == 'allUsers': + if entity == "allUsers": entity = self.all() - elif entity == 'allAuthenticatedUsers': + elif entity == "allAuthenticatedUsers": entity = self.all_authenticated() - elif '-' in entity: - entity_type, identifier = entity.split('-', 1) - entity = self.entity(entity_type=entity_type, - identifier=identifier) + elif "-" in entity: + entity_type, identifier = entity.split("-", 1) + entity = self.entity(entity_type=entity_type, identifier=identifier) if not isinstance(entity, _ACLEntity): - raise ValueError('Invalid dictionary: %s' % entity_dict) + raise ValueError("Invalid dictionary: %s" % entity_dict) entity.grant(role) return entity @@ -344,7 +346,7 @@ def user(self, identifier): :rtype: :class:`_ACLEntity` :returns: An Entity corresponding to this user. """ - return self.entity('user', identifier=identifier) + return self.entity("user", identifier=identifier) def group(self, identifier): """Factory method for a group Entity. @@ -355,7 +357,7 @@ def group(self, identifier): :rtype: :class:`_ACLEntity` :returns: An Entity corresponding to this group. """ - return self.entity('group', identifier=identifier) + return self.entity("group", identifier=identifier) def domain(self, domain): """Factory method for a domain Entity. @@ -366,7 +368,7 @@ def domain(self, domain): :rtype: :class:`_ACLEntity` :returns: An entity corresponding to this domain. """ - return self.entity('domain', identifier=domain) + return self.entity("domain", identifier=domain) def all(self): """Factory method for an Entity representing all users. @@ -374,7 +376,7 @@ def all(self): :rtype: :class:`_ACLEntity` :returns: An entity representing all users. 
""" - return self.entity('allUsers') + return self.entity("allUsers") def all_authenticated(self): """Factory method for an Entity representing all authenticated users. @@ -382,7 +384,7 @@ def all_authenticated(self): :rtype: :class:`_ACLEntity` :returns: An entity representing all authenticated users. """ - return self.entity('allAuthenticatedUsers') + return self.entity("allAuthenticatedUsers") def get_entities(self): """Get a list of all Entity objects. @@ -428,17 +430,15 @@ def reload(self, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project self.entities.clear() found = client._connection.api_request( - method='GET', - path=path, - query_params=query_params, + method="GET", path=path, query_params=query_params ) self.loaded = True - for entry in found.get('items', ()): + for entry in found.get("items", ()): self.add_entity(self.entity_from_dict(entry)) def _save(self, acl, predefined, client): @@ -458,22 +458,23 @@ def _save(self, acl, predefined, client): :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. """ - query_params = {'projection': 'full'} + query_params = {"projection": "full"} if predefined is not None: acl = [] query_params[self._PREDEFINED_QUERY_PARAM] = predefined if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project path = self.save_path client = self._require_client(client) result = client._connection.api_request( - method='PATCH', + method="PATCH", path=path, data={self._URL_PATH_ELEM: list(acl)}, - query_params=query_params) + query_params=query_params, + ) self.entities.clear() for entry in result.get(self._URL_PATH_ELEM, ()): self.add_entity(self.entity_from_dict(entry)) @@ -559,7 +560,7 @@ def client(self): @property def reload_path(self): """Compute the path for GET API requests for this ACL.""" - return '%s/%s' % (self.bucket.path, self._URL_PATH_ELEM) + return "%s/%s" % (self.bucket.path, self._URL_PATH_ELEM) @property def save_path(self): @@ -575,8 +576,8 @@ def user_project(self): class DefaultObjectACL(BucketACL): """A class representing the default object ACL for a bucket.""" - _URL_PATH_ELEM = 'defaultObjectAcl' - _PREDEFINED_QUERY_PARAM = 'predefinedDefaultObjectAcl' + _URL_PATH_ELEM = "defaultObjectAcl" + _PREDEFINED_QUERY_PARAM = "predefinedDefaultObjectAcl" class ObjectACL(ACL): @@ -598,7 +599,7 @@ def client(self): @property def reload_path(self): """Compute the path for GET API requests for this ACL.""" - return '%s/acl' % self.blob.path + return "%s/acl" % self.blob.path @property def save_path(self): diff --git a/storage/google/cloud/storage/batch.py b/storage/google/cloud/storage/batch.py index 956c912d9221..772531222e24 100644 --- a/storage/google/cloud/storage/batch.py +++ b/storage/google/cloud/storage/batch.py @@ -49,26 +49,28 @@ class MIMEApplicationHTTP(MIMEApplication): :param body: (Optional) HTTP payload """ + def __init__(self, method, uri, headers, body): if isinstance(body, dict): body = json.dumps(body) - headers['Content-Type'] = 'application/json' - headers['Content-Length'] = len(body) + headers["Content-Type"] = "application/json" + headers["Content-Length"] = len(body) if body is None: - body = '' - lines = ['%s %s HTTP/1.1' % (method, uri)] - lines.extend(['%s: %s' % (key, value) - for key, value in sorted(headers.items())]) - lines.append('') + body = "" + lines = ["%s %s 
HTTP/1.1" % (method, uri)] + lines.extend( + ["%s: %s" % (key, value) for key, value in sorted(headers.items())] + ) + lines.append("") lines.append(body) - payload = '\r\n'.join(lines) + payload = "\r\n".join(lines) if six.PY2: # email.message.Message is an old-style class, so we # cannot use 'super()'. - MIMEApplication.__init__(self, payload, 'http', encode_noop) + MIMEApplication.__init__(self, payload, "http", encode_noop) else: # pragma: NO COVER Python3 super_init = super(MIMEApplicationHTTP, self).__init__ - super_init(payload, 'http', encode_noop) + super_init(payload, "http", encode_noop) class _FutureDict(object): @@ -90,8 +92,7 @@ def get(key, default=None): :raises: :class:`KeyError` always since the future is intended to fail as a dictionary. """ - raise KeyError('Cannot get(%r, default=%r) on a future' % ( - key, default)) + raise KeyError("Cannot get(%r, default=%r) on a future" % (key, default)) def __getitem__(self, key): """Stand-in for dict[key]. @@ -102,7 +103,7 @@ def __getitem__(self, key): :raises: :class:`KeyError` always since the future is intended to fail as a dictionary. """ - raise KeyError('Cannot get item %r from a future' % (key,)) + raise KeyError("Cannot get item %r from a future" % (key,)) def __setitem__(self, key, value): """Stand-in for dict[key] = value. @@ -116,11 +117,12 @@ def __setitem__(self, key, value): :raises: :class:`KeyError` always since the future is intended to fail as a dictionary. """ - raise KeyError('Cannot set %r -> %r on a future' % (key, value)) + raise KeyError("Cannot set %r -> %r on a future" % (key, value)) class _FutureResponse(requests.Response): """Reponse that returns a placeholder dictionary for a batched requests.""" + def __init__(self, future_dict): super(_FutureResponse, self).__init__() self._future_dict = future_dict @@ -140,6 +142,7 @@ class Batch(Connection): :type client: :class:`google.cloud.storage.client.Client` :param client: The client to use for making connections. """ + _MAX_BATCH_SIZE = 1000 def __init__(self, client): @@ -175,8 +178,9 @@ def _do_request(self, method, url, headers, data, target_object): :returns: The HTTP response object and the content of the response. 
""" if len(self._requests) >= self._MAX_BATCH_SIZE: - raise ValueError("Too many deferred requests (max %d)" % - self._MAX_BATCH_SIZE) + raise ValueError( + "Too many deferred requests (max %d)" % self._MAX_BATCH_SIZE + ) self._requests.append((method, url, headers, data)) result = _FutureDict() self._target_objects.append(target_object) @@ -210,7 +214,7 @@ def _prepare_batch_request(self): payload = buf.getvalue() # Strip off redundant header text - _, body = payload.split('\n\n', 1) + _, body = payload.split("\n\n", 1) return dict(multi._headers), body def _finish_futures(self, responses): @@ -227,10 +231,9 @@ def _finish_futures(self, responses): exception_args = None if len(self._target_objects) != len(responses): - raise ValueError('Expected a response for every request.') + raise ValueError("Expected a response for every request.") - for target_object, subresponse in zip( - self._target_objects, responses): + for target_object, subresponse in zip(self._target_objects, responses): if not 200 <= subresponse.status_code < 300: exception_args = exception_args or subresponse elif target_object is not None: @@ -250,13 +253,14 @@ def finish(self): """ headers, body = self._prepare_batch_request() - url = '%s/batch/storage/v1' % self.API_BASE_URL + url = "%s/batch/storage/v1" % self.API_BASE_URL # Use the private ``_base_connection`` rather than the property # ``_connection``, since the property may be this # current batch. response = self._client._base_connection._make_request( - 'POST', url, data=body, headers=headers) + "POST", url, data=body, headers=headers + ) responses = list(_unpack_batch_response(response)) self._finish_futures(responses) return responses @@ -285,20 +289,16 @@ def _generate_faux_mime_message(parser, response): # We coerce to bytes to get consistent concat across # Py2 and Py3. Percent formatting is insufficient since # it includes the b in Py3. 
- content_type = _helpers._to_bytes( - response.headers.get('content-type', '')) + content_type = _helpers._to_bytes(response.headers.get("content-type", "")) - faux_message = b''.join([ - b'Content-Type: ', - content_type, - b'\nMIME-Version: 1.0\n\n', - response.content, - ]) + faux_message = b"".join( + [b"Content-Type: ", content_type, b"\nMIME-Version: 1.0\n\n", response.content] + ) if six.PY2: return parser.parsestr(faux_message) else: # pragma: NO COVER Python3 - return parser.parsestr(faux_message.decode('utf-8')) + return parser.parsestr(faux_message.decode("utf-8")) def _unpack_batch_response(response): @@ -314,22 +314,22 @@ def _unpack_batch_response(response): message = _generate_faux_mime_message(parser, response) if not isinstance(message._payload, list): - raise ValueError('Bad response: not multi-part') + raise ValueError("Bad response: not multi-part") for subrequest in message._payload: - status_line, rest = subrequest._payload.split('\n', 1) - _, status, _ = status_line.split(' ', 2) + status_line, rest = subrequest._payload.split("\n", 1) + _, status, _ = status_line.split(" ", 2) sub_message = parser.parsestr(rest) payload = sub_message._payload msg_headers = dict(sub_message._headers) - content_id = msg_headers.get('Content-ID') + content_id = msg_headers.get("Content-ID") subresponse = requests.Response() subresponse.request = requests.Request( - method='BATCH', - url='contentid://{}'.format(content_id)).prepare() + method="BATCH", url="contentid://{}".format(content_id) + ).prepare() subresponse.status_code = int(status) subresponse.headers.update(msg_headers) - subresponse._content = payload.encode('utf-8') + subresponse._content = payload.encode("utf-8") yield subresponse diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index 1fa15dc7867c..d8d9a3bc95b9 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -59,40 +59,43 @@ from google.cloud.storage.acl import ObjectACL -_API_ACCESS_ENDPOINT = 'https://storage.googleapis.com' -_DEFAULT_CONTENT_TYPE = u'application/octet-stream' +_API_ACCESS_ENDPOINT = "https://storage.googleapis.com" +_DEFAULT_CONTENT_TYPE = u"application/octet-stream" _DOWNLOAD_URL_TEMPLATE = ( - u'https://www.googleapis.com/download/storage/v1{path}?alt=media') + u"https://www.googleapis.com/download/storage/v1{path}?alt=media" +) _BASE_UPLOAD_TEMPLATE = ( - u'https://www.googleapis.com/upload/storage/v1{bucket_path}/o?uploadType=') -_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'multipart' -_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'resumable' + u"https://www.googleapis.com/upload/storage/v1{bucket_path}/o?uploadType=" +) +_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart" +_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"resumable" # NOTE: "acl" is also writeable but we defer ACL management to # the classes in the google.cloud.storage.acl module. -_CONTENT_TYPE_FIELD = 'contentType' +_CONTENT_TYPE_FIELD = "contentType" _WRITABLE_FIELDS = ( - 'cacheControl', - 'contentDisposition', - 'contentEncoding', - 'contentLanguage', + "cacheControl", + "contentDisposition", + "contentEncoding", + "contentLanguage", _CONTENT_TYPE_FIELD, - 'crc32c', - 'md5Hash', - 'metadata', - 'name', - 'storageClass', + "crc32c", + "md5Hash", + "metadata", + "name", + "storageClass", ) _NUM_RETRIES_MESSAGE = ( - '`num_retries` has been deprecated and will be removed in a future ' - 'release. 
The default behavior (when `num_retries` is not specified) when ' - 'a transient error (e.g. 429 Too Many Requests or 500 Internal Server ' - 'Error) occurs will be as follows: upload requests will be automatically ' - 'retried. Subsequent retries will be sent after waiting 1, 2, 4, 8, etc. ' - 'seconds (exponential backoff) until 10 minutes of wait time have ' - 'elapsed. At that point, there will be no more attempts to retry.') + "`num_retries` has been deprecated and will be removed in a future " + "release. The default behavior (when `num_retries` is not specified) when " + "a transient error (e.g. 429 Too Many Requests or 500 Internal Server " + "Error) occurs will be as follows: upload requests will be automatically " + "retried. Subsequent retries will be sent after waiting 1, 2, 4, 8, etc. " + "seconds (exponential backoff) until 10 minutes of wait time have " + "elapsed. At that point, there will be no more attempts to retry." +) _READ_LESS_THAN_SIZE = ( - 'Size {:d} was specified but the file-like object only had ' - '{:d} bytes remaining.') + "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining." +) _DEFAULT_CHUNKSIZE = 104857600 # 1024 * 1024 B * 100 = 100 MB _MAX_MULTIPART_SIZE = 8388608 # 8 MB @@ -133,11 +136,11 @@ class Blob(_PropertyMixin): """Number (256 KB, in bytes) that must divide the chunk size.""" _STORAGE_CLASSES = ( - 'NEARLINE', - 'MULTI_REGIONAL', - 'REGIONAL', - 'COLDLINE', - 'STANDARD', # alias for MULTI_REGIONAL/REGIONAL, based on location + "NEARLINE", + "MULTI_REGIONAL", + "REGIONAL", + "COLDLINE", + "STANDARD", # alias for MULTI_REGIONAL/REGIONAL, based on location ) """Allowed values for :attr:`storage_class`. @@ -155,8 +158,9 @@ class Blob(_PropertyMixin): set as their 'storage_class'. """ - def __init__(self, name, bucket, chunk_size=None, - encryption_key=None, kms_key_name=None): + def __init__( + self, name, bucket, chunk_size=None, encryption_key=None, kms_key_name=None + ): name = _bytes_to_unicode(name) super(Blob, self).__init__(name=name) @@ -165,13 +169,13 @@ def __init__(self, name, bucket, chunk_size=None, self._acl = ObjectACL(self) if encryption_key is not None and kms_key_name is not None: raise ValueError( - "Pass at most one of 'encryption_key' " - "and 'kms_key_name'") + "Pass at most one of 'encryption_key' " "and 'kms_key_name'" + ) self._encryption_key = encryption_key if kms_key_name is not None: - self._properties['kmsKeyName'] = kms_key_name + self._properties["kmsKeyName"] = kms_key_name @property def chunk_size(self): @@ -192,11 +196,10 @@ def chunk_size(self, value): :raises: :class:`ValueError` if ``value`` is not ``None`` and is not a multiple of 256 KB. """ - if value is not None and \ - value > 0 and \ - value % self._CHUNK_SIZE_MULTIPLE != 0: - raise ValueError('Chunk size must be a multiple of %d.' % ( - self._CHUNK_SIZE_MULTIPLE,)) + if value is not None and value > 0 and value % self._CHUNK_SIZE_MULTIPLE != 0: + raise ValueError( + "Chunk size must be a multiple of %d." % (self._CHUNK_SIZE_MULTIPLE,) + ) self._chunk_size = value @staticmethod @@ -212,7 +215,7 @@ def path_helper(bucket_path, blob_name): :rtype: str :returns: The relative URL path for ``blob_name``. 
""" - return bucket_path + '/o/' + _quote(blob_name) + return bucket_path + "/o/" + _quote(blob_name) @property def acl(self): @@ -225,7 +228,7 @@ def __repr__(self): else: bucket_name = None - return '' % (bucket_name, self.name) + return "" % (bucket_name, self.name) @property def path(self): @@ -235,7 +238,7 @@ def path(self): :returns: The URL path to this Blob. """ if not self.name: - raise ValueError('Cannot determine path without a blob name.') + raise ValueError("Cannot determine path without a blob name.") return self.path_helper(self.bucket.path, self.name) @@ -264,15 +267,23 @@ def public_url(self): :rtype: `string` :returns: The public URL for this blob. """ - return '{storage_base_url}/{bucket_name}/{quoted_name}'.format( + return "{storage_base_url}/{bucket_name}/{quoted_name}".format( storage_base_url=_API_ACCESS_ENDPOINT, bucket_name=self.bucket.name, - quoted_name=quote(self.name.encode('utf-8'))) - - def generate_signed_url(self, expiration, method='GET', - content_type=None, - generation=None, response_disposition=None, - response_type=None, client=None, credentials=None): + quoted_name=quote(self.name.encode("utf-8")), + ) + + def generate_signed_url( + self, + expiration, + method="GET", + content_type=None, + generation=None, + response_disposition=None, + response_type=None, + client=None, + credentials=None, + ): """Generates a signed URL for this blob. .. note:: @@ -337,9 +348,9 @@ def generate_signed_url(self, expiration, method='GET', :returns: A signed URL you can use to access the resource until expiration. """ - resource = '/{bucket_name}/{quoted_name}'.format( - bucket_name=self.bucket.name, - quoted_name=quote(self.name.encode('utf-8'))) + resource = "/{bucket_name}/{quoted_name}".format( + bucket_name=self.bucket.name, quoted_name=quote(self.name.encode("utf-8")) + ) if credentials is None: client = self._require_client(client) @@ -354,7 +365,8 @@ def generate_signed_url(self, expiration, method='GET', content_type=content_type, response_type=response_type, response_disposition=response_disposition, - generation=generation) + generation=generation, + ) def exists(self, client=None): """Determines whether or not this blob exists. @@ -373,17 +385,20 @@ def exists(self, client=None): client = self._require_client(client) # We only need the status code (200 or not) so we seek to # minimize the returned payload. - query_params = {'fields': 'name'} + query_params = {"fields": "name"} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project try: # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. client._connection.api_request( - method='GET', path=self.path, - query_params=query_params, _target_object=None) + method="GET", + path=self.path, + query_params=query_params, + _target_object=None, + ) # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. 
@@ -439,18 +454,18 @@ def _get_download_url(self): if self.media_link is None: base_url = _DOWNLOAD_URL_TEMPLATE.format(path=self.path) if self.generation is not None: - name_value_pairs.append( - ('generation', '{:d}'.format(self.generation))) + name_value_pairs.append(("generation", "{:d}".format(self.generation))) else: base_url = self.media_link if self.user_project is not None: - name_value_pairs.append(('userProject', self.user_project)) + name_value_pairs.append(("userProject", self.user_project)) return _add_query_parameters(base_url, name_value_pairs) - def _do_download(self, transport, file_obj, download_url, headers, - start=None, end=None): + def _do_download( + self, transport, file_obj, download_url, headers, start=None, end=None + ): """Perform a download without any error handling. This is intended to be called by :meth:`download_to_file` so it can @@ -478,13 +493,18 @@ def _do_download(self, transport, file_obj, download_url, headers, """ if self.chunk_size is None: download = Download( - download_url, stream=file_obj, headers=headers, - start=start, end=end) + download_url, stream=file_obj, headers=headers, start=start, end=end + ) download.consume(transport) else: download = ChunkedDownload( - download_url, self.chunk_size, file_obj, headers=headers, - start=start if start else 0, end=end) + download_url, + self.chunk_size, + file_obj, + headers=headers, + start=start if start else 0, + end=end, + ) while not download.finished: download.consume_next_chunk(transport) @@ -533,17 +553,15 @@ def download_to_file(self, file_obj, client=None, start=None, end=None): """ download_url = self._get_download_url() headers = _get_encryption_headers(self._encryption_key) - headers['accept-encoding'] = 'gzip' + headers["accept-encoding"] = "gzip" transport = self._get_transport(client) try: - self._do_download( - transport, file_obj, download_url, headers, start, end) + self._do_download(transport, file_obj, download_url, headers, start, end) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) - def download_to_filename(self, filename, client=None, - start=None, end=None): + def download_to_filename(self, filename, client=None, start=None, end=None): """Download the contents of this blob into a named file. If :attr:`user_project` is set on the bucket, bills the API request @@ -566,9 +584,8 @@ def download_to_filename(self, filename, client=None, :raises: :class:`google.cloud.exceptions.NotFound` """ try: - with open(filename, 'wb') as file_obj: - self.download_to_file( - file_obj, client=client, start=start, end=end) + with open(filename, "wb") as file_obj: + self.download_to_file(file_obj, client=client, start=start, end=end) except resumable_media.DataCorruption: # Delete the corrupt downloaded file. os.remove(filename) @@ -601,8 +618,7 @@ def download_as_string(self, client=None, start=None, end=None): :raises: :class:`google.cloud.exceptions.NotFound` """ string_buffer = BytesIO() - self.download_to_file( - string_buffer, client=client, start=start, end=end) + self.download_to_file(string_buffer, client=client, start=start, end=end) return string_buffer.getvalue() def _get_content_type(self, content_type, filename=None): @@ -659,7 +675,7 @@ def _get_writable_metadata(self): managed directly through :class:`ObjectACL` methods. """ # NOTE: This assumes `self.name` is unicode. 
- object_metadata = {'name': self.name} + object_metadata = {"name": self.name} for key in self._changes: if key in _WRITABLE_FIELDS: object_metadata[key] = self._properties[key] @@ -690,8 +706,9 @@ def _get_upload_arguments(self, content_type): content_type = self._get_content_type(content_type) return headers, object_metadata, content_type - def _do_multipart_upload(self, client, stream, content_type, - size, num_retries, predefined_acl): + def _do_multipart_upload( + self, client, stream, content_type, size, num_retries, predefined_acl + ): """Perform a multipart upload. The content type of the upload will be determined in order @@ -741,35 +758,41 @@ def _do_multipart_upload(self, client, stream, content_type, info = self._get_upload_arguments(content_type) headers, object_metadata, content_type = info - base_url = _MULTIPART_URL_TEMPLATE.format( - bucket_path=self.bucket.path) + base_url = _MULTIPART_URL_TEMPLATE.format(bucket_path=self.bucket.path) name_value_pairs = [] if self.user_project is not None: - name_value_pairs.append(('userProject', self.user_project)) + name_value_pairs.append(("userProject", self.user_project)) if self.kms_key_name is not None: - name_value_pairs.append(('kmsKeyName', self.kms_key_name)) + name_value_pairs.append(("kmsKeyName", self.kms_key_name)) if predefined_acl is not None: - name_value_pairs.append(('predefinedAcl', predefined_acl)) + name_value_pairs.append(("predefinedAcl", predefined_acl)) upload_url = _add_query_parameters(base_url, name_value_pairs) upload = MultipartUpload(upload_url, headers=headers) if num_retries is not None: upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries) + max_retries=num_retries + ) - response = upload.transmit( - transport, data, object_metadata, content_type) + response = upload.transmit(transport, data, object_metadata, content_type) return response - def _initiate_resumable_upload(self, client, stream, content_type, - size, num_retries, - predefined_acl=None, - extra_headers=None, chunk_size=None): + def _initiate_resumable_upload( + self, + client, + stream, + content_type, + size, + num_retries, + predefined_acl=None, + extra_headers=None, + chunk_size=None, + ): """Initiate a resumable upload. 
The content type of the upload will be determined in order @@ -831,34 +854,40 @@ def _initiate_resumable_upload(self, client, stream, content_type, if extra_headers is not None: headers.update(extra_headers) - base_url = _RESUMABLE_URL_TEMPLATE.format( - bucket_path=self.bucket.path) + base_url = _RESUMABLE_URL_TEMPLATE.format(bucket_path=self.bucket.path) name_value_pairs = [] if self.user_project is not None: - name_value_pairs.append(('userProject', self.user_project)) + name_value_pairs.append(("userProject", self.user_project)) if self.kms_key_name is not None: - name_value_pairs.append(('kmsKeyName', self.kms_key_name)) + name_value_pairs.append(("kmsKeyName", self.kms_key_name)) if predefined_acl is not None: - name_value_pairs.append(('predefinedAcl', predefined_acl)) + name_value_pairs.append(("predefinedAcl", predefined_acl)) upload_url = _add_query_parameters(base_url, name_value_pairs) upload = ResumableUpload(upload_url, chunk_size, headers=headers) if num_retries is not None: upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries) + max_retries=num_retries + ) upload.initiate( - transport, stream, object_metadata, content_type, - total_bytes=size, stream_final=False) + transport, + stream, + object_metadata, + content_type, + total_bytes=size, + stream_final=False, + ) return upload, transport - def _do_resumable_upload(self, client, stream, content_type, - size, num_retries, predefined_acl): + def _do_resumable_upload( + self, client, stream, content_type, size, num_retries, predefined_acl + ): """Perform a resumable upload. Assumes ``chunk_size`` is not :data:`None` on the current blob. @@ -897,16 +926,22 @@ def _do_resumable_upload(self, client, stream, content_type, is uploaded. """ upload, transport = self._initiate_resumable_upload( - client, stream, content_type, size, num_retries, - predefined_acl=predefined_acl) + client, + stream, + content_type, + size, + num_retries, + predefined_acl=predefined_acl, + ) while not upload.finished: response = upload.transmit_next_chunk(transport) return response - def _do_upload(self, client, stream, content_type, - size, num_retries, predefined_acl): + def _do_upload( + self, client, stream, content_type, size, num_retries, predefined_acl + ): """Determine an upload strategy and then perform the upload. If the size of the data to be uploaded exceeds 5 MB a resumable media @@ -949,18 +984,25 @@ def _do_upload(self, client, stream, content_type, """ if size is not None and size <= _MAX_MULTIPART_SIZE: response = self._do_multipart_upload( - client, stream, content_type, - size, num_retries, predefined_acl) + client, stream, content_type, size, num_retries, predefined_acl + ) else: response = self._do_resumable_upload( - client, stream, content_type, size, - num_retries, predefined_acl) + client, stream, content_type, size, num_retries, predefined_acl + ) return response.json() - def upload_from_file(self, file_obj, rewind=False, size=None, - content_type=None, num_retries=None, client=None, - predefined_acl=None): + def upload_from_file( + self, + file_obj, + rewind=False, + size=None, + content_type=None, + num_retries=None, + client=None, + predefined_acl=None, + ): """Upload the contents of this blob from a file-like object. The content type of the upload will be determined in order @@ -1029,22 +1071,22 @@ def upload_from_file(self, file_obj, rewind=False, size=None, .. 
_lifecycle: https://cloud.google.com/storage/docs/lifecycle """ if num_retries is not None: - warnings.warn( - _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) + warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) _maybe_rewind(file_obj, rewind=rewind) predefined_acl = ACL.validate_predefined(predefined_acl) try: created_json = self._do_upload( - client, file_obj, content_type, - size, num_retries, predefined_acl) + client, file_obj, content_type, size, num_retries, predefined_acl + ) self._set_properties(created_json) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) - def upload_from_filename(self, filename, content_type=None, client=None, - predefined_acl=None): + def upload_from_filename( + self, filename, content_type=None, client=None, predefined_acl=None + ): """Upload this blob's contents from the content of a named file. The content type of the upload will be determined in order @@ -1084,14 +1126,19 @@ def upload_from_filename(self, filename, content_type=None, client=None, """ content_type = self._get_content_type(content_type, filename=filename) - with open(filename, 'rb') as file_obj: + with open(filename, "rb") as file_obj: total_bytes = os.fstat(file_obj.fileno()).st_size self.upload_from_file( - file_obj, content_type=content_type, client=client, - size=total_bytes, predefined_acl=predefined_acl) - - def upload_from_string(self, data, content_type='text/plain', client=None, - predefined_acl=None): + file_obj, + content_type=content_type, + client=client, + size=total_bytes, + predefined_acl=predefined_acl, + ) + + def upload_from_string( + self, data, content_type="text/plain", client=None, predefined_acl=None + ): """Upload contents of this blob from the provided string. .. note:: @@ -1124,19 +1171,19 @@ def upload_from_string(self, data, content_type='text/plain', client=None, :type predefined_acl: str :param predefined_acl: (Optional) predefined access control list """ - data = _to_bytes(data, encoding='utf-8') + data = _to_bytes(data, encoding="utf-8") string_buffer = BytesIO(data) self.upload_from_file( - file_obj=string_buffer, size=len(data), - content_type=content_type, client=client, - predefined_acl=predefined_acl) + file_obj=string_buffer, + size=len(data), + content_type=content_type, + client=client, + predefined_acl=predefined_acl, + ) def create_resumable_upload_session( - self, - content_type=None, - size=None, - origin=None, - client=None): + self, content_type=None, size=None, origin=None, client=None + ): """Create a resumable upload session. Resumable upload sessions allow you to start an upload session from @@ -1205,18 +1252,23 @@ def create_resumable_upload_session( if origin is not None: # This header is specifically for client-side uploads, it # determines the origins allowed for CORS. - extra_headers['Origin'] = origin + extra_headers["Origin"] = origin try: - dummy_stream = BytesIO(b'') + dummy_stream = BytesIO(b"") # Send a fake the chunk size which we **know** will be acceptable # to the `ResumableUpload` constructor. The chunk size only # matters when **sending** bytes to an upload. 
upload, _ = self._initiate_resumable_upload( - client, dummy_stream, content_type, size, None, + client, + dummy_stream, + content_type, + size, + None, predefined_acl=None, extra_headers=extra_headers, - chunk_size=self._CHUNK_SIZE_MULTIPLE) + chunk_size=self._CHUNK_SIZE_MULTIPLE, + ) return upload.resumable_url except resumable_media.InvalidResponse as exc: @@ -1250,13 +1302,14 @@ def get_iam_policy(self, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project info = client._connection.api_request( - method='GET', - path='%s/iam' % (self.path,), + method="GET", + path="%s/iam" % (self.path,), query_params=query_params, - _target_object=None) + _target_object=None, + ) return Policy.from_api_repr(info) def set_iam_policy(self, policy, client=None): @@ -1290,16 +1343,17 @@ def set_iam_policy(self, policy, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project resource = policy.to_api_repr() - resource['resourceId'] = self.path + resource["resourceId"] = self.path info = client._connection.api_request( - method='PUT', - path='%s/iam' % (self.path,), + method="PUT", + path="%s/iam" % (self.path,), query_params=query_params, data=resource, - _target_object=None) + _target_object=None, + ) return Policy.from_api_repr(info) def test_iam_permissions(self, permissions, client=None): @@ -1329,18 +1383,17 @@ def test_iam_permissions(self, permissions, client=None): request. """ client = self._require_client(client) - query_params = {'permissions': permissions} + query_params = {"permissions": permissions} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project - path = '%s/iam/testPermissions' % (self.path,) + path = "%s/iam/testPermissions" % (self.path,) resp = client._connection.api_request( - method='GET', - path=path, - query_params=query_params) + method="GET", path=path, query_params=query_params + ) - return resp.get('permissions', []) + return resp.get("permissions", []) def make_public(self, client=None): """Update blob's ACL, granting read access to anonymous users. 
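A minimal sketch of the make_public flow whose definition closes the hunk above, together with the public_url property formatted earlier in this file; bucket and object names are hypothetical.

from google.cloud import storage

client = storage.Client()
blob = client.bucket("my-bucket").blob("report.txt")    # hypothetical names
blob.upload_from_string("hello world")
blob.make_public()          # grants READER on the object ACL to allUsers
print(blob.public_url)      # e.g. https://storage.googleapis.com/my-bucket/report.txt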
@@ -1382,18 +1435,19 @@ def compose(self, sources, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project request = { - 'sourceObjects': [{'name': source.name} for source in sources], - 'destination': self._properties.copy(), + "sourceObjects": [{"name": source.name} for source in sources], + "destination": self._properties.copy(), } api_response = client._connection.api_request( - method='POST', - path=self.path + '/compose', + method="POST", + path=self.path + "/compose", query_params=query_params, data=request, - _target_object=self) + _target_object=self, + ) self._set_properties(api_response) def rewrite(self, source, token=None, client=None): @@ -1424,38 +1478,38 @@ def rewrite(self, source, token=None, client=None): """ client = self._require_client(client) headers = _get_encryption_headers(self._encryption_key) - headers.update(_get_encryption_headers( - source._encryption_key, source=True)) + headers.update(_get_encryption_headers(source._encryption_key, source=True)) query_params = {} if token: - query_params['rewriteToken'] = token + query_params["rewriteToken"] = token if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project if self.kms_key_name is not None: - query_params['destinationKmsKeyName'] = self.kms_key_name + query_params["destinationKmsKeyName"] = self.kms_key_name api_response = client._connection.api_request( - method='POST', - path=source.path + '/rewriteTo' + self.path, + method="POST", + path=source.path + "/rewriteTo" + self.path, query_params=query_params, data=self._properties, headers=headers, - _target_object=self) - rewritten = int(api_response['totalBytesRewritten']) - size = int(api_response['objectSize']) + _target_object=self, + ) + rewritten = int(api_response["totalBytesRewritten"]) + size = int(api_response["objectSize"]) # The resource key is set if and only if the API response is # completely done. Additionally, there is no rewrite token to return # in this case. - if api_response['done']: - self._set_properties(api_response['resource']) + if api_response["done"]: + self._set_properties(api_response["resource"]) return None, rewritten, size - return api_response['rewriteToken'], rewritten, size + return api_response["rewriteToken"], rewritten, size def update_storage_class(self, new_class, client=None): """Update blob's storage class via a rewrite-in-place. 
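The rewrite hunk above returns a (token, bytes_rewritten, total_bytes) tuple; a hedged sketch of the token-driven copy loop it implies, with hypothetical bucket and object names.

from google.cloud import storage

client = storage.Client()
source = client.bucket("src-bucket").blob("big-object")         # hypothetical
dest = client.bucket("dst-bucket").blob("big-object-copy")      # hypothetical

token, rewritten, total = dest.rewrite(source)
while token is not None:        # a non-None token means the server-side copy is not finished
    token, rewritten, total = dest.rewrite(source, token=token)
print("copied %d of %d bytes" % (rewritten, total))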
@@ -1481,22 +1535,22 @@ def update_storage_class(self, new_class, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project headers = _get_encryption_headers(self._encryption_key) - headers.update(_get_encryption_headers( - self._encryption_key, source=True)) + headers.update(_get_encryption_headers(self._encryption_key, source=True)) api_response = client._connection.api_request( - method='POST', - path=self.path + '/rewriteTo' + self.path, + method="POST", + path=self.path + "/rewriteTo" + self.path, query_params=query_params, - data={'storageClass': new_class}, + data={"storageClass": new_class}, headers=headers, - _target_object=self) - self._set_properties(api_response['resource']) + _target_object=self, + ) + self._set_properties(api_response["resource"]) - cache_control = _scalar_property('cacheControl') + cache_control = _scalar_property("cacheControl") """HTTP 'Cache-Control' header for this object. See `RFC 7234`_ and `API reference docs`_. @@ -1506,7 +1560,7 @@ def update_storage_class(self, new_class, client=None): .. _RFC 7234: https://tools.ietf.org/html/rfc7234#section-5.2 """ - content_disposition = _scalar_property('contentDisposition') + content_disposition = _scalar_property("contentDisposition") """HTTP 'Content-Disposition' header for this object. See `RFC 6266`_ and `API reference docs`_. @@ -1516,7 +1570,7 @@ def update_storage_class(self, new_class, client=None): .. _RFC 6266: https://tools.ietf.org/html/rfc7234#section-5.2 """ - content_encoding = _scalar_property('contentEncoding') + content_encoding = _scalar_property("contentEncoding") """HTTP 'Content-Encoding' header for this object. See `RFC 7231`_ and `API reference docs`_. @@ -1526,7 +1580,7 @@ def update_storage_class(self, new_class, client=None): .. _RFC 7231: https://tools.ietf.org/html/rfc7231#section-3.1.2.2 """ - content_language = _scalar_property('contentLanguage') + content_language = _scalar_property("contentLanguage") """HTTP 'Content-Language' header for this object. See `BCP47`_ and `API reference docs`_. @@ -1546,7 +1600,7 @@ def update_storage_class(self, new_class, client=None): .. _RFC 2616: https://tools.ietf.org/html/rfc2616#section-14.17 """ - crc32c = _scalar_property('crc32c') + crc32c = _scalar_property("crc32c") """CRC32C checksum for this object. See `RFC 4960`_ and `API reference docs`_. @@ -1570,7 +1624,7 @@ def component_count(self): the server. This property will not be set on objects not created via ``compose``. """ - component_count = self._properties.get('componentCount') + component_count = self._properties.get("componentCount") if component_count is not None: return int(component_count) @@ -1586,9 +1640,9 @@ def etag(self): .. _RFC 2616 (etags): https://tools.ietf.org/html/rfc2616#section-3.11 """ - return self._properties.get('etag') + return self._properties.get("etag") - event_based_hold = _scalar_property('eventBasedHold') + event_based_hold = _scalar_property("eventBasedHold") """Is an event-based hold active on the object? See `API reference docs`_. @@ -1608,7 +1662,7 @@ def generation(self): :returns: The generation of the blob or ``None`` if the blob's resource has not been loaded from the server. 
""" - generation = self._properties.get('generation') + generation = self._properties.get("generation") if generation is not None: return int(generation) @@ -1624,9 +1678,9 @@ def id(self): :returns: The ID of the blob or ``None`` if the blob's resource has not been loaded from the server. """ - return self._properties.get('id') + return self._properties.get("id") - md5_hash = _scalar_property('md5Hash') + md5_hash = _scalar_property("md5Hash") """MD5 hash for this object. See `RFC 1321`_ and `API reference docs`_. @@ -1648,7 +1702,7 @@ def media_link(self): :returns: The media link for the blob or ``None`` if the blob's resource has not been loaded from the server. """ - return self._properties.get('mediaLink') + return self._properties.get("mediaLink") @property def metadata(self): @@ -1665,7 +1719,7 @@ def metadata(self): :returns: The metadata associated with the blob or ``None`` if the property is not set. """ - return copy.deepcopy(self._properties.get('metadata')) + return copy.deepcopy(self._properties.get("metadata")) @metadata.setter def metadata(self, value): @@ -1676,7 +1730,7 @@ def metadata(self, value): :type value: dict :param value: (Optional) The blob metadata to set. """ - self._patch_property('metadata', value) + self._patch_property("metadata", value) @property def metageneration(self): @@ -1688,7 +1742,7 @@ def metageneration(self): :returns: The metageneration of the blob or ``None`` if the blob's resource has not been loaded from the server. """ - metageneration = self._properties.get('metageneration') + metageneration = self._properties.get("metageneration") if metageneration is not None: return int(metageneration) @@ -1702,7 +1756,7 @@ def owner(self): :returns: Mapping of owner's role/ID, or ``None`` if the blob's resource has not been loaded from the server. """ - return copy.deepcopy(self._properties.get('owner')) + return copy.deepcopy(self._properties.get("owner")) @property def retention_expiration_time(self): @@ -1714,7 +1768,7 @@ def retention_expiration_time(self): :returns: Datetime object parsed from RFC3339 valid timestamp, or ``None`` if the property is not set locally. """ - value = self._properties.get('retentionExpirationTime') + value = self._properties.get("retentionExpirationTime") if value is not None: return _rfc3339_to_datetime(value) @@ -1728,7 +1782,7 @@ def self_link(self): :returns: The self link for the blob or ``None`` if the blob's resource has not been loaded from the server. """ - return self._properties.get('selfLink') + return self._properties.get("selfLink") @property def size(self): @@ -1740,7 +1794,7 @@ def size(self): :returns: The size of the blob or ``None`` if the blob's resource has not been loaded from the server. """ - size = self._properties.get('size') + size = self._properties.get("size") if size is not None: return int(size) @@ -1753,9 +1807,9 @@ def kms_key_name(self): The resource name or ``None`` if no Cloud KMS key was used, or the blob's resource has not been loaded from the server. """ - return self._properties.get('kmsKeyName') + return self._properties.get("kmsKeyName") - storage_class = _scalar_property('storageClass') + storage_class = _scalar_property("storageClass") """Retrieve the storage class for the object. This can only be set at blob / object **creation** time. If you'd @@ -1771,7 +1825,7 @@ def kms_key_name(self): "DURABLE_REDUCED_AVAILABILITY", else ``None``. 
""" - temporary_hold = _scalar_property('temporaryHold') + temporary_hold = _scalar_property("temporaryHold") """Is a temporary hold active on the object? See `API reference docs`_. @@ -1793,7 +1847,7 @@ def time_deleted(self): the server (see :meth:`reload`). If the blob has not been deleted, this will never be set. """ - value = self._properties.get('timeDeleted') + value = self._properties.get("timeDeleted") if value is not None: return _rfc3339_to_datetime(value) @@ -1808,7 +1862,7 @@ def time_created(self): ``None`` if the blob's resource has not been loaded from the server (see :meth:`reload`). """ - value = self._properties.get('timeCreated') + value = self._properties.get("timeCreated") if value is not None: return _rfc3339_to_datetime(value) @@ -1823,7 +1877,7 @@ def updated(self): ``None`` if the blob's resource has not been loaded from the server (see :meth:`reload`). """ - value = self._properties.get('updated') + value = self._properties.get("updated") if value is not None: return _rfc3339_to_datetime(value) @@ -1850,14 +1904,14 @@ def _get_encryption_headers(key, source=False): key = base64.b64encode(key) if source: - prefix = 'X-Goog-Copy-Source-Encryption-' + prefix = "X-Goog-Copy-Source-Encryption-" else: - prefix = 'X-Goog-Encryption-' + prefix = "X-Goog-Encryption-" return { - prefix + 'Algorithm': 'AES256', - prefix + 'Key': _bytes_to_unicode(key), - prefix + 'Key-Sha256': _bytes_to_unicode(key_hash), + prefix + "Algorithm": "AES256", + prefix + "Key": _bytes_to_unicode(key), + prefix + "Key-Sha256": _bytes_to_unicode(key_hash), } @@ -1875,8 +1929,8 @@ def _quote(value): :rtype: str :returns: The encoded value (bytes in Python 2, unicode in Python 3). """ - value = _to_bytes(value, encoding='utf-8') - return quote(value, safe='') + value = _to_bytes(value, encoding="utf-8") + return quote(value, safe="") def _maybe_rewind(stream, rewind=False): @@ -1905,13 +1959,11 @@ def _raise_from_invalid_response(error): response = error.response error_message = str(error) - message = u'{method} {url}: {error}'.format( - method=response.request.method, - url=response.request.url, - error=error_message) + message = u"{method} {url}: {error}".format( + method=response.request.method, url=response.request.url, error=error_message + ) - raise exceptions.from_http_status( - response.status_code, message, response=response) + raise exceptions.from_http_status(response.status_code, message, response=response) def _add_query_parameters(base_url, name_value_pairs): diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py index 9f235d309c85..c76b6ddf2a3b 100644 --- a/storage/google/cloud/storage/bucket.py +++ b/storage/google/cloud/storage/bucket.py @@ -44,7 +44,8 @@ _LOCATION_SETTER_MESSAGE = ( "Assignment to 'Bucket.location' is deprecated, as it is only " "valid before the bucket is created. Instead, pass the location " - "to `Bucket.create`.") + "to `Bucket.create`." +) def _blobs_page_start(iterator, page, response): @@ -59,7 +60,7 @@ def _blobs_page_start(iterator, page, response): :type response: dict :param response: The JSON API response for a page of blobs. """ - page.prefixes = tuple(response.get('prefixes', ())) + page.prefixes = tuple(response.get("prefixes", ())) iterator.prefixes.update(page.prefixes) @@ -80,7 +81,7 @@ def _item_to_blob(iterator, item): :rtype: :class:`.Blob` :returns: The next blob in the page. 
""" - name = item.get('name') + name = item.get("name") blob = Blob(name, bucket=iterator.bucket) blob._set_properties(item) return blob @@ -136,25 +137,32 @@ class LifecycleRuleConditions(dict): :raises ValueError: if no arguments are passed. """ - def __init__(self, age=None, created_before=None, is_live=None, - matches_storage_class=None, number_of_newer_versions=None, - _factory=False): + + def __init__( + self, + age=None, + created_before=None, + is_live=None, + matches_storage_class=None, + number_of_newer_versions=None, + _factory=False, + ): conditions = {} if age is not None: - conditions['age'] = age + conditions["age"] = age if created_before is not None: - conditions['createdBefore'] = created_before.isoformat() + conditions["createdBefore"] = created_before.isoformat() if is_live is not None: - conditions['isLive'] = is_live + conditions["isLive"] = is_live if matches_storage_class is not None: - conditions['matchesStorageClass'] = matches_storage_class + conditions["matchesStorageClass"] = matches_storage_class if number_of_newer_versions is not None: - conditions['numNewerVersions'] = number_of_newer_versions + conditions["numNewerVersions"] = number_of_newer_versions if not _factory and not conditions: raise ValueError("Supply at least one condition") @@ -178,29 +186,29 @@ def from_api_repr(cls, resource): @property def age(self): """Conditon's age value.""" - return self.get('age') + return self.get("age") @property def created_before(self): """Conditon's created_before value.""" - before = self.get('createdBefore') + before = self.get("createdBefore") if before is not None: return datetime_helpers.from_iso8601_date(before) @property def is_live(self): """Conditon's 'is_live' value.""" - return self.get('isLive') + return self.get("isLive") @property def matches_storage_class(self): """Conditon's 'matches_storage_class' value.""" - return self.get('matchesStorageClass') + return self.get("matchesStorageClass") @property def number_of_newer_versions(self): """Conditon's 'number_of_newer_versions' value.""" - return self.get('numNewerVersions') + return self.get("numNewerVersions") class LifecycleRuleDelete(dict): @@ -209,14 +217,10 @@ class LifecycleRuleDelete(dict): :type kw: dict :params kw: arguments passed to :class:`LifecycleRuleConditions`. """ + def __init__(self, **kw): conditions = LifecycleRuleConditions(**kw) - rule = { - 'action': { - 'type': 'Delete', - }, - 'condition': dict(conditions), - } + rule = {"action": {"type": "Delete"}, "condition": dict(conditions)} super(LifecycleRuleDelete, self).__init__(rule) @classmethod @@ -243,14 +247,12 @@ class LifecycleRuleSetStorageClass(dict): :type kw: dict :params kw: arguments passed to :class:`LifecycleRuleConditions`. """ + def __init__(self, storage_class, **kw): conditions = LifecycleRuleConditions(**kw) rule = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': storage_class, - }, - 'condition': dict(conditions), + "action": {"type": "SetStorageClass", "storageClass": storage_class}, + "condition": dict(conditions), } super(LifecycleRuleSetStorageClass, self).__init__(rule) @@ -264,8 +266,8 @@ def from_api_repr(cls, resource): :rtype: :class:`LifecycleRuleDelete` :returns: Instance created from resource. 
""" - action = resource['action'] - instance = cls(action['storageClass'], _factory=True) + action = resource["action"] + instance = cls(action["storageClass"], _factory=True) instance.update(resource) return instance @@ -293,12 +295,12 @@ class Bucket(_PropertyMixin): """ _STORAGE_CLASSES = ( - 'MULTI_REGIONAL', - 'REGIONAL', - 'NEARLINE', - 'COLDLINE', - 'STANDARD', # alias for MULTI_REGIONAL/REGIONAL, based on location - 'DURABLE_REDUCED_AVAILABILITY', # deprecated + "MULTI_REGIONAL", + "REGIONAL", + "NEARLINE", + "COLDLINE", + "STANDARD", # alias for MULTI_REGIONAL/REGIONAL, based on location + "DURABLE_REDUCED_AVAILABILITY", # deprecated ) """Allowed values for :attr:`storage_class`. @@ -317,7 +319,7 @@ def __init__(self, client, name=None, user_project=None): self._user_project = user_project def __repr__(self): - return '' % (self.name,) + return "" % (self.name,) @property def client(self): @@ -343,8 +345,7 @@ def user_project(self): """ return self._user_project - def blob(self, blob_name, chunk_size=None, - encryption_key=None, kms_key_name=None): + def blob(self, blob_name, chunk_size=None, encryption_key=None, kms_key_name=None): """Factory constructor for blob object. .. note:: @@ -370,15 +371,23 @@ def blob(self, blob_name, chunk_size=None, :rtype: :class:`google.cloud.storage.blob.Blob` :returns: The blob object created. """ - return Blob(name=blob_name, bucket=self, chunk_size=chunk_size, - encryption_key=encryption_key, kms_key_name=kms_key_name) - - def notification(self, topic_name, - topic_project=None, - custom_attributes=None, - event_types=None, - blob_name_prefix=None, - payload_format=NONE_PAYLOAD_FORMAT): + return Blob( + name=blob_name, + bucket=self, + chunk_size=chunk_size, + encryption_key=encryption_key, + kms_key_name=kms_key_name, + ) + + def notification( + self, + topic_name, + topic_project=None, + custom_attributes=None, + event_types=None, + blob_name_prefix=None, + payload_format=NONE_PAYLOAD_FORMAT, + ): """Factory: create a notification resource for the bucket. See: :class:`.BucketNotification` for parameters. @@ -386,7 +395,8 @@ def notification(self, topic_name, :rtype: :class:`.BucketNotification` """ return BucketNotification( - self, topic_name, + self, + topic_name, topic_project=topic_project, custom_attributes=custom_attributes, event_types=event_types, @@ -410,17 +420,20 @@ def exists(self, client=None): client = self._require_client(client) # We only need the status code (200 or not) so we seek to # minimize the returned payload. - query_params = {'fields': 'name'} + query_params = {"fields": "name"} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project try: # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. client._connection.api_request( - method='GET', path=self.path, - query_params=query_params, _target_object=None) + method="GET", + path=self.path, + query_params=query_params, + _target_object=None, + ) # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. 
@@ -465,19 +478,22 @@ def create(self, client=None, project=None, location=None): project = client.project if project is None: - raise ValueError( - "Client project not set: pass an explicit project.") + raise ValueError("Client project not set: pass an explicit project.") - query_params = {'project': project} + query_params = {"project": project} properties = {key: self._properties[key] for key in self._changes} - properties['name'] = self.name + properties["name"] = self.name if location is not None: - properties['location'] = location + properties["location"] = location api_response = client._connection.api_request( - method='POST', path='/b', query_params=query_params, - data=properties, _target_object=self) + method="POST", + path="/b", + query_params=query_params, + data=properties, + _target_object=self, + ) self._set_properties(api_response) def patch(self, client=None): @@ -495,10 +511,10 @@ def patch(self, client=None): # Special case: For buckets, it is possible that labels are being # removed; this requires special handling. if self._label_removals: - self._changes.add('labels') - self._properties.setdefault('labels', {}) + self._changes.add("labels") + self._properties.setdefault("labels", {}) for removed_label in self._label_removals: - self._properties['labels'][removed_label] = None + self._properties["labels"][removed_label] = None # Call the superclass method. return super(Bucket, self).patch(client=client) @@ -523,13 +539,13 @@ def path_helper(bucket_name): :rtype: str :returns: The relative URL path for ``bucket_name``. """ - return '/b/' + bucket_name + return "/b/" + bucket_name @property def path(self): """The URL path to this bucket.""" if not self.name: - raise ValueError('Cannot determine path without bucket name.') + raise ValueError("Cannot determine path without bucket name.") return self.path_helper(self.name) @@ -569,13 +585,14 @@ def get_blob(self, blob_name, client=None, encryption_key=None, **kwargs): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project - blob = Blob(bucket=self, name=blob_name, encryption_key=encryption_key, - **kwargs) + query_params["userProject"] = self.user_project + blob = Blob( + bucket=self, name=blob_name, encryption_key=encryption_key, **kwargs + ) try: headers = _get_encryption_headers(encryption_key) response = client._connection.api_request( - method='GET', + method="GET", path=blob.path, query_params=query_params, headers=headers, @@ -590,9 +607,17 @@ def get_blob(self, blob_name, client=None, encryption_key=None, **kwargs): except NotFound: return None - def list_blobs(self, max_results=None, page_token=None, prefix=None, - delimiter=None, versions=None, - projection='noAcl', fields=None, client=None): + def list_blobs( + self, + max_results=None, + page_token=None, + prefix=None, + delimiter=None, + versions=None, + projection="noAcl", + fields=None, + client=None, + ): """Return an iterator used to find blobs in the bucket. If :attr:`user_project` is set, bills the API request to that project. @@ -636,25 +661,25 @@ def list_blobs(self, max_results=None, page_token=None, prefix=None, :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` in this bucket matching the arguments. 
""" - extra_params = {'projection': projection} + extra_params = {"projection": projection} if prefix is not None: - extra_params['prefix'] = prefix + extra_params["prefix"] = prefix if delimiter is not None: - extra_params['delimiter'] = delimiter + extra_params["delimiter"] = delimiter if versions is not None: - extra_params['versions'] = versions + extra_params["versions"] = versions if fields is not None: - extra_params['fields'] = fields + extra_params["fields"] = fields if self.user_project is not None: - extra_params['userProject'] = self.user_project + extra_params["userProject"] = self.user_project client = self._require_client(client) - path = self.path + '/o' + path = self.path + "/o" iterator = page_iterator.HTTPIterator( client=client, api_request=client._connection.api_request, @@ -663,7 +688,8 @@ def list_blobs(self, max_results=None, page_token=None, prefix=None, page_token=page_token, max_results=max_results, extra_params=extra_params, - page_start=_blobs_page_start) + page_start=_blobs_page_start, + ) iterator.bucket = self iterator.prefixes = set() return iterator @@ -685,12 +711,13 @@ def list_notifications(self, client=None): :returns: notification instances """ client = self._require_client(client) - path = self.path + '/notificationConfigs' + path = self.path + "/notificationConfigs" iterator = page_iterator.HTTPIterator( client=client, api_request=client._connection.api_request, path=path, - item_to_value=_item_to_notification) + item_to_value=_item_to_notification, + ) iterator.bucket = self return iterator @@ -728,33 +755,35 @@ def delete(self, force=False, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project if force: - blobs = list(self.list_blobs( - max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, - client=client)) + blobs = list( + self.list_blobs( + max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client + ) + ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: message = ( - 'Refusing to delete bucket with more than ' - '%d objects. If you actually want to delete ' - 'this bucket, please delete the objects ' - 'yourself before calling Bucket.delete().' + "Refusing to delete bucket with more than " + "%d objects. If you actually want to delete " + "this bucket, please delete the objects " + "yourself before calling Bucket.delete()." ) % (self._MAX_OBJECTS_FOR_ITERATION,) raise ValueError(message) # Ignore 404 errors on delete. - self.delete_blobs(blobs, on_error=lambda blob: None, - client=client) + self.delete_blobs(blobs, on_error=lambda blob: None, client=client) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). client._connection.api_request( - method='DELETE', + method="DELETE", path=self.path, query_params=query_params, - _target_object=None) + _target_object=None, + ) def delete_blob(self, blob_name, client=None): """Deletes a blob from the current bucket. @@ -791,17 +820,18 @@ def delete_blob(self, blob_name, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project blob_path = Blob.path_helper(self.path, blob_name) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). 
client._connection.api_request( - method='DELETE', + method="DELETE", path=blob_path, query_params=query_params, - _target_object=None) + _target_object=None, + ) def delete_blobs(self, blobs, on_error=None, client=None): """Deletes a list of blobs from the current bucket. @@ -839,8 +869,15 @@ def delete_blobs(self, blobs, on_error=None, client=None): else: raise - def copy_blob(self, blob, destination_bucket, new_name=None, - client=None, preserve_acl=True, source_generation=None): + def copy_blob( + self, + blob, + destination_bucket, + new_name=None, + client=None, + preserve_acl=True, + source_generation=None, + ): """Copy the given blob to the given bucket, optionally with a new name. If :attr:`user_project` is set, bills the API request to that project. @@ -875,18 +912,18 @@ def copy_blob(self, blob, destination_bucket, new_name=None, query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project if source_generation is not None: - query_params['sourceGeneration'] = source_generation + query_params["sourceGeneration"] = source_generation if new_name is None: new_name = blob.name new_blob = Blob(bucket=destination_bucket, name=new_name) - api_path = blob.path + '/copyTo' + new_blob.path + api_path = blob.path + "/copyTo" + new_blob.path copy_result = client._connection.api_request( - method='POST', + method="POST", path=api_path, query_params=query_params, _target_object=new_blob, @@ -962,8 +999,7 @@ def cors(self): :rtype: list of dictionaries :returns: A sequence of mappings describing each CORS policy. """ - return [copy.deepcopy(policy) - for policy in self._properties.get('cors', ())] + return [copy.deepcopy(policy) for policy in self._properties.get("cors", ())] @cors.setter def cors(self, entries): @@ -975,9 +1011,9 @@ def cors(self, entries): :type entries: list of dictionaries :param entries: A sequence of mappings describing each CORS policy. """ - self._patch_property('cors', entries) + self._patch_property("cors", entries) - default_event_based_hold = _scalar_property('defaultEventBasedHold') + default_event_based_hold = _scalar_property("defaultEventBasedHold") """Are uploaded objects automatically placed under an even-based hold? If True, uploaded objects will be placed under an event-based hold to @@ -1004,8 +1040,8 @@ def default_kms_key_name(self): :rtype: str :returns: Default KMS encryption key, or ``None`` if not set. """ - encryption_config = self._properties.get('encryption', {}) - return encryption_config.get('defaultKmsKeyName') + encryption_config = self._properties.get("encryption", {}) + return encryption_config.get("defaultKmsKeyName") @default_kms_key_name.setter def default_kms_key_name(self, value): @@ -1014,9 +1050,9 @@ def default_kms_key_name(self, value): :type value: str or None :param value: new KMS key name (None to clear any existing key). """ - encryption_config = self._properties.get('encryption', {}) - encryption_config['defaultKmsKeyName'] = value - self._patch_property('encryption', encryption_config) + encryption_config = self._properties.get("encryption", {}) + encryption_config["defaultKmsKeyName"] = value + self._patch_property("encryption", encryption_config) @property def labels(self): @@ -1043,7 +1079,7 @@ def labels(self): :rtype: :class:`dict` :returns: Name-value pairs (string->string) labelling the bucket. 
""" - labels = self._properties.get('labels') + labels = self._properties.get("labels") if labels is None: return {} return copy.deepcopy(labels) @@ -1062,12 +1098,10 @@ def labels(self, mapping): # so that a future .patch() call can do the correct thing. existing = set([k for k in self.labels.keys()]) incoming = set([k for k in mapping.keys()]) - self._label_removals = self._label_removals.union( - existing.difference(incoming), - ) + self._label_removals = self._label_removals.union(existing.difference(incoming)) # Actually update the labels on the object. - self._patch_property('labels', copy.deepcopy(mapping)) + self._patch_property("labels", copy.deepcopy(mapping)) @property def etag(self): @@ -1080,7 +1114,7 @@ def etag(self): :returns: The bucket etag or ``None`` if the bucket's resource has not been loaded from the server. """ - return self._properties.get('etag') + return self._properties.get("etag") @property def id(self): @@ -1092,7 +1126,7 @@ def id(self): :returns: The ID of the bucket or ``None`` if the bucket's resource has not been loaded from the server. """ - return self._properties.get('id') + return self._properties.get("id") @property def lifecycle_rules(self): @@ -1121,12 +1155,12 @@ def lifecycle_rules(self): :rtype: generator(dict) :returns: A sequence of mappings describing each lifecycle rule. """ - info = self._properties.get('lifecycle', {}) - for rule in info.get('rule', ()): - action_type = rule['action']['type'] - if action_type == 'Delete': + info = self._properties.get("lifecycle", {}) + for rule in info.get("rule", ()): + action_type = rule["action"]["type"] + if action_type == "Delete": yield LifecycleRuleDelete.from_api_repr(rule) - elif action_type == 'SetStorageClass': + elif action_type == "SetStorageClass": yield LifecycleRuleSetStorageClass.from_api_repr(rule) else: raise ValueError("Unknown lifecycle rule: {}".format(rule)) @@ -1142,7 +1176,7 @@ def lifecycle_rules(self, rules): :param entries: A sequence of mappings describing each lifecycle rule. """ rules = [dict(rule) for rule in rules] # Convert helpers if needed - self._patch_property('lifecycle', {'rule': rules}) + self._patch_property("lifecycle", {"rule": rules}) def clear_lifecyle_rules(self): """Set lifestyle rules configured for this bucket. @@ -1189,7 +1223,7 @@ def add_lifecycle_set_storage_class_rule(self, storage_class, **kw): rules.append(LifecycleRuleSetStorageClass(storage_class, **kw)) self.lifecycle_rules = rules - _location = _scalar_property('location') + _location = _scalar_property("location") @property def location(self): @@ -1219,8 +1253,7 @@ def location(self, value): valid before the bucket is created. Instead, pass the location to `Bucket.create`. """ - warnings.warn( - _LOCATION_SETTER_MESSAGE, DeprecationWarning, stacklevel=2) + warnings.warn(_LOCATION_SETTER_MESSAGE, DeprecationWarning, stacklevel=2) self._location = value def get_logging(self): @@ -1232,10 +1265,10 @@ def get_logging(self): :returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix`` (if logging is enabled), or None (if not). """ - info = self._properties.get('logging') + info = self._properties.get("logging") return copy.deepcopy(info) - def enable_logging(self, bucket_name, object_prefix=''): + def enable_logging(self, bucket_name, object_prefix=""): """Enable access logging for this bucket. 
See https://cloud.google.com/storage/docs/access-logs @@ -1246,15 +1279,15 @@ def enable_logging(self, bucket_name, object_prefix=''): :type object_prefix: str :param object_prefix: prefix for access log filenames """ - info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix} - self._patch_property('logging', info) + info = {"logBucket": bucket_name, "logObjectPrefix": object_prefix} + self._patch_property("logging", info) def disable_logging(self): """Disable access logging for this bucket. See https://cloud.google.com/storage/docs/access-logs#disabling """ - self._patch_property('logging', None) + self._patch_property("logging", None) @property def metageneration(self): @@ -1266,7 +1299,7 @@ def metageneration(self): :returns: The metageneration of the bucket or ``None`` if the bucket's resource has not been loaded from the server. """ - metageneration = self._properties.get('metageneration') + metageneration = self._properties.get("metageneration") if metageneration is not None: return int(metageneration) @@ -1280,7 +1313,7 @@ def owner(self): :returns: Mapping of owner's role/ID. Returns ``None`` if the bucket's resource has not been loaded from the server. """ - return copy.deepcopy(self._properties.get('owner')) + return copy.deepcopy(self._properties.get("owner")) @property def project_number(self): @@ -1292,7 +1325,7 @@ def project_number(self): :returns: The project number that owns the bucket or ``None`` if the bucket's resource has not been loaded from the server. """ - project_number = self._properties.get('projectNumber') + project_number = self._properties.get("projectNumber") if project_number is not None: return int(project_number) @@ -1305,9 +1338,9 @@ def retention_policy_effective_time(self): effective, or ``None`` if the property is not set locally. """ - policy = self._properties.get('retentionPolicy') + policy = self._properties.get("retentionPolicy") if policy is not None: - timestamp = policy.get('effectiveTime') + timestamp = policy.get("effectiveTime") if timestamp is not None: return _rfc3339_to_datetime(timestamp) @@ -1320,9 +1353,9 @@ def retention_policy_locked(self): if the policy is not locked, or the property is not set locally. """ - policy = self._properties.get('retentionPolicy') + policy = self._properties.get("retentionPolicy") if policy is not None: - return policy.get('isLocked') + return policy.get("isLocked") @property def retention_period(self): @@ -1333,9 +1366,9 @@ def retention_period(self): from event-based lock, or ``None`` if the property is not set locally. """ - policy = self._properties.get('retentionPolicy') + policy = self._properties.get("retentionPolicy") if policy is not None: - period = policy.get('retentionPeriod') + period = policy.get("retentionPeriod") if period is not None: return int(period) @@ -1350,12 +1383,12 @@ def retention_period(self, value): :raises ValueError: if the bucket's retention policy is locked. """ - policy = self._properties.setdefault('retentionPolicy', {}) + policy = self._properties.setdefault("retentionPolicy", {}) if value is not None: - policy['retentionPeriod'] = str(value) + policy["retentionPeriod"] = str(value) else: policy = None - self._patch_property('retentionPolicy', policy) + self._patch_property("retentionPolicy", policy) @property def self_link(self): @@ -1367,7 +1400,7 @@ def self_link(self): :returns: The self link for the bucket or ``None`` if the bucket's resource has not been loaded from the server. 
""" - return self._properties.get('selfLink') + return self._properties.get("selfLink") @property def storage_class(self): @@ -1383,7 +1416,7 @@ def storage_class(self): "NEARLINE", "COLDLINE", "STANDARD", or "DURABLE_REDUCED_AVAILABILITY", else ``None``. """ - return self._properties.get('storageClass') + return self._properties.get("storageClass") @storage_class.setter def storage_class(self, value): @@ -1396,8 +1429,8 @@ def storage_class(self, value): "COLDLINE", "STANDARD", or "DURABLE_REDUCED_AVAILABILITY" """ if value not in self._STORAGE_CLASSES: - raise ValueError('Invalid storage class: %s' % (value,)) - self._patch_property('storageClass', value) + raise ValueError("Invalid storage class: %s" % (value,)) + self._patch_property("storageClass", value) @property def time_created(self): @@ -1410,7 +1443,7 @@ def time_created(self): ``None`` if the bucket's resource has not been loaded from the server. """ - value = self._properties.get('timeCreated') + value = self._properties.get("timeCreated") if value is not None: return _rfc3339_to_datetime(value) @@ -1427,8 +1460,8 @@ def versioning_enabled(self): :rtype: bool :returns: True if enabled, else False. """ - versioning = self._properties.get('versioning', {}) - return versioning.get('enabled', False) + versioning = self._properties.get("versioning", {}) + return versioning.get("enabled", False) @versioning_enabled.setter def versioning_enabled(self, value): @@ -1440,7 +1473,7 @@ def versioning_enabled(self, value): :type value: convertible to boolean :param value: should versioning be enabled for the bucket? """ - self._patch_property('versioning', {'enabled': bool(value)}) + self._patch_property("versioning", {"enabled": bool(value)}) @property def requester_pays(self): @@ -1456,8 +1489,8 @@ def requester_pays(self): :returns: True if requester pays for API requests for the bucket, else False. """ - versioning = self._properties.get('billing', {}) - return versioning.get('requesterPays', False) + versioning = self._properties.get("billing", {}) + return versioning.get("requesterPays", False) @requester_pays.setter def requester_pays(self, value): @@ -1469,7 +1502,7 @@ def requester_pays(self, value): :type value: convertible to boolean :param value: should requester pay for API requests for the bucket? """ - self._patch_property('billing', {'requesterPays': bool(value)}) + self._patch_property("billing", {"requesterPays": bool(value)}) def configure_website(self, main_page_suffix=None, not_found_page=None): """Configure website-related properties. @@ -1506,11 +1539,8 @@ def configure_website(self, main_page_suffix=None, not_found_page=None): :type not_found_page: str :param not_found_page: The file to use when a page isn't found. """ - data = { - 'mainPageSuffix': main_page_suffix, - 'notFoundPage': not_found_page, - } - self._patch_property('website', data) + data = {"mainPageSuffix": main_page_suffix, "notFoundPage": not_found_page} + self._patch_property("website", data) def disable_website(self): """Disable the website configuration for this bucket. 
@@ -1541,13 +1571,14 @@ def get_iam_policy(self, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project info = client._connection.api_request( - method='GET', - path='%s/iam' % (self.path,), + method="GET", + path="%s/iam" % (self.path,), query_params=query_params, - _target_object=None) + _target_object=None, + ) return Policy.from_api_repr(info) def set_iam_policy(self, policy, client=None): @@ -1574,16 +1605,17 @@ def set_iam_policy(self, policy, client=None): query_params = {} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project resource = policy.to_api_repr() - resource['resourceId'] = self.path + resource["resourceId"] = self.path info = client._connection.api_request( - method='PUT', - path='%s/iam' % (self.path,), + method="PUT", + path="%s/iam" % (self.path,), query_params=query_params, data=resource, - _target_object=None) + _target_object=None, + ) return Policy.from_api_repr(info) def test_iam_permissions(self, permissions, client=None): @@ -1607,17 +1639,16 @@ def test_iam_permissions(self, permissions, client=None): request. """ client = self._require_client(client) - query_params = {'permissions': permissions} + query_params = {"permissions": permissions} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project - path = '%s/iam/testPermissions' % (self.path,) + path = "%s/iam/testPermissions" % (self.path,) resp = client._connection.api_request( - method='GET', - path=path, - query_params=query_params) - return resp.get('permissions', []) + method="GET", path=path, query_params=query_params + ) + return resp.get("permissions", []) def make_public(self, recursive=False, future=False, client=None): """Update bucket's ACL, granting read access to anonymous users. @@ -1654,10 +1685,13 @@ def make_public(self, recursive=False, future=False, client=None): doa.save(client=client) if recursive: - blobs = list(self.list_blobs( - projection='full', - max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, - client=client)) + blobs = list( + self.list_blobs( + projection="full", + max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, + client=client, + ) + ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: message = ( "Refusing to make public recursively with more than " @@ -1707,14 +1741,17 @@ def make_private(self, recursive=False, future=False, client=None): doa.save(client=client) if recursive: - blobs = list(self.list_blobs( - projection='full', - max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, - client=client)) + blobs = list( + self.list_blobs( + projection="full", + max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, + client=client, + ) + ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: message = ( - 'Refusing to make private recursively with more than ' - '%d objects. If you actually want to make every object ' + "Refusing to make private recursively with more than " + "%d objects. If you actually want to make every object " "in this bucket private, iterate through the blobs " "returned by 'Bucket.list_blobs()' and call " "'make_private' on each one." 
@@ -1725,8 +1762,7 @@ def make_private(self, recursive=False, future=False, client=None): blob.acl.all().revoke_read() blob.acl.save(client=client) - def generate_upload_policy( - self, conditions, expiration=None, client=None): + def generate_upload_policy(self, conditions, expiration=None, client=None): """Create a signed upload policy for uploading objects. This method generates and signs a policy document. You can use @@ -1767,25 +1803,23 @@ def generate_upload_policy( if expiration is None: expiration = _NOW() + datetime.timedelta(hours=1) - conditions = conditions + [ - {'bucket': self.name}, - ] + conditions = conditions + [{"bucket": self.name}] policy_document = { - 'expiration': _datetime_to_rfc3339(expiration), - 'conditions': conditions, + "expiration": _datetime_to_rfc3339(expiration), + "conditions": conditions, } encoded_policy_document = base64.b64encode( - json.dumps(policy_document).encode('utf-8')) - signature = base64.b64encode( - credentials.sign_bytes(encoded_policy_document)) + json.dumps(policy_document).encode("utf-8") + ) + signature = base64.b64encode(credentials.sign_bytes(encoded_policy_document)) fields = { - 'bucket': self.name, - 'GoogleAccessId': credentials.signer_email, - 'policy': encoded_policy_document.decode('utf-8'), - 'signature': signature.decode('utf-8'), + "bucket": self.name, + "GoogleAccessId": credentials.signer_email, + "policy": encoded_policy_document.decode("utf-8"), + "signature": signature.decode("utf-8"), } return fields @@ -1798,28 +1832,26 @@ def lock_retention_policy(self, client=None): if the bucket has no retention policy assigned; if the bucket's retention policy is already locked. """ - if 'metageneration' not in self._properties: - raise ValueError( - "Bucket has no retention policy assigned: try 'reload'?") + if "metageneration" not in self._properties: + raise ValueError("Bucket has no retention policy assigned: try 'reload'?") - policy = self._properties.get('retentionPolicy') + policy = self._properties.get("retentionPolicy") if policy is None: - raise ValueError( - "Bucket has no retention policy assigned: try 'reload'?") + raise ValueError("Bucket has no retention policy assigned: try 'reload'?") - if policy.get('isLocked'): + if policy.get("isLocked"): raise ValueError("Bucket's retention policy is already locked.") client = self._require_client(client) - query_params = {'ifMetagenerationMatch': self.metageneration} + query_params = {"ifMetagenerationMatch": self.metageneration} if self.user_project is not None: - query_params['userProject'] = self.user_project + query_params["userProject"] = self.user_project - path = '/b/{}/lockRetentionPolicy'.format(self.name) + path = "/b/{}/lockRetentionPolicy".format(self.name) api_response = client._connection.api_request( - method='POST', path=path, query_params=query_params, - _target_object=self) + method="POST", path=path, query_params=query_params, _target_object=self + ) self._set_properties(api_response) diff --git a/storage/google/cloud/storage/client.py b/storage/google/cloud/storage/client.py index dca1073209fc..620a865b6f21 100644 --- a/storage/google/cloud/storage/client.py +++ b/storage/google/cloud/storage/client.py @@ -53,22 +53,25 @@ class Client(ClientWithProject): change in the future. 
""" - SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control', - 'https://www.googleapis.com/auth/devstorage.read_only', - 'https://www.googleapis.com/auth/devstorage.read_write') + SCOPE = ( + "https://www.googleapis.com/auth/devstorage.full_control", + "https://www.googleapis.com/auth/devstorage.read_only", + "https://www.googleapis.com/auth/devstorage.read_write", + ) """The scopes required for authenticating as a Cloud Storage consumer.""" def __init__(self, project=_marker, credentials=None, _http=None): self._base_connection = None if project is None: no_project = True - project = '' + project = "" else: no_project = False if project is _marker: project = None - super(Client, self).__init__(project=project, credentials=credentials, - _http=_http) + super(Client, self).__init__( + project=project, credentials=credentials, _http=_http + ) if no_project: self.project = None self._connection = Connection(self) @@ -86,7 +89,7 @@ def create_anonymous_client(cls): :rtype: :class:`google.cloud.storage.client.Client` :returns: Instance w/ anonymous credentials and no project. """ - client = cls(project='', credentials=AnonymousCredentials()) + client = cls(project="", credentials=AnonymousCredentials()) client.project = None return client @@ -117,7 +120,7 @@ def _connection(self, value): :raises: :class:`ValueError` if connection has already been set. """ if self._base_connection is not None: - raise ValueError('Connection already set on client') + raise ValueError("Connection already set on client") self._base_connection = value def _push_batch(self, batch): @@ -164,10 +167,9 @@ def get_service_account_email(self, project=None): """ if project is None: project = self.project - path = '/projects/%s/serviceAccount' % (project,) - api_response = self._base_connection.api_request( - method='GET', path=path) - return api_response['email_address'] + path = "/projects/%s/serviceAccount" % (project,) + api_response = self._base_connection.api_request(method="GET", path=path) + return api_response["email_address"] def bucket(self, bucket_name, user_project=None): """Factory constructor for bucket object. @@ -285,8 +287,15 @@ def create_bucket(self, bucket_name, requester_pays=None, project=None): bucket.create(client=self, project=project) return bucket - def list_buckets(self, max_results=None, page_token=None, prefix=None, - projection='noAcl', fields=None, project=None): + def list_buckets( + self, + max_results=None, + page_token=None, + prefix=None, + projection="noAcl", + fields=None, + project=None, + ): """Get all buckets in the project associated to the client. 
This will not populate the list of blobs available in each @@ -336,27 +345,27 @@ def list_buckets(self, max_results=None, page_token=None, prefix=None, project = self.project if project is None: - raise ValueError( - "Client project not set: pass an explicit project.") + raise ValueError("Client project not set: pass an explicit project.") - extra_params = {'project': project} + extra_params = {"project": project} if prefix is not None: - extra_params['prefix'] = prefix + extra_params["prefix"] = prefix - extra_params['projection'] = projection + extra_params["projection"] = projection if fields is not None: - extra_params['fields'] = fields + extra_params["fields"] = fields return page_iterator.HTTPIterator( client=self, api_request=self._connection.api_request, - path='/b', + path="/b", item_to_value=_item_to_bucket, page_token=page_token, max_results=max_results, - extra_params=extra_params) + extra_params=extra_params, + ) def _item_to_bucket(iterator, item): @@ -371,7 +380,7 @@ def _item_to_bucket(iterator, item): :rtype: :class:`.Bucket` :returns: The next bucket in the page. """ - name = item.get('name') + name = item.get("name") bucket = Bucket(iterator.client, name) bucket._set_properties(item) return bucket diff --git a/storage/google/cloud/storage/iam.py b/storage/google/cloud/storage/iam.py index f66e7be9c5d2..fb7e9e4ede3a 100644 --- a/storage/google/cloud/storage/iam.py +++ b/storage/google/cloud/storage/iam.py @@ -19,68 +19,68 @@ # Storage-specific IAM roles -STORAGE_OBJECT_CREATOR_ROLE = 'roles/storage.objectCreator' +STORAGE_OBJECT_CREATOR_ROLE = "roles/storage.objectCreator" """Role implying rights to create objects, but not delete or overwrite them.""" -STORAGE_OBJECT_VIEWER_ROLE = 'roles/storage.objectViewer' +STORAGE_OBJECT_VIEWER_ROLE = "roles/storage.objectViewer" """Role implying rights to view object properties, excluding ACLs.""" -STORAGE_OBJECT_ADMIN_ROLE = 'roles/storage.objectViewer' +STORAGE_OBJECT_ADMIN_ROLE = "roles/storage.objectViewer" """Role implying full control of objects.""" -STORAGE_ADMIN_ROLE = 'roles/storage.admin' +STORAGE_ADMIN_ROLE = "roles/storage.admin" """Role implying full control of objects and buckets.""" -STORAGE_VIEWER_ROLE = 'Viewer' +STORAGE_VIEWER_ROLE = "Viewer" """Can list buckets.""" -STORAGE_EDITOR_ROLE = 'Editor' +STORAGE_EDITOR_ROLE = "Editor" """Can create, list, and delete buckets.""" -STORAGE_OWNER_ROLE = 'Owners' +STORAGE_OWNER_ROLE = "Owners" """Can create, list, and delete buckets.""" # Storage-specific permissions -STORAGE_BUCKETS_CREATE = 'storage.buckets.create' +STORAGE_BUCKETS_CREATE = "storage.buckets.create" """Permission: create buckets.""" -STORAGE_BUCKETS_DELETE = 'storage.buckets.delete' +STORAGE_BUCKETS_DELETE = "storage.buckets.delete" """Permission: delete buckets.""" -STORAGE_BUCKETS_GET = 'storage.buckets.get' +STORAGE_BUCKETS_GET = "storage.buckets.get" """Permission: read bucket metadata, excluding ACLs.""" -STORAGE_BUCKETS_GET_IAM_POLICY = 'storage.buckets.getIamPolicy' +STORAGE_BUCKETS_GET_IAM_POLICY = "storage.buckets.getIamPolicy" """Permission: read bucket ACLs.""" -STORAGE_BUCKETS_LIST = 'storage.buckets.list' +STORAGE_BUCKETS_LIST = "storage.buckets.list" """Permission: list buckets.""" -STORAGE_BUCKETS_SET_IAM_POLICY = 'storage.buckets.setIamPolicy' +STORAGE_BUCKETS_SET_IAM_POLICY = "storage.buckets.setIamPolicy" """Permission: update bucket ACLs.""" -STORAGE_BUCKETS_UPDATE = 'storage.buckets.list' +STORAGE_BUCKETS_UPDATE = "storage.buckets.list" """Permission: update buckets, excluding ACLS.""" 
-STORAGE_OBJECTS_CREATE = 'storage.objects.create' +STORAGE_OBJECTS_CREATE = "storage.objects.create" """Permission: add new objects to a bucket.""" -STORAGE_OBJECTS_DELETE = 'storage.objects.delete' +STORAGE_OBJECTS_DELETE = "storage.objects.delete" """Permission: delete objects.""" -STORAGE_OBJECTS_GET = 'storage.objects.get' +STORAGE_OBJECTS_GET = "storage.objects.get" """Permission: read object data / metadata, excluding ACLs.""" -STORAGE_OBJECTS_GET_IAM_POLICY = 'storage.objects.getIamPolicy' +STORAGE_OBJECTS_GET_IAM_POLICY = "storage.objects.getIamPolicy" """Permission: read object ACLs.""" -STORAGE_OBJECTS_LIST = 'storage.objects.list' +STORAGE_OBJECTS_LIST = "storage.objects.list" """Permission: list objects in a bucket.""" -STORAGE_OBJECTS_SET_IAM_POLICY = 'storage.objects.setIamPolicy' +STORAGE_OBJECTS_SET_IAM_POLICY = "storage.objects.setIamPolicy" """Permission: update object ACLs.""" -STORAGE_OBJECTS_UPDATE = 'storage.objects.update' +STORAGE_OBJECTS_UPDATE = "storage.objects.update" """Permission: update object metadat, excluding ACLs.""" diff --git a/storage/google/cloud/storage/notification.py b/storage/google/cloud/storage/notification.py index 7ac6c9aaf1af..982dc16c04d6 100644 --- a/storage/google/cloud/storage/notification.py +++ b/storage/google/cloud/storage/notification.py @@ -19,24 +19,24 @@ from google.api_core.exceptions import NotFound -OBJECT_FINALIZE_EVENT_TYPE = 'OBJECT_FINALIZE' -OBJECT_METADATA_UPDATE_EVENT_TYPE = 'OBJECT_METADATA_UPDATE' -OBJECT_DELETE_EVENT_TYPE = 'OBJECT_DELETE' -OBJECT_ARCHIVE_EVENT_TYPE = 'OBJECT_ARCHIVE' - -JSON_API_V1_PAYLOAD_FORMAT = 'JSON_API_V1' -NONE_PAYLOAD_FORMAT = 'NONE' - -_TOPIC_REF_FMT = '//pubsub.googleapis.com/projects/{}/topics/{}' -_PROJECT_PATTERN = r'(?P[a-z][a-z0-9-]{4,28}[a-z0-9])' -_TOPIC_NAME_PATTERN = r'(?P[A-Za-z](\w|[-_.~+%])+)' -_TOPIC_REF_PATTERN = _TOPIC_REF_FMT.format( - _PROJECT_PATTERN, _TOPIC_NAME_PATTERN) +OBJECT_FINALIZE_EVENT_TYPE = "OBJECT_FINALIZE" +OBJECT_METADATA_UPDATE_EVENT_TYPE = "OBJECT_METADATA_UPDATE" +OBJECT_DELETE_EVENT_TYPE = "OBJECT_DELETE" +OBJECT_ARCHIVE_EVENT_TYPE = "OBJECT_ARCHIVE" + +JSON_API_V1_PAYLOAD_FORMAT = "JSON_API_V1" +NONE_PAYLOAD_FORMAT = "NONE" + +_TOPIC_REF_FMT = "//pubsub.googleapis.com/projects/{}/topics/{}" +_PROJECT_PATTERN = r"(?P[a-z][a-z0-9-]{4,28}[a-z0-9])" +_TOPIC_NAME_PATTERN = r"(?P[A-Za-z](\w|[-_.~+%])+)" +_TOPIC_REF_PATTERN = _TOPIC_REF_FMT.format(_PROJECT_PATTERN, _TOPIC_NAME_PATTERN) _TOPIC_REF_RE = re.compile(_TOPIC_REF_PATTERN) _BAD_TOPIC = ( - 'Resource has invalid topic: {}; see ' - 'https://cloud.google.com/storage/docs/json_api/v1/' - 'notifications/insert#topic') + "Resource has invalid topic: {}; see " + "https://cloud.google.com/storage/docs/json_api/v1/" + "notifications/insert#topic" +) class BucketNotification(object): @@ -72,9 +72,17 @@ class BucketNotification(object): :param payload_format: (Optional) format of payload for notification events. 
""" - def __init__(self, bucket, topic_name, - topic_project=None, custom_attributes=None, event_types=None, - blob_name_prefix=None, payload_format=NONE_PAYLOAD_FORMAT): + + def __init__( + self, + bucket, + topic_name, + topic_project=None, + custom_attributes=None, + event_types=None, + blob_name_prefix=None, + payload_format=NONE_PAYLOAD_FORMAT, + ): self._bucket = bucket self._topic_name = topic_name @@ -82,23 +90,22 @@ def __init__(self, bucket, topic_name, topic_project = bucket.client.project if topic_project is None: - raise ValueError( - "Client project not set: pass an explicit topic_project.") + raise ValueError("Client project not set: pass an explicit topic_project.") self._topic_project = topic_project self._properties = {} if custom_attributes is not None: - self._properties['custom_attributes'] = custom_attributes + self._properties["custom_attributes"] = custom_attributes if event_types is not None: - self._properties['event_types'] = event_types + self._properties["event_types"] = event_types if blob_name_prefix is not None: - self._properties['object_name_prefix'] = blob_name_prefix + self._properties["object_name_prefix"] = blob_name_prefix - self._properties['payload_format'] = payload_format + self._properties["payload_format"] = payload_format @classmethod def from_api_repr(cls, resource, bucket): @@ -115,9 +122,9 @@ def from_api_repr(cls, resource, bucket): :rtype: :class:`BucketNotification` :returns: the new notification instance """ - topic_path = resource.get('topic') + topic_path = resource.get("topic") if topic_path is None: - raise ValueError('Resource has no topic') + raise ValueError("Resource has no topic") name, project = _parse_topic_path(topic_path) instance = cls(bucket, name, topic_project=project) @@ -145,39 +152,39 @@ def topic_project(self): def custom_attributes(self): """Custom attributes passed with notification events. """ - return self._properties.get('custom_attributes') + return self._properties.get("custom_attributes") @property def event_types(self): """Event types for which notification events are published. """ - return self._properties.get('event_types') + return self._properties.get("event_types") @property def blob_name_prefix(self): """Prefix of blob names for which notification events are published. """ - return self._properties.get('object_name_prefix') + return self._properties.get("object_name_prefix") @property def payload_format(self): """Format of payload of notification events.""" - return self._properties.get('payload_format') + return self._properties.get("payload_format") @property def notification_id(self): """Server-set ID of notification resource.""" - return self._properties.get('id') + return self._properties.get("id") @property def etag(self): """Server-set ETag of notification resource.""" - return self._properties.get('etag') + return self._properties.get("etag") @property def self_link(self): """Server-set ETag of notification resource.""" - return self._properties.get('selfLink') + return self._properties.get("selfLink") @property def client(self): @@ -187,8 +194,9 @@ def client(self): @property def path(self): """The URL path for this notification.""" - return '/b/{}/notificationConfigs/{}'.format( - self.bucket.name, self.notification_id) + return "/b/{}/notificationConfigs/{}".format( + self.bucket.name, self.notification_id + ) def _require_client(self, client): """Check client or verify over-ride. @@ -227,24 +235,21 @@ def create(self, client=None): to the ``client`` stored on the notification's bucket. 
""" if self.notification_id is not None: - raise ValueError("Notification already exists w/ id: {}".format( - self.notification_id)) + raise ValueError( + "Notification already exists w/ id: {}".format(self.notification_id) + ) client = self._require_client(client) query_params = {} if self.bucket.user_project is not None: - query_params['userProject'] = self.bucket.user_project + query_params["userProject"] = self.bucket.user_project - path = '/b/{}/notificationConfigs'.format(self.bucket.name) + path = "/b/{}/notificationConfigs".format(self.bucket.name) properties = self._properties.copy() - properties['topic'] = _TOPIC_REF_FMT.format( - self.topic_project, self.topic_name) + properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, self.topic_name) self._properties = client._connection.api_request( - method='POST', - path=path, - query_params=query_params, - data=properties, + method="POST", path=path, query_params=query_params, data=properties ) def exists(self, client=None): @@ -272,13 +277,11 @@ def exists(self, client=None): query_params = {} if self.bucket.user_project is not None: - query_params['userProject'] = self.bucket.user_project + query_params["userProject"] = self.bucket.user_project try: client._connection.api_request( - method='GET', - path=self.path, - query_params=query_params, + method="GET", path=self.path, query_params=query_params ) except NotFound: return False @@ -310,12 +313,10 @@ def reload(self, client=None): query_params = {} if self.bucket.user_project is not None: - query_params['userProject'] = self.bucket.user_project + query_params["userProject"] = self.bucket.user_project response = client._connection.api_request( - method='GET', - path=self.path, - query_params=query_params, + method="GET", path=self.path, query_params=query_params ) self._set_properties(response) @@ -344,12 +345,10 @@ def delete(self, client=None): query_params = {} if self.bucket.user_project is not None: - query_params['userProject'] = self.bucket.user_project + query_params["userProject"] = self.bucket.user_project client._connection.api_request( - method='DELETE', - path=self.path, - query_params=query_params, + method="DELETE", path=self.path, query_params=query_params ) @@ -387,4 +386,4 @@ def _parse_topic_path(topic_path): if match is None: raise ValueError(_BAD_TOPIC.format(topic_path)) - return match.group('name'), match.group('project') + return match.group("name"), match.group("project") diff --git a/storage/tests/system.py b/storage/tests/system.py index 2ef22d3a49f2..b6b108e1d8d9 100644 --- a/storage/tests/system.py +++ b/storage/tests/system.py @@ -32,22 +32,19 @@ from test_utils.system import unique_resource_id -USER_PROJECT = os.environ.get('GOOGLE_CLOUD_TESTS_USER_PROJECT') -RUNNING_IN_VPCSC = os.getenv( - 'GOOGLE_CLOUD_TESTS_IN_VPCSC', '').lower() == 'true' +USER_PROJECT = os.environ.get("GOOGLE_CLOUD_TESTS_USER_PROJECT") +RUNNING_IN_VPCSC = os.getenv("GOOGLE_CLOUD_TESTS_IN_VPCSC", "").lower() == "true" def _bad_copy(bad_request): """Predicate: pass only exceptions for a failed copyTo.""" err_msg = bad_request.message - return (err_msg.startswith('No file found in request. (POST') and 'copyTo' in err_msg) + return err_msg.startswith("No file found in request. 
(POST") and "copyTo" in err_msg retry_429 = RetryErrors(exceptions.TooManyRequests) -retry_429_503 = RetryErrors([ - exceptions.TooManyRequests, exceptions.ServiceUnavailable]) -retry_bad_copy = RetryErrors(exceptions.BadRequest, - error_predicate=_bad_copy) +retry_429_503 = RetryErrors([exceptions.TooManyRequests, exceptions.ServiceUnavailable]) +retry_bad_copy = RetryErrors(exceptions.BadRequest, error_predicate=_bad_copy) def _empty_bucket(bucket): @@ -69,13 +66,14 @@ class Config(object): This is a mutable stand-in to allow test set-up to modify global state. """ + CLIENT = None TEST_BUCKET = None def setUpModule(): Config.CLIENT = storage.Client() - bucket_name = 'new' + unique_resource_id() + bucket_name = "new" + unique_resource_id() # In the **very** rare case the bucket name is reserved, this # fails with a ConnectionError. Config.TEST_BUCKET = Config.CLIENT.bucket(bucket_name) @@ -89,26 +87,20 @@ def tearDownModule(): class TestClient(unittest.TestCase): - def test_get_service_account_email(self): - domain = 'gs-project-accounts.iam.gserviceaccount.com' + domain = "gs-project-accounts.iam.gserviceaccount.com" email = Config.CLIENT.get_service_account_email() - new_style = re.compile( - r'service-(?P[^@]+)@' + domain) - old_style = re.compile( - r'{}@{}'.format(Config.CLIENT.project, domain)) + new_style = re.compile(r"service-(?P[^@]+)@" + domain) + old_style = re.compile(r"{}@{}".format(Config.CLIENT.project, domain)) patterns = [new_style, old_style] matches = [pattern.match(email) for pattern in patterns] - self.assertTrue(any( - match for match in matches if match is not None - )) + self.assertTrue(any(match for match in matches if match is not None)) class TestStorageBuckets(unittest.TestCase): - def setUp(self): self.case_buckets_to_delete = [] @@ -118,30 +110,33 @@ def tearDown(self): retry_429(bucket.delete)() def test_create_bucket(self): - new_bucket_name = 'a-new-bucket' + unique_resource_id('-') - self.assertRaises(exceptions.NotFound, - Config.CLIENT.get_bucket, new_bucket_name) + new_bucket_name = "a-new-bucket" + unique_resource_id("-") + self.assertRaises( + exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name + ) created = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) def test_lifecycle_rules(self): - new_bucket_name = 'w-lifcycle-rules' + unique_resource_id('-') - self.assertRaises(exceptions.NotFound, - Config.CLIENT.get_bucket, new_bucket_name) + new_bucket_name = "w-lifcycle-rules" + unique_resource_id("-") + self.assertRaises( + exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name + ) bucket = Config.CLIENT.bucket(new_bucket_name) bucket.add_lifecycle_delete_rule(age=42) bucket.add_lifecycle_set_storage_class_rule( - 'COLDLINE', is_live=False, matches_storage_class=['NEARLINE']) + "COLDLINE", is_live=False, matches_storage_class=["NEARLINE"] + ) expected_rules = [ LifecycleRuleDelete(age=42), LifecycleRuleSetStorageClass( - 'COLDLINE', - is_live=False, matches_storage_class=['NEARLINE']), + "COLDLINE", is_live=False, matches_storage_class=["NEARLINE"] + ), ] - retry_429(bucket.create)(location='us') + retry_429(bucket.create)(location="us") self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(bucket.name, new_bucket_name) @@ -154,9 +149,9 @@ def test_lifecycle_rules(self): def test_list_buckets(self): buckets_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + 
unique_resource_id(), + "new" + unique_resource_id(), + "newer" + unique_resource_id(), + "newest" + unique_resource_id(), ] created_buckets = [] for bucket_name in buckets_to_create: @@ -166,22 +161,23 @@ def test_list_buckets(self): # Retrieve the buckets. all_buckets = Config.CLIENT.list_buckets() - created_buckets = [bucket for bucket in all_buckets - if bucket.name in buckets_to_create] + created_buckets = [ + bucket for bucket in all_buckets if bucket.name in buckets_to_create + ] self.assertEqual(len(created_buckets), len(buckets_to_create)) def test_bucket_update_labels(self): - bucket_name = 'update-labels' + unique_resource_id('-') + bucket_name = "update-labels" + unique_resource_id("-") bucket = retry_429(Config.CLIENT.create_bucket)(bucket_name) self.case_buckets_to_delete.append(bucket_name) self.assertTrue(bucket.exists()) - updated_labels = {'test-label': 'label-value'} + updated_labels = {"test-label": "label-value"} bucket.labels = updated_labels bucket.update() self.assertEqual(bucket.labels, updated_labels) - new_labels = {'another-label': 'another-value'} + new_labels = {"another-label": "another-value"} bucket.labels = new_labels bucket.patch() self.assertEqual(bucket.labels, new_labels) @@ -190,17 +186,19 @@ def test_bucket_update_labels(self): bucket.update() self.assertEqual(bucket.labels, {}) - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_crud_bucket_with_requester_pays(self): - new_bucket_name = 'w-requester-pays' + unique_resource_id('-') + new_bucket_name = "w-requester-pays" + unique_resource_id("-") created = retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name, requester_pays=True + ) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) self.assertTrue(created.requester_pays) with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) # Bucket will be deleted in-line below. self.case_buckets_to_delete.remove(new_bucket_name) @@ -213,16 +211,16 @@ def test_crud_bucket_with_requester_pays(self): # Exercise 'buckets.patch' w/ userProject. with_user_project.configure_website( - main_page_suffix='index.html', not_found_page='404.html') + main_page_suffix="index.html", not_found_page="404.html" + ) with_user_project.patch() self.assertEqual( - with_user_project._properties['website'], { - 'mainPageSuffix': 'index.html', - 'notFoundPage': '404.html', - }) + with_user_project._properties["website"], + {"mainPageSuffix": "index.html", "notFoundPage": "404.html"}, + ) # Exercise 'buckets.update' w/ userProject. - new_labels = {'another-label': 'another-value'} + new_labels = {"another-label": "another-value"} with_user_project.labels = new_labels with_user_project.update() self.assertEqual(with_user_project.labels, new_labels) @@ -231,63 +229,66 @@ def test_crud_bucket_with_requester_pays(self): # Exercise 'buckets.delete' w/ userProject. 
with_user_project.delete() - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_bucket_acls_iam_with_user_project(self): - new_bucket_name = 'acl-w-user-project' + unique_resource_id('-') - retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name = "acl-w-user-project" + unique_resource_id("-") + retry_429(Config.CLIENT.create_bucket)(new_bucket_name, requester_pays=True) self.case_buckets_to_delete.append(new_bucket_name) with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) # Exercise bucket ACL w/ userProject acl = with_user_project.acl acl.reload() acl.all().grant_read() acl.save() - self.assertIn('READER', acl.all().get_roles()) - del acl.entities['allUsers'] + self.assertIn("READER", acl.all().get_roles()) + del acl.entities["allUsers"] acl.save() - self.assertFalse(acl.has_entity('allUsers')) + self.assertFalse(acl.has_entity("allUsers")) # Exercise default object ACL w/ userProject doa = with_user_project.default_object_acl doa.reload() doa.all().grant_read() doa.save() - self.assertIn('READER', doa.all().get_roles()) + self.assertIn("READER", doa.all().get_roles()) # Exercise IAM w/ userProject - test_permissions = ['storage.buckets.get'] + test_permissions = ["storage.buckets.get"] self.assertEqual( - with_user_project.test_iam_permissions(test_permissions), - test_permissions) + with_user_project.test_iam_permissions(test_permissions), test_permissions + ) policy = with_user_project.get_iam_policy() - viewers = policy.setdefault('roles/storage.objectViewer', set()) + viewers = policy.setdefault("roles/storage.objectViewer", set()) viewers.add(policy.all_users()) with_user_project.set_iam_policy(policy) - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_copy_existing_file_with_user_project(self): - new_bucket_name = 'copy-w-requester-pays' + unique_resource_id('-') + new_bucket_name = "copy-w-requester-pays" + unique_resource_id("-") created = retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name, requester_pays=True + ) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) self.assertTrue(created.requester_pays) to_delete = [] - blob = storage.Blob('simple', bucket=created) - blob.upload_from_string(b'DEADBEEF') + blob = storage.Blob("simple", bucket=created) + blob.upload_from_string(b"DEADBEEF") to_delete.append(blob) try: with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) new_blob = retry_bad_copy(with_user_project.copy_blob)( - blob, with_user_project, 'simple-copy') + blob, with_user_project, "simple-copy" + ) to_delete.append(new_blob) base_contents = blob.download_as_string() @@ -297,24 +298,26 @@ def test_copy_existing_file_with_user_project(self): for blob in to_delete: retry_429(blob.delete)() - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_bucket_get_blob_with_user_project(self): - new_bucket_name = 'w-requester-pays' + unique_resource_id('-') - data = b'DEADBEEF' + new_bucket_name = "w-requester-pays" + unique_resource_id("-") + data = 
b"DEADBEEF" created = retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name, requester_pays=True + ) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) self.assertTrue(created.requester_pays) with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) - self.assertIsNone(with_user_project.get_blob('nonesuch')) - to_add = created.blob('blob-name') + self.assertIsNone(with_user_project.get_blob("nonesuch")) + to_add = created.blob("blob-name") to_add.upload_from_string(data) try: - found = with_user_project.get_blob('blob-name') + found = with_user_project.get_blob("blob-name") self.assertEqual(found.download_as_string(), data) finally: to_add.delete() @@ -324,101 +327,92 @@ class TestStorageFiles(unittest.TestCase): DIRNAME = os.path.realpath(os.path.dirname(__file__)) FILES = { - 'logo': { - 'path': DIRNAME + '/data/CloudPlatform_128px_Retina.png', - }, - 'big': { - 'path': DIRNAME + '/data/five-point-one-mb-file.zip', - }, - 'simple': { - 'path': DIRNAME + '/data/simple.txt', - } + "logo": {"path": DIRNAME + "/data/CloudPlatform_128px_Retina.png"}, + "big": {"path": DIRNAME + "/data/five-point-one-mb-file.zip"}, + "simple": {"path": DIRNAME + "/data/simple.txt"}, } @classmethod def setUpClass(cls): super(TestStorageFiles, cls).setUpClass() for file_data in cls.FILES.values(): - with open(file_data['path'], 'rb') as file_obj: - file_data['hash'] = _base64_md5hash(file_obj) + with open(file_data["path"], "rb") as file_obj: + file_data["hash"] = _base64_md5hash(file_obj) cls.bucket = Config.TEST_BUCKET def setUp(self): self.case_blobs_to_delete = [] def tearDown(self): - errors = ( - exceptions.TooManyRequests, - exceptions.ServiceUnavailable, - ) + errors = (exceptions.TooManyRequests, exceptions.ServiceUnavailable) retry = RetryErrors(errors, max_tries=6) for blob in self.case_blobs_to_delete: retry(blob.delete)() class TestStorageWriteFiles(TestStorageFiles): - ENCRYPTION_KEY = 'b23ff11bba187db8c37077e6af3b25b8' + ENCRYPTION_KEY = "b23ff11bba187db8c37077e6af3b25b8" def test_large_file_write_from_stream(self): - blob = self.bucket.blob('LargeFile') + blob = self.bucket.blob("LargeFile") - file_data = self.FILES['big'] - with open(file_data['path'], 'rb') as file_obj: + file_data = self.FILES["big"] + with open(file_data["path"], "rb") as file_obj: blob.upload_from_file(file_obj) self.case_blobs_to_delete.append(blob) md5_hash = blob.md5_hash if not isinstance(md5_hash, six.binary_type): - md5_hash = md5_hash.encode('utf-8') - self.assertEqual(md5_hash, file_data['hash']) + md5_hash = md5_hash.encode("utf-8") + self.assertEqual(md5_hash, file_data["hash"]) def test_large_encrypted_file_write_from_stream(self): - blob = self.bucket.blob('LargeFile', - encryption_key=self.ENCRYPTION_KEY) + blob = self.bucket.blob("LargeFile", encryption_key=self.ENCRYPTION_KEY) - file_data = self.FILES['big'] - with open(file_data['path'], 'rb') as file_obj: + file_data = self.FILES["big"] + with open(file_data["path"], "rb") as file_obj: blob.upload_from_file(file_obj) self.case_blobs_to_delete.append(blob) md5_hash = blob.md5_hash if not isinstance(md5_hash, six.binary_type): - md5_hash = md5_hash.encode('utf-8') - self.assertEqual(md5_hash, file_data['hash']) + md5_hash = md5_hash.encode("utf-8") + self.assertEqual(md5_hash, file_data["hash"]) temp_filename = tempfile.mktemp() - with open(temp_filename, 'wb') as file_obj: + with 
open(temp_filename, "wb") as file_obj: blob.download_to_file(file_obj) - with open(temp_filename, 'rb') as file_obj: + with open(temp_filename, "rb") as file_obj: md5_temp_hash = _base64_md5hash(file_obj) - self.assertEqual(md5_temp_hash, file_data['hash']) + self.assertEqual(md5_temp_hash, file_data["hash"]) def test_small_file_write_from_filename(self): - blob = self.bucket.blob('SmallFile') + blob = self.bucket.blob("SmallFile") - file_data = self.FILES['simple'] - blob.upload_from_filename(file_data['path']) + file_data = self.FILES["simple"] + blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(blob) md5_hash = blob.md5_hash if not isinstance(md5_hash, six.binary_type): - md5_hash = md5_hash.encode('utf-8') - self.assertEqual(md5_hash, file_data['hash']) + md5_hash = md5_hash.encode("utf-8") + self.assertEqual(md5_hash, file_data["hash"]) - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_crud_blob_w_user_project(self): with_user_project = Config.CLIENT.bucket( - self.bucket.name, user_project=USER_PROJECT) - blob = with_user_project.blob('SmallFile') + self.bucket.name, user_project=USER_PROJECT + ) + blob = with_user_project.blob("SmallFile") - file_data = self.FILES['simple'] - with open(file_data['path'], mode='rb') as to_read: + file_data = self.FILES["simple"] + with open(file_data["path"], mode="rb") as to_read: file_contents = to_read.read() # Exercise 'objects.insert' w/ userProject. - blob.upload_from_filename(file_data['path']) + blob.upload_from_filename(file_data["path"]) try: # Exercise 'objects.get' (metadata) w/ userProject. @@ -430,15 +424,12 @@ def test_crud_blob_w_user_project(self): self.assertEqual(downloaded, file_contents) # Exercise 'objects.patch' w/ userProject. - blob.content_language = 'en' + blob.content_language = "en" blob.patch() - self.assertEqual(blob.content_language, 'en') + self.assertEqual(blob.content_language, "en") # Exercise 'objects.update' w/ userProject. - metadata = { - 'foo': 'Foo', - 'bar': 'Bar', - } + metadata = {"foo": "Foo", "bar": "Bar"} blob.metadata = metadata blob.update() self.assertEqual(blob.metadata, metadata) @@ -446,15 +437,16 @@ def test_crud_blob_w_user_project(self): # Exercise 'objects.delete' (metadata) w/ userProject. 
blob.delete() - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_blob_acl_w_user_project(self): with_user_project = Config.CLIENT.bucket( - self.bucket.name, user_project=USER_PROJECT) - blob = with_user_project.blob('SmallFile') + self.bucket.name, user_project=USER_PROJECT + ) + blob = with_user_project.blob("SmallFile") - file_data = self.FILES['simple'] + file_data = self.FILES["simple"] - blob.upload_from_filename(file_data['path']) + blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(blob) # Exercise bucket ACL w/ userProject @@ -462,36 +454,35 @@ def test_blob_acl_w_user_project(self): acl.reload() acl.all().grant_read() acl.save() - self.assertIn('READER', acl.all().get_roles()) - del acl.entities['allUsers'] + self.assertIn("READER", acl.all().get_roles()) + del acl.entities["allUsers"] acl.save() - self.assertFalse(acl.has_entity('allUsers')) + self.assertFalse(acl.has_entity("allUsers")) def test_upload_blob_acl(self): - control = self.bucket.blob('logo') - control_data = self.FILES['logo'] + control = self.bucket.blob("logo") + control_data = self.FILES["logo"] - blob = self.bucket.blob('SmallFile') - file_data = self.FILES['simple'] + blob = self.bucket.blob("SmallFile") + file_data = self.FILES["simple"] try: - control.upload_from_filename(control_data['path']) - blob.upload_from_filename(file_data['path'], - predefined_acl='publicRead') + control.upload_from_filename(control_data["path"]) + blob.upload_from_filename(file_data["path"], predefined_acl="publicRead") finally: self.case_blobs_to_delete.append(blob) self.case_blobs_to_delete.append(control) control_acl = control.acl - self.assertNotIn('READER', control_acl.all().get_roles()) + self.assertNotIn("READER", control_acl.all().get_roles()) acl = blob.acl - self.assertIn('READER', acl.all().get_roles()) + self.assertIn("READER", acl.all().get_roles()) acl.all().revoke_read() self.assertSequenceEqual(acl.all().get_roles(), set([])) self.assertEqual(control_acl.all().get_roles(), acl.all().get_roles()) def test_write_metadata(self): - filename = self.FILES['logo']['path'] + filename = self.FILES["logo"]["path"] blob_name = os.path.basename(filename) blob = storage.Blob(blob_name, bucket=self.bucket) @@ -500,34 +491,35 @@ def test_write_metadata(self): # NOTE: This should not be necessary. We should be able to pass # it in to upload_file and also to upload_from_string. - blob.content_type = 'image/png' - self.assertEqual(blob.content_type, 'image/png') + blob.content_type = "image/png" + self.assertEqual(blob.content_type, "image/png") def test_direct_write_and_read_into_file(self): - blob = self.bucket.blob('MyBuffer') - file_contents = b'Hello World' + blob = self.bucket.blob("MyBuffer") + file_contents = b"Hello World" blob.upload_from_string(file_contents) self.case_blobs_to_delete.append(blob) - same_blob = self.bucket.blob('MyBuffer') + same_blob = self.bucket.blob("MyBuffer") same_blob.reload() # Initialize properties. 
temp_filename = tempfile.mktemp() - with open(temp_filename, 'wb') as file_obj: + with open(temp_filename, "wb") as file_obj: same_blob.download_to_file(file_obj) - with open(temp_filename, 'rb') as file_obj: + with open(temp_filename, "rb") as file_obj: stored_contents = file_obj.read() self.assertEqual(file_contents, stored_contents) def test_copy_existing_file(self): - filename = self.FILES['logo']['path'] - blob = storage.Blob('CloudLogo', bucket=self.bucket) + filename = self.FILES["logo"]["path"] + blob = storage.Blob("CloudLogo", bucket=self.bucket) blob.upload_from_filename(filename) self.case_blobs_to_delete.append(blob) new_blob = retry_bad_copy(self.bucket.copy_blob)( - blob, self.bucket, 'CloudLogoCopy') + blob, self.bucket, "CloudLogoCopy" + ) self.case_blobs_to_delete.append(new_blob) base_contents = blob.download_as_string() @@ -536,11 +528,10 @@ def test_copy_existing_file(self): class TestUnicode(unittest.TestCase): - - @unittest.skipIf(RUNNING_IN_VPCSC, 'Test is not VPCSC compatible.') + @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") def test_fetch_object_and_check_content(self): client = storage.Client() - bucket = client.bucket('storage-library-test-bucket') + bucket = client.bucket("storage-library-test-bucket") # Note: These files are public. # Normalization form C: a single character for e-acute; @@ -548,8 +539,8 @@ def test_fetch_object_and_check_content(self): # Normalization Form D: an ASCII e followed by U+0301 combining # character; URL should end with Caf%C3%A9 test_data = { - u'Caf\u00e9': b'Normalization Form C', - u'Cafe\u0301': b'Normalization Form D', + u"Caf\u00e9": b"Normalization Form C", + u"Cafe\u0301": b"Normalization Form D", } for blob_name, file_contents in test_data.items(): blob = bucket.blob(blob_name) @@ -559,7 +550,7 @@ def test_fetch_object_and_check_content(self): class TestStorageListFiles(TestStorageFiles): - FILENAMES = ('CloudLogo1', 'CloudLogo2', 'CloudLogo3') + FILENAMES = ("CloudLogo1", "CloudLogo2", "CloudLogo3") @classmethod def setUpClass(cls): @@ -567,23 +558,19 @@ def setUpClass(cls): # Make sure bucket empty before beginning. _empty_bucket(cls.bucket) - logo_path = cls.FILES['logo']['path'] + logo_path = cls.FILES["logo"]["path"] blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket) blob.upload_from_filename(logo_path) cls.suite_blobs_to_delete = [blob] # Copy main blob onto remaining in FILENAMES. 
for filename in cls.FILENAMES[1:]: - new_blob = retry_bad_copy(cls.bucket.copy_blob)( - blob, cls.bucket, filename) + new_blob = retry_bad_copy(cls.bucket.copy_blob)(blob, cls.bucket, filename) cls.suite_blobs_to_delete.append(new_blob) @classmethod def tearDownClass(cls): - errors = ( - exceptions.TooManyRequests, - exceptions.ServiceUnavailable, - ) + errors = (exceptions.TooManyRequests, exceptions.ServiceUnavailable) retry = RetryErrors(errors, max_tries=6) for blob in cls.suite_blobs_to_delete: retry(blob.delete)() @@ -591,17 +578,20 @@ def tearDownClass(cls): @RetryErrors(unittest.TestCase.failureException) def test_list_files(self): all_blobs = list(self.bucket.list_blobs()) - self.assertEqual(sorted(blob.name for blob in all_blobs), - sorted(self.FILENAMES)) + self.assertEqual( + sorted(blob.name for blob in all_blobs), sorted(self.FILENAMES) + ) - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") @RetryErrors(unittest.TestCase.failureException) def test_list_files_with_user_project(self): with_user_project = Config.CLIENT.bucket( - self.bucket.name, user_project=USER_PROJECT) + self.bucket.name, user_project=USER_PROJECT + ) all_blobs = list(with_user_project.list_blobs()) - self.assertEqual(sorted(blob.name for blob in all_blobs), - sorted(self.FILENAMES)) + self.assertEqual( + sorted(blob.name for blob in all_blobs), sorted(self.FILENAMES) + ) @RetryErrors(unittest.TestCase.failureException) def test_paginate_files(self): @@ -628,12 +618,12 @@ def test_paginate_files(self): class TestStoragePseudoHierarchy(TestStorageFiles): FILENAMES = ( - 'file01.txt', - 'parent/file11.txt', - 'parent/child/file21.txt', - 'parent/child/file22.txt', - 'parent/child/grand/file31.txt', - 'parent/child/other/file32.txt', + "file01.txt", + "parent/file11.txt", + "parent/child/file21.txt", + "parent/child/file22.txt", + "parent/child/grand/file31.txt", + "parent/child/other/file32.txt", ) @classmethod @@ -643,7 +633,7 @@ def setUpClass(cls): _empty_bucket(cls.bucket) cls.suite_blobs_to_delete = [] - simple_path = cls.FILES['simple']['path'] + simple_path = cls.FILES["simple"]["path"] for filename in cls.FILENAMES: blob = storage.Blob(filename, bucket=cls.bucket) blob.upload_from_filename(simple_path) @@ -651,10 +641,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - errors = ( - exceptions.TooManyRequests, - exceptions.ServiceUnavailable, - ) + errors = (exceptions.TooManyRequests, exceptions.ServiceUnavailable) retry = RetryErrors(errors, max_tries=6) for blob in cls.suite_blobs_to_delete: retry(blob.delete)() @@ -667,38 +654,34 @@ def test_blob_get_w_delimiter(self): @RetryErrors(unittest.TestCase.failureException) def test_root_level_w_delimiter(self): - iterator = self.bucket.list_blobs(delimiter='/') + iterator = self.bucket.list_blobs(delimiter="/") page = six.next(iterator.pages) blobs = list(page) - self.assertEqual([blob.name for blob in blobs], ['file01.txt']) + self.assertEqual([blob.name for blob in blobs], ["file01.txt"]) self.assertIsNone(iterator.next_page_token) - self.assertEqual(iterator.prefixes, set(['parent/'])) + self.assertEqual(iterator.prefixes, set(["parent/"])) @RetryErrors(unittest.TestCase.failureException) def test_first_level(self): - iterator = self.bucket.list_blobs(delimiter='/', prefix='parent/') + iterator = self.bucket.list_blobs(delimiter="/", prefix="parent/") page = six.next(iterator.pages) blobs = list(page) - self.assertEqual([blob.name for 
blob in blobs], ['parent/file11.txt']) + self.assertEqual([blob.name for blob in blobs], ["parent/file11.txt"]) self.assertIsNone(iterator.next_page_token) - self.assertEqual(iterator.prefixes, set(['parent/child/'])) + self.assertEqual(iterator.prefixes, set(["parent/child/"])) @RetryErrors(unittest.TestCase.failureException) def test_second_level(self): - expected_names = [ - 'parent/child/file21.txt', - 'parent/child/file22.txt', - ] + expected_names = ["parent/child/file21.txt", "parent/child/file22.txt"] - iterator = self.bucket.list_blobs(delimiter='/', - prefix='parent/child/') + iterator = self.bucket.list_blobs(delimiter="/", prefix="parent/child/") page = six.next(iterator.pages) blobs = list(page) - self.assertEqual([blob.name for blob in blobs], - expected_names) + self.assertEqual([blob.name for blob in blobs], expected_names) self.assertIsNone(iterator.next_page_token) - self.assertEqual(iterator.prefixes, - set(['parent/child/grand/', 'parent/child/other/'])) + self.assertEqual( + iterator.prefixes, set(["parent/child/grand/", "parent/child/other/"]) + ) @RetryErrors(unittest.TestCase.failureException) def test_third_level(self): @@ -706,83 +689,82 @@ def test_third_level(self): # of 1024 characters in the UTF-8 encoded name: # https://cloud.google.com/storage/docs/bucketnaming#objectnames # Exercise a layer deeper to illustrate this. - iterator = self.bucket.list_blobs(delimiter='/', - prefix='parent/child/grand/') + iterator = self.bucket.list_blobs(delimiter="/", prefix="parent/child/grand/") page = six.next(iterator.pages) blobs = list(page) - self.assertEqual([blob.name for blob in blobs], - ['parent/child/grand/file31.txt']) + self.assertEqual( + [blob.name for blob in blobs], ["parent/child/grand/file31.txt"] + ) self.assertIsNone(iterator.next_page_token) self.assertEqual(iterator.prefixes, set()) class TestStorageSignURLs(TestStorageFiles): - def setUp(self): super(TestStorageSignURLs, self).setUp() - logo_path = self.FILES['logo']['path'] - with open(logo_path, 'rb') as file_obj: + logo_path = self.FILES["logo"]["path"] + with open(logo_path, "rb") as file_obj: self.LOCAL_FILE = file_obj.read() - blob = self.bucket.blob('LogoToSign.jpg') + blob = self.bucket.blob("LogoToSign.jpg") blob.upload_from_string(self.LOCAL_FILE) self.case_blobs_to_delete.append(blob) def tearDown(self): - errors = ( - exceptions.TooManyRequests, - exceptions.ServiceUnavailable, - ) + errors = (exceptions.TooManyRequests, exceptions.ServiceUnavailable) retry = RetryErrors(errors, max_tries=6) for blob in self.case_blobs_to_delete: if blob.exists(): retry(blob.delete)() def test_create_signed_read_url(self): - blob = self.bucket.blob('LogoToSign.jpg') + blob = self.bucket.blob("LogoToSign.jpg") expiration = int(time.time() + 10) - signed_url = blob.generate_signed_url(expiration, method='GET', - client=Config.CLIENT) + signed_url = blob.generate_signed_url( + expiration, method="GET", client=Config.CLIENT + ) response = requests.get(signed_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, self.LOCAL_FILE) def test_create_signed_read_url_lowercase_method(self): - blob = self.bucket.blob('LogoToSign.jpg') + blob = self.bucket.blob("LogoToSign.jpg") expiration = int(time.time() + 10) - signed_url = blob.generate_signed_url(expiration, method='get', - client=Config.CLIENT) + signed_url = blob.generate_signed_url( + expiration, method="get", client=Config.CLIENT + ) response = requests.get(signed_url) self.assertEqual(response.status_code, 200) 
self.assertEqual(response.content, self.LOCAL_FILE) def test_create_signed_read_url_w_non_ascii_name(self): - blob = self.bucket.blob(u'Caf\xe9.txt') - payload = b'Test signed URL for blob w/ non-ASCII name' + blob = self.bucket.blob(u"Caf\xe9.txt") + payload = b"Test signed URL for blob w/ non-ASCII name" blob.upload_from_string(payload) self.case_blobs_to_delete.append(blob) expiration = int(time.time() + 10) - signed_url = blob.generate_signed_url(expiration, method='GET', - client=Config.CLIENT) + signed_url = blob.generate_signed_url( + expiration, method="GET", client=Config.CLIENT + ) response = requests.get(signed_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, payload) def test_create_signed_delete_url(self): - blob = self.bucket.blob('LogoToSign.jpg') + blob = self.bucket.blob("LogoToSign.jpg") expiration = int(time.time() + 283473274) - signed_delete_url = blob.generate_signed_url(expiration, - method='DELETE', - client=Config.CLIENT) + signed_delete_url = blob.generate_signed_url( + expiration, method="DELETE", client=Config.CLIENT + ) - response = requests.request('DELETE', signed_delete_url) + response = requests.request("DELETE", signed_delete_url) self.assertEqual(response.status_code, 204) - self.assertEqual(response.content, b'') + self.assertEqual(response.content, b"") # Check that the blob has actually been deleted. self.assertFalse(blob.exists()) @@ -793,18 +775,18 @@ class TestStorageCompose(TestStorageFiles): FILES = {} def test_compose_create_new_blob(self): - SOURCE_1 = b'AAA\n' - source_1 = self.bucket.blob('source-1') + SOURCE_1 = b"AAA\n" + source_1 = self.bucket.blob("source-1") source_1.upload_from_string(SOURCE_1) self.case_blobs_to_delete.append(source_1) - SOURCE_2 = b'BBB\n' - source_2 = self.bucket.blob('source-2') + SOURCE_2 = b"BBB\n" + source_2 = self.bucket.blob("source-2") source_2.upload_from_string(SOURCE_2) self.case_blobs_to_delete.append(source_2) - destination = self.bucket.blob('destination') - destination.content_type = 'text/plain' + destination = self.bucket.blob("destination") + destination.content_type = "text/plain" destination.compose([source_1, source_2]) self.case_blobs_to_delete.append(destination) @@ -812,17 +794,17 @@ def test_compose_create_new_blob(self): self.assertEqual(composed, SOURCE_1 + SOURCE_2) def test_compose_create_new_blob_wo_content_type(self): - SOURCE_1 = b'AAA\n' - source_1 = self.bucket.blob('source-1') + SOURCE_1 = b"AAA\n" + source_1 = self.bucket.blob("source-1") source_1.upload_from_string(SOURCE_1) self.case_blobs_to_delete.append(source_1) - SOURCE_2 = b'BBB\n' - source_2 = self.bucket.blob('source-2') + SOURCE_2 = b"BBB\n" + source_2 = self.bucket.blob("source-2") source_2.upload_from_string(SOURCE_2) self.case_blobs_to_delete.append(source_2) - destination = self.bucket.blob('destination') + destination = self.bucket.blob("destination") destination.compose([source_1, source_2]) self.case_blobs_to_delete.append(destination) @@ -832,14 +814,14 @@ def test_compose_create_new_blob_wo_content_type(self): self.assertEqual(composed, SOURCE_1 + SOURCE_2) def test_compose_replace_existing_blob(self): - BEFORE = b'AAA\n' - original = self.bucket.blob('original') - original.content_type = 'text/plain' + BEFORE = b"AAA\n" + original = self.bucket.blob("original") + original.content_type = "text/plain" original.upload_from_string(BEFORE) self.case_blobs_to_delete.append(original) - TO_APPEND = b'BBB\n' - to_append = self.bucket.blob('to_append') + TO_APPEND = b"BBB\n" + to_append = 
self.bucket.blob("to_append") to_append.upload_from_string(TO_APPEND) self.case_blobs_to_delete.append(to_append) @@ -848,25 +830,27 @@ def test_compose_replace_existing_blob(self): composed = original.download_as_string() self.assertEqual(composed, BEFORE + TO_APPEND) - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_compose_with_user_project(self): - new_bucket_name = 'compose-user-project' + unique_resource_id('-') + new_bucket_name = "compose-user-project" + unique_resource_id("-") created = retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name, requester_pays=True + ) try: - SOURCE_1 = b'AAA\n' - source_1 = created.blob('source-1') + SOURCE_1 = b"AAA\n" + source_1 = created.blob("source-1") source_1.upload_from_string(SOURCE_1) - SOURCE_2 = b'BBB\n' - source_2 = created.blob('source-2') + SOURCE_2 = b"BBB\n" + source_2 = created.blob("source-2") source_2.upload_from_string(SOURCE_2) with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) - destination = with_user_project.blob('destination') - destination.content_type = 'text/plain' + destination = with_user_project.blob("destination") + destination.content_type = "text/plain" destination.compose([source_1, source_2]) composed = destination.download_as_string() @@ -877,20 +861,18 @@ def test_compose_with_user_project(self): class TestStorageRewrite(TestStorageFiles): - FILENAMES = ( - 'file01.txt', - ) + FILENAMES = ("file01.txt",) def test_rewrite_create_new_blob_add_encryption_key(self): - file_data = self.FILES['simple'] + file_data = self.FILES["simple"] - source = self.bucket.blob('source') - source.upload_from_filename(file_data['path']) + source = self.bucket.blob("source") + source.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(source) source_data = source.download_as_string() KEY = os.urandom(32) - dest = self.bucket.blob('dest', encryption_key=KEY) + dest = self.bucket.blob("dest", encryption_key=KEY) token, rewritten, total = dest.rewrite(source) self.case_blobs_to_delete.append(dest) @@ -898,16 +880,15 @@ def test_rewrite_create_new_blob_add_encryption_key(self): self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(source.download_as_string(), - dest.download_as_string()) + self.assertEqual(source.download_as_string(), dest.download_as_string()) def test_rewrite_rotate_encryption_key(self): - BLOB_NAME = 'rotating-keys' - file_data = self.FILES['simple'] + BLOB_NAME = "rotating-keys" + file_data = self.FILES["simple"] SOURCE_KEY = os.urandom(32) source = self.bucket.blob(BLOB_NAME, encryption_key=SOURCE_KEY) - source.upload_from_filename(file_data['path']) + source.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(source) source_data = source.download_as_string() @@ -923,48 +904,50 @@ def test_rewrite_rotate_encryption_key(self): self.assertEqual(dest.download_as_string(), source_data) - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_rewrite_add_key_with_user_project(self): - file_data = self.FILES['simple'] - new_bucket_name = 'rewrite-key-up' + unique_resource_id('-') + file_data = self.FILES["simple"] + new_bucket_name = "rewrite-key-up" + unique_resource_id("-") created = 
retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name, requester_pays=True + ) try: with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) - source = with_user_project.blob('source') - source.upload_from_filename(file_data['path']) + source = with_user_project.blob("source") + source.upload_from_filename(file_data["path"]) source_data = source.download_as_string() KEY = os.urandom(32) - dest = with_user_project.blob('dest', encryption_key=KEY) + dest = with_user_project.blob("dest", encryption_key=KEY) token, rewritten, total = dest.rewrite(source) self.assertEqual(token, None) self.assertEqual(rewritten, len(source_data)) self.assertEqual(total, len(source_data)) - self.assertEqual(source.download_as_string(), - dest.download_as_string()) + self.assertEqual(source.download_as_string(), dest.download_as_string()) finally: retry_429(created.delete)(force=True) - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_rewrite_rotate_with_user_project(self): - BLOB_NAME = 'rotating-keys' - file_data = self.FILES['simple'] - new_bucket_name = 'rewrite-rotate-up' + unique_resource_id('-') + BLOB_NAME = "rotating-keys" + file_data = self.FILES["simple"] + new_bucket_name = "rewrite-rotate-up" + unique_resource_id("-") created = retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name, requester_pays=True + ) try: with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) SOURCE_KEY = os.urandom(32) - source = with_user_project.blob( - BLOB_NAME, encryption_key=SOURCE_KEY) - source.upload_from_filename(file_data['path']) + source = with_user_project.blob(BLOB_NAME, encryption_key=SOURCE_KEY) + source.upload_from_filename(file_data["path"]) source_data = source.download_as_string() DEST_KEY = os.urandom(32) @@ -983,17 +966,13 @@ def test_rewrite_rotate_with_user_project(self): class TestStorageNotificationCRUD(unittest.TestCase): topic = None - TOPIC_NAME = 'notification' + unique_resource_id('-') - CUSTOM_ATTRIBUTES = { - 'attr1': 'value1', - 'attr2': 'value2', - } - BLOB_NAME_PREFIX = 'blob-name-prefix/' + TOPIC_NAME = "notification" + unique_resource_id("-") + CUSTOM_ATTRIBUTES = {"attr1": "value1", "attr2": "value2"} + BLOB_NAME_PREFIX = "blob-name-prefix/" @property def topic_path(self): - return 'projects/{}/topics/{}'.format( - Config.CLIENT.project, self.TOPIC_NAME) + return "projects/{}/topics/{}".format(Config.CLIENT.project, self.TOPIC_NAME) def _initialize_topic(self): try: @@ -1004,10 +983,10 @@ def _initialize_topic(self): retry_429(self.publisher_client.create_topic)(self.topic_path) policy = self.publisher_client.get_iam_policy(self.topic_path) binding = policy.bindings.add() - binding.role = 'roles/pubsub.publisher' + binding.role = "roles/pubsub.publisher" binding.members.append( - 'serviceAccount:{}'.format( - Config.CLIENT.get_service_account_email())) + "serviceAccount:{}".format(Config.CLIENT.get_service_account_email()) + ) self.publisher_client.set_iam_policy(self.topic_path, policy) def setUp(self): @@ -1025,19 +1004,19 @@ def tearDown(self): def event_types(): from google.cloud.storage.notification import ( OBJECT_FINALIZE_EVENT_TYPE, - OBJECT_DELETE_EVENT_TYPE) + OBJECT_DELETE_EVENT_TYPE, + ) return [OBJECT_FINALIZE_EVENT_TYPE, 
OBJECT_DELETE_EVENT_TYPE] @staticmethod def payload_format(): - from google.cloud.storage.notification import ( - JSON_API_V1_PAYLOAD_FORMAT) + from google.cloud.storage.notification import JSON_API_V1_PAYLOAD_FORMAT return JSON_API_V1_PAYLOAD_FORMAT def test_notification_minimal(self): - new_bucket_name = 'notification-minimal' + unique_resource_id('-') + new_bucket_name = "notification-minimal" + unique_resource_id("-") bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(list(bucket.list_notifications()), []) @@ -1053,7 +1032,7 @@ def test_notification_minimal(self): notification.delete() def test_notification_explicit(self): - new_bucket_name = 'notification-explicit' + unique_resource_id('-') + new_bucket_name = "notification-explicit" + unique_resource_id("-") bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) notification = bucket.notification( @@ -1067,24 +1046,21 @@ def test_notification_explicit(self): try: self.assertTrue(notification.exists()) self.assertIsNotNone(notification.notification_id) - self.assertEqual( - notification.custom_attributes, self.CUSTOM_ATTRIBUTES) + self.assertEqual(notification.custom_attributes, self.CUSTOM_ATTRIBUTES) self.assertEqual(notification.event_types, self.event_types()) - self.assertEqual( - notification.blob_name_prefix, self.BLOB_NAME_PREFIX) - self.assertEqual( - notification.payload_format, self.payload_format()) + self.assertEqual(notification.blob_name_prefix, self.BLOB_NAME_PREFIX) + self.assertEqual(notification.payload_format, self.payload_format()) finally: notification.delete() - @unittest.skipUnless(USER_PROJECT, 'USER_PROJECT not set in environment.') + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_notification_w_user_project(self): - new_bucket_name = 'notification-minimal' + unique_resource_id('-') - retry_429(Config.CLIENT.create_bucket)( - new_bucket_name, requester_pays=True) + new_bucket_name = "notification-minimal" + unique_resource_id("-") + retry_429(Config.CLIENT.create_bucket)(new_bucket_name, requester_pays=True) self.case_buckets_to_delete.append(new_bucket_name) with_user_project = Config.CLIENT.bucket( - new_bucket_name, user_project=USER_PROJECT) + new_bucket_name, user_project=USER_PROJECT + ) self.assertEqual(list(with_user_project.list_notifications()), []) notification = with_user_project.notification(self.TOPIC_NAME) retry_429(notification.create)() @@ -1100,9 +1076,9 @@ def test_notification_w_user_project(self): class TestAnonymousClient(unittest.TestCase): - PUBLIC_BUCKET = 'gcp-public-data-landsat' + PUBLIC_BUCKET = "gcp-public-data-landsat" - @unittest.skipIf(RUNNING_IN_VPCSC, 'Test is not VPCSC compatible.') + @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") def test_access_to_public_bucket(self): anonymous = storage.Client.create_anonymous_client() bucket = anonymous.bucket(self.PUBLIC_BUCKET) @@ -1113,24 +1089,17 @@ def test_access_to_public_bucket(self): class TestKMSIntegration(TestStorageFiles): - FILENAMES = ( - 'file01.txt', - ) + FILENAMES = ("file01.txt",) - KEYRING_NAME = 'gcs-test' - KEY_NAME = 'gcs-test' - ALT_KEY_NAME = 'gcs-test-alternate' + KEYRING_NAME = "gcs-test" + KEY_NAME = "gcs-test" + ALT_KEY_NAME = "gcs-test-alternate" def _kms_key_name(self, key_name=None): if key_name is None: key_name = self.KEY_NAME - return ( - "projects/{}/" - "locations/{}/" - "keyRings/{}/" - 
"cryptoKeys/{}" - ).format( + return ("projects/{}/" "locations/{}/" "keyRings/{}/" "cryptoKeys/{}").format( Config.CLIENT.project, self.bucket.location.lower(), self.KEYRING_NAME, @@ -1164,9 +1133,7 @@ def setUp(self): "bindings": [ { "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter", - "members": [ - "serviceAccount:" + service_account, - ] + "members": ["serviceAccount:" + service_account], } ] } @@ -1174,30 +1141,29 @@ def setUp(self): # Populate the keyring with the keys we use in the tests key_names = [ - 'gcs-test', - 'gcs-test-alternate', - 'explicit-kms-key-name', - 'default-kms-key-name', - 'override-default-kms-key-name', - 'alt-default-kms-key-name', + "gcs-test", + "gcs-test-alternate", + "explicit-kms-key-name", + "default-kms-key-name", + "override-default-kms-key-name", + "alt-default-kms-key-name", ] for key_name in key_names: - key_path = client.crypto_key_path( - project, location, keyring_name, key_name) + key_path = client.crypto_key_path(project, location, keyring_name, key_name) try: client.get_crypto_key(key_path) except exceptions.NotFound: - key = {'purpose': purpose} + key = {"purpose": purpose} client.create_crypto_key(keyring_path, key_name, key) def test_blob_w_explicit_kms_key_name(self): - BLOB_NAME = 'explicit-kms-key-name' - file_data = self.FILES['simple'] + BLOB_NAME = "explicit-kms-key-name" + file_data = self.FILES["simple"] kms_key_name = self._kms_key_name() blob = self.bucket.blob(BLOB_NAME, kms_key_name=kms_key_name) - blob.upload_from_filename(file_data['path']) + blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(blob) - with open(file_data['path'], 'rb') as _file_data: + with open(file_data["path"], "rb") as _file_data: self.assertEqual(blob.download_as_string(), _file_data.read()) # We don't know the current version of the key. 
self.assertTrue(blob.kms_key_name.startswith(kms_key_name)) @@ -1206,14 +1172,14 @@ def test_blob_w_explicit_kms_key_name(self): self.assertTrue(listed.kms_key_name.startswith(kms_key_name)) def test_bucket_w_default_kms_key_name(self): - BLOB_NAME = 'default-kms-key-name' - OVERRIDE_BLOB_NAME = 'override-default-kms-key-name' - ALT_BLOB_NAME = 'alt-default-kms-key-name' - CLEARTEXT_BLOB_NAME = 'cleartext' + BLOB_NAME = "default-kms-key-name" + OVERRIDE_BLOB_NAME = "override-default-kms-key-name" + ALT_BLOB_NAME = "alt-default-kms-key-name" + CLEARTEXT_BLOB_NAME = "cleartext" - file_data = self.FILES['simple'] + file_data = self.FILES["simple"] - with open(file_data['path'], 'rb') as _file_data: + with open(file_data["path"], "rb") as _file_data: contents = _file_data.read() kms_key_name = self._kms_key_name() @@ -1222,7 +1188,7 @@ def test_bucket_w_default_kms_key_name(self): self.assertEqual(self.bucket.default_kms_key_name, kms_key_name) defaulted_blob = self.bucket.blob(BLOB_NAME) - defaulted_blob.upload_from_filename(file_data['path']) + defaulted_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(defaulted_blob) self.assertEqual(defaulted_blob.download_as_string(), contents) @@ -1232,20 +1198,20 @@ def test_bucket_w_default_kms_key_name(self): alt_kms_key_name = self._kms_key_name(self.ALT_KEY_NAME) override_blob = self.bucket.blob( - OVERRIDE_BLOB_NAME, kms_key_name=alt_kms_key_name) - override_blob.upload_from_filename(file_data['path']) + OVERRIDE_BLOB_NAME, kms_key_name=alt_kms_key_name + ) + override_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(override_blob) self.assertEqual(override_blob.download_as_string(), contents) # We don't know the current version of the key. - self.assertTrue( - override_blob.kms_key_name.startswith(alt_kms_key_name)) + self.assertTrue(override_blob.kms_key_name.startswith(alt_kms_key_name)) self.bucket.default_kms_key_name = alt_kms_key_name self.bucket.patch() alt_blob = self.bucket.blob(ALT_BLOB_NAME) - alt_blob.upload_from_filename(file_data['path']) + alt_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(alt_blob) self.assertEqual(alt_blob.download_as_string(), contents) @@ -1256,19 +1222,19 @@ def test_bucket_w_default_kms_key_name(self): self.bucket.patch() cleartext_blob = self.bucket.blob(CLEARTEXT_BLOB_NAME) - cleartext_blob.upload_from_filename(file_data['path']) + cleartext_blob.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(cleartext_blob) self.assertEqual(cleartext_blob.download_as_string(), contents) self.assertIsNone(cleartext_blob.kms_key_name) def test_rewrite_rotate_csek_to_cmek(self): - BLOB_NAME = 'rotating-keys' - file_data = self.FILES['simple'] + BLOB_NAME = "rotating-keys" + file_data = self.FILES["simple"] SOURCE_KEY = os.urandom(32) source = self.bucket.blob(BLOB_NAME, encryption_key=SOURCE_KEY) - source.upload_from_filename(file_data['path']) + source.upload_from_filename(file_data["path"]) self.case_blobs_to_delete.append(source) source_data = source.download_as_string() @@ -1296,7 +1262,6 @@ def test_rewrite_rotate_csek_to_cmek(self): class TestRetentionPolicy(unittest.TestCase): - def setUp(self): self.case_buckets_to_delete = [] @@ -1311,7 +1276,7 @@ def test_bucket_w_retention_period(self): period_secs = 10 - new_bucket_name = 'w-retention-period' + unique_resource_id('-') + new_bucket_name = "w-retention-period" + unique_resource_id("-") bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) 
self.case_buckets_to_delete.append(new_bucket_name) @@ -1320,13 +1285,12 @@ def test_bucket_w_retention_period(self): bucket.patch() self.assertEqual(bucket.retention_period, period_secs) - self.assertIsInstance( - bucket.retention_policy_effective_time, datetime.datetime) + self.assertIsInstance(bucket.retention_policy_effective_time, datetime.datetime) self.assertFalse(bucket.default_event_based_hold) self.assertFalse(bucket.retention_policy_locked) - blob_name = 'test-blob' - payload = b'DEADBEEF' + blob_name = "test-blob" + payload = b"DEADBEEF" blob = bucket.blob(blob_name) blob.upload_from_string(payload) @@ -1334,8 +1298,7 @@ def test_bucket_w_retention_period(self): self.assertFalse(other.event_based_hold) self.assertFalse(other.temporary_hold) - self.assertIsInstance( - other.retention_expiration_time, datetime.datetime) + self.assertIsInstance(other.retention_expiration_time, datetime.datetime) with self.assertRaises(exceptions.Forbidden): other.delete() @@ -1359,9 +1322,10 @@ def test_bucket_w_retention_period(self): def test_bucket_w_default_event_based_hold(self): from google.api_core import exceptions - new_bucket_name = 'w-def-ebh' + unique_resource_id('-') - self.assertRaises(exceptions.NotFound, - Config.CLIENT.get_bucket, new_bucket_name) + new_bucket_name = "w-def-ebh" + unique_resource_id("-") + self.assertRaises( + exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name + ) bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) @@ -1373,8 +1337,8 @@ def test_bucket_w_default_event_based_hold(self): self.assertIsNone(bucket.retention_policy_effective_time) self.assertFalse(bucket.retention_policy_locked) - blob_name = 'test-blob' - payload = b'DEADBEEF' + blob_name = "test-blob" + payload = b"DEADBEEF" blob = bucket.blob(blob_name) blob.upload_from_string(payload) @@ -1410,14 +1374,15 @@ def test_bucket_w_default_event_based_hold(self): def test_blob_w_temporary_hold(self): from google.api_core import exceptions - new_bucket_name = 'w-tmp-hold' + unique_resource_id('-') - self.assertRaises(exceptions.NotFound, - Config.CLIENT.get_bucket, new_bucket_name) + new_bucket_name = "w-tmp-hold" + unique_resource_id("-") + self.assertRaises( + exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name + ) bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) - blob_name = 'test-blob' - payload = b'DEADBEEF' + blob_name = "test-blob" + payload = b"DEADBEEF" blob = bucket.blob(blob_name) blob.upload_from_string(payload) @@ -1443,9 +1408,10 @@ def test_bucket_lock_retention_policy(self): period_secs = 10 - new_bucket_name = 'loc-ret-policy' + unique_resource_id('-') - self.assertRaises(exceptions.NotFound, - Config.CLIENT.get_bucket, new_bucket_name) + new_bucket_name = "loc-ret-policy" + unique_resource_id("-") + self.assertRaises( + exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name + ) bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) @@ -1453,8 +1419,7 @@ def test_bucket_lock_retention_policy(self): bucket.patch() self.assertEqual(bucket.retention_period, period_secs) - self.assertIsInstance( - bucket.retention_policy_effective_time, datetime.datetime) + self.assertIsInstance(bucket.retention_policy_effective_time, datetime.datetime) self.assertFalse(bucket.default_event_based_hold) self.assertFalse(bucket.retention_policy_locked) diff --git 
a/storage/tests/unit/test__helpers.py b/storage/tests/unit/test__helpers.py index 04a509c7bf8f..70065ab3eac2 100644 --- a/storage/tests/unit/test__helpers.py +++ b/storage/tests/unit/test__helpers.py @@ -16,7 +16,6 @@ class Test_PropertyMixin(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage._helpers import _PropertyMixin @@ -27,7 +26,6 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def _derivedClass(self, path=None, user_project=None): - class Derived(self._get_target_class()): client = None @@ -58,45 +56,48 @@ def test_user_project_is_abstract(self): mixin.user_project def test_reload(self): - connection = _Connection({'foo': 'Foo'}) + connection = _Connection({"foo": "Foo"}) client = _Client(connection) - derived = self._derivedClass('/path')() + derived = self._derivedClass("/path")() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. derived._changes = object() derived.reload(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) + self.assertEqual(derived._properties, {"foo": "Foo"}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/path', - 'query_params': {'projection': 'noAcl'}, - '_target_object': derived, - }) + self.assertEqual( + kw[0], + { + "method": "GET", + "path": "/path", + "query_params": {"projection": "noAcl"}, + "_target_object": derived, + }, + ) self.assertEqual(derived._changes, set()) def test_reload_w_user_project(self): - user_project = 'user-project-123' - connection = _Connection({'foo': 'Foo'}) + user_project = "user-project-123" + connection = _Connection({"foo": "Foo"}) client = _Client(connection) - derived = self._derivedClass('/path', user_project)() + derived = self._derivedClass("/path", user_project)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. derived._changes = object() derived.reload(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) + self.assertEqual(derived._properties, {"foo": "Foo"}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/path', - 'query_params': { - 'projection': 'noAcl', - 'userProject': user_project, + self.assertEqual( + kw[0], + { + "method": "GET", + "path": "/path", + "query_params": {"projection": "noAcl", "userProject": user_project}, + "_target_object": derived, }, - '_target_object': derived, - }) + ) self.assertEqual(derived._changes, set()) def test__set_properties(self): @@ -108,138 +109,137 @@ def test__set_properties(self): def test__patch_property(self): derived = self._derivedClass()() - derived._patch_property('foo', 'Foo') - self.assertEqual(derived._properties, {'foo': 'Foo'}) + derived._patch_property("foo", "Foo") + self.assertEqual(derived._properties, {"foo": "Foo"}) def test_patch(self): - connection = _Connection({'foo': 'Foo'}) + connection = _Connection({"foo": "Foo"}) client = _Client(connection) - derived = self._derivedClass('/path')() + derived = self._derivedClass("/path")() # Make sure changes is non-empty, so we can observe a change. BAR = object() BAZ = object() - derived._properties = {'bar': BAR, 'baz': BAZ} - derived._changes = set(['bar']) # Ignore baz. + derived._properties = {"bar": BAR, "baz": BAZ} + derived._changes = set(["bar"]) # Ignore baz. 
derived.patch(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) + self.assertEqual(derived._properties, {"foo": "Foo"}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/path', - 'query_params': {'projection': 'full'}, - # Since changes does not include `baz`, we don't see it sent. - 'data': {'bar': BAR}, - '_target_object': derived, - }) + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/path", + "query_params": {"projection": "full"}, + # Since changes does not include `baz`, we don't see it sent. + "data": {"bar": BAR}, + "_target_object": derived, + }, + ) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) def test_patch_w_user_project(self): - user_project = 'user-project-123' - connection = _Connection({'foo': 'Foo'}) + user_project = "user-project-123" + connection = _Connection({"foo": "Foo"}) client = _Client(connection) - derived = self._derivedClass('/path', user_project)() + derived = self._derivedClass("/path", user_project)() # Make sure changes is non-empty, so we can observe a change. BAR = object() BAZ = object() - derived._properties = {'bar': BAR, 'baz': BAZ} - derived._changes = set(['bar']) # Ignore baz. + derived._properties = {"bar": BAR, "baz": BAZ} + derived._changes = set(["bar"]) # Ignore baz. derived.patch(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) + self.assertEqual(derived._properties, {"foo": "Foo"}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/path', - 'query_params': { - 'projection': 'full', - 'userProject': user_project, + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/path", + "query_params": {"projection": "full", "userProject": user_project}, + # Since changes does not include `baz`, we don't see it sent. + "data": {"bar": BAR}, + "_target_object": derived, }, - # Since changes does not include `baz`, we don't see it sent. - 'data': {'bar': BAR}, - '_target_object': derived, - }) + ) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) def test_update(self): - connection = _Connection({'foo': 'Foo'}) + connection = _Connection({"foo": "Foo"}) client = _Client(connection) - derived = self._derivedClass('/path')() + derived = self._derivedClass("/path")() # Make sure changes is non-empty, so we can observe a change. BAR = object() BAZ = object() - derived._properties = {'bar': BAR, 'baz': BAZ} - derived._changes = set(['bar']) # Update sends 'baz' anyway. + derived._properties = {"bar": BAR, "baz": BAZ} + derived._changes = set(["bar"]) # Update sends 'baz' anyway. derived.update(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) + self.assertEqual(derived._properties, {"foo": "Foo"}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '/path') - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - self.assertEqual(kw[0]['data'], {'bar': BAR, 'baz': BAZ}) + self.assertEqual(kw[0]["method"], "PUT") + self.assertEqual(kw[0]["path"], "/path") + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) + self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ}) # Make sure changes get reset by patch(). 
self.assertEqual(derived._changes, set()) def test_update_w_user_project(self): - user_project = 'user-project-123' - connection = _Connection({'foo': 'Foo'}) + user_project = "user-project-123" + connection = _Connection({"foo": "Foo"}) client = _Client(connection) - derived = self._derivedClass('/path', user_project)() + derived = self._derivedClass("/path", user_project)() # Make sure changes is non-empty, so we can observe a change. BAR = object() BAZ = object() - derived._properties = {'bar': BAR, 'baz': BAZ} - derived._changes = set(['bar']) # Update sends 'baz' anyway. + derived._properties = {"bar": BAR, "baz": BAZ} + derived._changes = set(["bar"]) # Update sends 'baz' anyway. derived.update(client=client) - self.assertEqual(derived._properties, {'foo': 'Foo'}) + self.assertEqual(derived._properties, {"foo": "Foo"}) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]["method"], "PUT") + self.assertEqual(kw[0]["path"], "/path") self.assertEqual( - kw[0]['query_params'], { - 'projection': 'full', - 'userProject': user_project, - }) - self.assertEqual(kw[0]['data'], {'bar': BAR, 'baz': BAZ}) + kw[0]["query_params"], {"projection": "full", "userProject": user_project} + ) + self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ}) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) class Test__scalar_property(unittest.TestCase): - def _call_fut(self, fieldName): from google.cloud.storage._helpers import _scalar_property return _scalar_property(fieldName) def test_getter(self): - class Test(object): def __init__(self, **kw): self._properties = kw.copy() - do_re_mi = self._call_fut('solfege') - test = Test(solfege='Latido') - self.assertEqual(test.do_re_mi, 'Latido') + do_re_mi = self._call_fut("solfege") - def test_setter(self): + test = Test(solfege="Latido") + self.assertEqual(test.do_re_mi, "Latido") + def test_setter(self): class Test(object): def _patch_property(self, name, value): self._patched = (name, value) - do_re_mi = self._call_fut('solfege') + + do_re_mi = self._call_fut("solfege") test = Test() - test.do_re_mi = 'Latido' - self.assertEqual(test._patched, ('solfege', 'Latido')) + test.do_re_mi = "Latido" + self.assertEqual(test._patched, ("solfege", "Latido")) class Test__base64_md5hash(unittest.TestCase): - def _call_fut(self, bytes_to_sign): from google.cloud.storage._helpers import _base64_md5hash @@ -248,19 +248,18 @@ def _call_fut(self, bytes_to_sign): def test_it(self): from io import BytesIO - BYTES_TO_SIGN = b'FOO' + BYTES_TO_SIGN = b"FOO" BUFFER = BytesIO() BUFFER.write(BYTES_TO_SIGN) BUFFER.seek(0) SIGNED_CONTENT = self._call_fut(BUFFER) - self.assertEqual(SIGNED_CONTENT, b'kBiQqOnIz21aGlQrIp/r/w==') + self.assertEqual(SIGNED_CONTENT, b"kBiQqOnIz21aGlQrIp/r/w==") def test_it_with_stubs(self): import mock class _Buffer(object): - def __init__(self, return_vals): self.return_vals = return_vals self._block_sizes = [] @@ -271,13 +270,13 @@ def read(self, block_size): BASE64 = _Base64() DIGEST_VAL = object() - BYTES_TO_SIGN = b'BYTES_TO_SIGN' - BUFFER = _Buffer([b'', BYTES_TO_SIGN]) + BYTES_TO_SIGN = b"BYTES_TO_SIGN" + BUFFER = _Buffer([b"", BYTES_TO_SIGN]) MD5 = _MD5(DIGEST_VAL) patch = mock.patch.multiple( - 'google.cloud.storage._helpers', - base64=BASE64, md5=MD5) + "google.cloud.storage._helpers", base64=BASE64, md5=MD5 + ) with patch: SIGNED_CONTENT = self._call_fut(BUFFER) @@ -290,7 +289,6 @@ def read(self, 
block_size): class _Connection(object): - def __init__(self, *responses): self._responses = responses self._requested = [] @@ -302,7 +300,6 @@ def api_request(self, **kw): class _MD5Hash(object): - def __init__(self, digest_val): self.digest_val = digest_val self.num_digest_calls = 0 @@ -317,7 +314,6 @@ def digest(self): class _MD5(object): - def __init__(self, digest_val): self.hash_obj = _MD5Hash(digest_val) self._called = [] @@ -328,7 +324,6 @@ def __call__(self, data=None): class _Base64(object): - def __init__(self): self._called_b64encode = [] @@ -338,6 +333,5 @@ def b64encode(self, value): class _Client(object): - def __init__(self, connection): self._connection = connection diff --git a/storage/tests/unit/test__http.py b/storage/tests/unit/test__http.py index 645ec2d1b0cc..ca9ef850b7b3 100644 --- a/storage/tests/unit/test__http.py +++ b/storage/tests/unit/test__http.py @@ -18,7 +18,6 @@ class TestConnection(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage._http import Connection @@ -37,49 +36,39 @@ def test_extra_headers(self): http = mock.create_autospec(requests.Session, instance=True) response = requests.Response() response.status_code = 200 - data = b'brent-spiner' + data = b"brent-spiner" response._content = data http.request.return_value = response - client = mock.Mock(_http=http, spec=['_http']) + client = mock.Mock(_http=http, spec=["_http"]) conn = self._make_one(client) - req_data = 'hey-yoooouuuuu-guuuuuyyssss' - result = conn.api_request( - 'GET', '/rainbow', data=req_data, expect_json=False) + req_data = "hey-yoooouuuuu-guuuuuyyssss" + result = conn.api_request("GET", "/rainbow", data=req_data, expect_json=False) self.assertEqual(result, data) expected_headers = { - 'Accept-Encoding': 'gzip', + "Accept-Encoding": "gzip", base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - 'User-Agent': conn.USER_AGENT, + "User-Agent": conn.USER_AGENT, } - expected_uri = conn.build_api_url('/rainbow') + expected_uri = conn.build_api_url("/rainbow") http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method='GET', - url=expected_uri, + data=req_data, headers=expected_headers, method="GET", url=expected_uri ) def test_build_api_url_no_extra_query_params(self): conn = self._make_one(object()) - URI = '/'.join([ - conn.API_BASE_URL, - 'storage', - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo'), URI) + URI = "/".join([conn.API_BASE_URL, "storage", conn.API_VERSION, "foo"]) + self.assertEqual(conn.build_api_url("/foo"), URI) def test_build_api_url_w_extra_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit conn = self._make_one(object()) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) + uri = conn.build_api_url("/foo", {"bar": "baz"}) scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, - '/'.join(['', 'storage', conn.API_VERSION, 'foo'])) + self.assertEqual("%s://%s" % (scheme, netloc), conn.API_BASE_URL) + self.assertEqual(path, "/".join(["", "storage", conn.API_VERSION, "foo"])) parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') + self.assertEqual(parms["bar"], "baz") diff --git a/storage/tests/unit/test__signing.py b/storage/tests/unit/test__signing.py index 5cedf2aeb454..01922ca97849 100644 --- a/storage/tests/unit/test__signing.py +++ b/storage/tests/unit/test__signing.py @@ -24,7 +24,6 @@ class 
Test_get_expiration_seconds(unittest.TestCase): - @staticmethod def _call_fut(expiration): from google.cloud.storage._signing import get_expiration_seconds @@ -44,7 +43,7 @@ def test_w_int(self): def test_w_long(self): if six.PY3: - raise unittest.SkipTest('No long on Python 3') + raise unittest.SkipTest("No long on Python 3") self.assertEqual(self._call_fut(long(123)), 123) # noqa: F821 @@ -64,7 +63,7 @@ def test_w_other_zone_datetime(self): from google.cloud._helpers import _UTC class CET(_UTC): - _tzname = 'CET' + _tzname = "CET" _utcoffset = datetime.timedelta(hours=1) zone = CET() @@ -79,8 +78,8 @@ def test_w_timedelta_seconds(self): expiration_as_delta = datetime.timedelta(seconds=10) patch = mock.patch( - 'google.cloud.storage._signing.NOW', - return_value=dummy_utcnow) + "google.cloud.storage._signing.NOW", return_value=dummy_utcnow + ) with patch as utcnow: result = self._call_fut(expiration_as_delta) @@ -93,8 +92,8 @@ def test_w_timedelta_days(self): expiration_as_delta = datetime.timedelta(days=1) patch = mock.patch( - 'google.cloud.storage._signing.NOW', - return_value=dummy_utcnow) + "google.cloud.storage._signing.NOW", return_value=dummy_utcnow + ) with patch as utcnow: result = self._call_fut(expiration_as_delta) @@ -103,51 +102,48 @@ def test_w_timedelta_days(self): class Test_get_signed_query_params(unittest.TestCase): - @staticmethod def _call_fut(credentials, expiration, string_to_sign): from google.cloud.storage._signing import get_signed_query_params - return get_signed_query_params( - credentials, expiration, string_to_sign) + return get_signed_query_params(credentials, expiration, string_to_sign) def test_it(self): - sig_bytes = b'DEADBEEF' + sig_bytes = b"DEADBEEF" account_name = mock.sentinel.service_account_email - credentials = _make_credentials( - signing=True, signer_email=account_name) + credentials = _make_credentials(signing=True, signer_email=account_name) credentials.sign_bytes.return_value = sig_bytes expiration = 100 - string_to_sign = 'dummy_signature' - result = self._call_fut( - credentials, expiration, string_to_sign) + string_to_sign = "dummy_signature" + result = self._call_fut(credentials, expiration, string_to_sign) expected = { - 'GoogleAccessId': account_name, - 'Expires': str(expiration), - 'Signature': base64.b64encode(sig_bytes), + "GoogleAccessId": account_name, + "Expires": str(expiration), + "Signature": base64.b64encode(sig_bytes), } self.assertEqual(result, expected) credentials.sign_bytes.assert_called_once_with(string_to_sign) class Test_generate_signed_url(unittest.TestCase): - @staticmethod def _call_fut(*args, **kwargs): from google.cloud.storage._signing import generate_signed_url return generate_signed_url(*args, **kwargs) - def _generate_helper(self, response_type=None, response_disposition=None, - generation=None): - endpoint = 'http://api.example.com' - resource = '/name/path' + def _generate_helper( + self, response_type=None, response_disposition=None, generation=None + ): + endpoint = "http://api.example.com" + resource = "/name/path" credentials = _make_credentials( - signing=True, signer_email='service@example.com') - credentials.sign_bytes.return_value = b'DEADBEEF' + signing=True, signer_email="service@example.com" + ) + credentials.sign_bytes.return_value = b"DEADBEEF" signed = base64.b64encode(credentials.sign_bytes.return_value) - signed = signed.decode('ascii') + signed = signed.decode("ascii") expiration = 1000 url = self._call_fut( @@ -161,54 +157,54 @@ def _generate_helper(self, response_type=None, 
response_disposition=None, ) # Check the mock was called. - string_to_sign = '\n'.join([ - 'GET', - '', - '', - str(expiration), - resource, - ]) + string_to_sign = "\n".join(["GET", "", "", str(expiration), resource]) credentials.sign_bytes.assert_called_once_with(string_to_sign) scheme, netloc, path, qs, frag = urllib_parse.urlsplit(url) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'api.example.com') + self.assertEqual(scheme, "http") + self.assertEqual(netloc, "api.example.com") self.assertEqual(path, resource) - self.assertEqual(frag, '') + self.assertEqual(frag, "") # Check the URL parameters. params = urllib_parse.parse_qs(qs) expected_params = { - 'GoogleAccessId': [credentials.signer_email], - 'Expires': [str(expiration)], - 'Signature': [signed], + "GoogleAccessId": [credentials.signer_email], + "Expires": [str(expiration)], + "Signature": [signed], } if response_type is not None: - expected_params['response-content-type'] = [response_type] + expected_params["response-content-type"] = [response_type] if response_disposition is not None: - expected_params['response-content-disposition'] = [ - response_disposition] + expected_params["response-content-disposition"] = [response_disposition] if generation is not None: - expected_params['generation'] = [generation] + expected_params["generation"] = [generation] self.assertEqual(params, expected_params) def test_w_expiration_int(self): self._generate_helper() def test_w_custom_fields(self): - response_type = 'text/plain' - response_disposition = 'attachment; filename=blob.png' - generation = '123' - self._generate_helper(response_type=response_type, - response_disposition=response_disposition, - generation=generation) + response_type = "text/plain" + response_disposition = "attachment; filename=blob.png" + generation = "123" + self._generate_helper( + response_type=response_type, + response_disposition=response_disposition, + generation=generation, + ) def test_with_google_credentials(self): - resource = '/name/path' + resource = "/name/path" credentials = _make_credentials() expiration = int(time.time() + 5) - self.assertRaises(AttributeError, self._call_fut, credentials, - resource=resource, expiration=expiration) + self.assertRaises( + AttributeError, + self._call_fut, + credentials, + resource=resource, + expiration=expiration, + ) def _make_credentials(signing=False, signer_email=None): diff --git a/storage/tests/unit/test_acl.py b/storage/tests/unit/test_acl.py index 50432fcc8bc5..d66a9439c1cc 100644 --- a/storage/tests/unit/test_acl.py +++ b/storage/tests/unit/test_acl.py @@ -16,7 +16,6 @@ class Test_ACLEntity(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.acl import _ACLEntity @@ -27,42 +26,42 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_default_identifier(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) self.assertEqual(entity.type, TYPE) self.assertIsNone(entity.identifier) self.assertEqual(entity.get_roles(), set()) def test_ctor_w_identifier(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" entity = self._make_one(TYPE, ID) self.assertEqual(entity.type, TYPE) self.assertEqual(entity.identifier, ID) self.assertEqual(entity.get_roles(), set()) def test___str__no_identifier(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) self.assertEqual(str(entity), TYPE) def test___str__w_identifier(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" entity = 
self._make_one(TYPE, ID) - self.assertEqual(str(entity), '%s-%s' % (TYPE, ID)) + self.assertEqual(str(entity), "%s-%s" % (TYPE, ID)) def test_grant_simple(self): - TYPE = 'type' - ROLE = 'role' + TYPE = "type" + ROLE = "role" entity = self._make_one(TYPE) entity.grant(ROLE) self.assertEqual(entity.get_roles(), set([ROLE])) def test_grant_duplicate(self): - TYPE = 'type' - ROLE1 = 'role1' - ROLE2 = 'role2' + TYPE = "type" + ROLE1 = "role1" + ROLE2 = "role2" entity = self._make_one(TYPE) entity.grant(ROLE1) entity.grant(ROLE2) @@ -70,16 +69,16 @@ def test_grant_duplicate(self): self.assertEqual(entity.get_roles(), set([ROLE1, ROLE2])) def test_revoke_miss(self): - TYPE = 'type' - ROLE = 'nonesuch' + TYPE = "type" + ROLE = "nonesuch" entity = self._make_one(TYPE) entity.revoke(ROLE) self.assertEqual(entity.get_roles(), set()) def test_revoke_hit(self): - TYPE = 'type' - ROLE1 = 'role1' - ROLE2 = 'role2' + TYPE = "type" + ROLE1 = "role1" + ROLE2 = "role2" entity = self._make_one(TYPE) entity.grant(ROLE1) entity.grant(ROLE2) @@ -87,39 +86,39 @@ def test_revoke_hit(self): self.assertEqual(entity.get_roles(), set([ROLE2])) def test_grant_read(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) entity.grant_read() self.assertEqual(entity.get_roles(), set([entity.READER_ROLE])) def test_grant_write(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) entity.grant_write() self.assertEqual(entity.get_roles(), set([entity.WRITER_ROLE])) def test_grant_owner(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) entity.grant_owner() self.assertEqual(entity.get_roles(), set([entity.OWNER_ROLE])) def test_revoke_read(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) entity.grant(entity.READER_ROLE) entity.revoke_read() self.assertEqual(entity.get_roles(), set()) def test_revoke_write(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) entity.grant(entity.WRITER_ROLE) entity.revoke_write() self.assertEqual(entity.get_roles(), set()) def test_revoke_owner(self): - TYPE = 'type' + TYPE = "type" entity = self._make_one(TYPE) entity.grant(entity.OWNER_ROLE) entity.revoke_owner() @@ -127,7 +126,6 @@ def test_revoke_owner(self): class Test_ACL(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.acl import ACL @@ -140,10 +138,10 @@ def _make_one(self, *args, **kw): def test_validate_predefined(self): ACL = self._get_target_class() self.assertIsNone(ACL.validate_predefined(None)) - self.assertEqual(ACL.validate_predefined('public-read'), 'publicRead') - self.assertEqual(ACL.validate_predefined('publicRead'), 'publicRead') + self.assertEqual(ACL.validate_predefined("public-read"), "publicRead") + self.assertEqual(ACL.validate_predefined("publicRead"), "publicRead") with self.assertRaises(ValueError): - ACL.validate_predefined('publicread') + ACL.validate_predefined("publicread") def test_ctor(self): acl = self._make_one() @@ -165,8 +163,8 @@ def test_client_is_abstract(self): self.assertRaises(NotImplementedError, lambda: acl.client) def test_reset(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True acl.entity(TYPE, ID) @@ -190,83 +188,78 @@ def _reload(): self.assertTrue(acl.loaded) def test___iter___non_empty_no_roles(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True acl.entity(TYPE, ID) self.assertEqual(list(acl), []) def test___iter___non_empty_w_roles(self): - TYPE = 'type' - ID = 'id' - 
ROLE = 'role' + TYPE = "type" + ID = "id" + ROLE = "role" acl = self._make_one() acl.loaded = True entity = acl.entity(TYPE, ID) entity.grant(ROLE) - self.assertEqual(list(acl), - [{'entity': '%s-%s' % (TYPE, ID), 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "%s-%s" % (TYPE, ID), "role": ROLE}]) def test___iter___non_empty_w_empty_role(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True entity = acl.entity(TYPE, ID) - entity.grant('') + entity.grant("") self.assertEqual(list(acl), []) def test_entity_from_dict_allUsers_eager(self): - ROLE = 'role' + ROLE = "role" acl = self._make_one() acl.loaded = True - entity = acl.entity_from_dict({'entity': 'allUsers', 'role': ROLE}) - self.assertEqual(entity.type, 'allUsers') + entity = acl.entity_from_dict({"entity": "allUsers", "role": ROLE}) + self.assertEqual(entity.type, "allUsers") self.assertIsNone(entity.identifier) self.assertEqual(entity.get_roles(), set([ROLE])) - self.assertEqual(list(acl), - [{'entity': 'allUsers', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "allUsers", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) def test_entity_from_dict_allAuthenticatedUsers(self): - ROLE = 'role' + ROLE = "role" acl = self._make_one() acl.loaded = True - entity = acl.entity_from_dict({'entity': 'allAuthenticatedUsers', - 'role': ROLE}) - self.assertEqual(entity.type, 'allAuthenticatedUsers') + entity = acl.entity_from_dict({"entity": "allAuthenticatedUsers", "role": ROLE}) + self.assertEqual(entity.type, "allAuthenticatedUsers") self.assertIsNone(entity.identifier) self.assertEqual(entity.get_roles(), set([ROLE])) - self.assertEqual(list(acl), - [{'entity': 'allAuthenticatedUsers', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "allAuthenticatedUsers", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) def test_entity_from_dict_string_w_hyphen(self): - ROLE = 'role' + ROLE = "role" acl = self._make_one() acl.loaded = True - entity = acl.entity_from_dict({'entity': 'type-id', 'role': ROLE}) - self.assertEqual(entity.type, 'type') - self.assertEqual(entity.identifier, 'id') + entity = acl.entity_from_dict({"entity": "type-id", "role": ROLE}) + self.assertEqual(entity.type, "type") + self.assertEqual(entity.identifier, "id") self.assertEqual(entity.get_roles(), set([ROLE])) - self.assertEqual(list(acl), - [{'entity': 'type-id', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "type-id", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) def test_entity_from_dict_string_wo_hyphen(self): - ROLE = 'role' + ROLE = "role" acl = self._make_one() acl.loaded = True - self.assertRaises(ValueError, - acl.entity_from_dict, - {'entity': 'bogus', 'role': ROLE}) + self.assertRaises( + ValueError, acl.entity_from_dict, {"entity": "bogus", "role": ROLE} + ) self.assertEqual(list(acl.get_entities()), []) def test_has_entity_miss_str_eager(self): acl = self._make_one() acl.loaded = True - self.assertFalse(acl.has_entity('nonesuch')) + self.assertFalse(acl.has_entity("nonesuch")) def test_has_entity_miss_str_lazy(self): acl = self._make_one() @@ -275,30 +268,30 @@ def _reload(): acl.loaded = True acl.reload = _reload - self.assertFalse(acl.has_entity('nonesuch')) + self.assertFalse(acl.has_entity("nonesuch")) self.assertTrue(acl.loaded) def test_has_entity_miss_entity(self): from google.cloud.storage.acl import _ACLEntity - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" entity = _ACLEntity(TYPE, ID) acl = 
self._make_one() acl.loaded = True self.assertFalse(acl.has_entity(entity)) def test_has_entity_hit_str(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True acl.entity(TYPE, ID) - self.assertTrue(acl.has_entity('%s-%s' % (TYPE, ID))) + self.assertTrue(acl.has_entity("%s-%s" % (TYPE, ID))) def test_has_entity_hit_entity(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True entity = acl.entity(TYPE, ID) @@ -307,7 +300,7 @@ def test_has_entity_hit_entity(self): def test_get_entity_miss_str_no_default_eager(self): acl = self._make_one() acl.loaded = True - self.assertIsNone(acl.get_entity('nonesuch')) + self.assertIsNone(acl.get_entity("nonesuch")) def test_get_entity_miss_str_no_default_lazy(self): acl = self._make_one() @@ -316,14 +309,14 @@ def _reload(): acl.loaded = True acl.reload = _reload - self.assertIsNone(acl.get_entity('nonesuch')) + self.assertIsNone(acl.get_entity("nonesuch")) self.assertTrue(acl.loaded) def test_get_entity_miss_entity_no_default(self): from google.cloud.storage.acl import _ACLEntity - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" entity = _ACLEntity(TYPE, ID) acl = self._make_one() acl.loaded = True @@ -333,30 +326,30 @@ def test_get_entity_miss_str_w_default(self): DEFAULT = object() acl = self._make_one() acl.loaded = True - self.assertIs(acl.get_entity('nonesuch', DEFAULT), DEFAULT) + self.assertIs(acl.get_entity("nonesuch", DEFAULT), DEFAULT) def test_get_entity_miss_entity_w_default(self): from google.cloud.storage.acl import _ACLEntity DEFAULT = object() - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" entity = _ACLEntity(TYPE, ID) acl = self._make_one() acl.loaded = True self.assertIs(acl.get_entity(entity, DEFAULT), DEFAULT) def test_get_entity_hit_str(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True acl.entity(TYPE, ID) - self.assertTrue(acl.has_entity('%s-%s' % (TYPE, ID))) + self.assertTrue(acl.has_entity("%s-%s" % (TYPE, ID))) def test_get_entity_hit_entity(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True entity = acl.entity(TYPE, ID) @@ -365,25 +358,24 @@ def test_get_entity_hit_entity(self): def test_add_entity_miss_eager(self): from google.cloud.storage.acl import _ACLEntity - TYPE = 'type' - ID = 'id' - ROLE = 'role' + TYPE = "type" + ID = "id" + ROLE = "role" entity = _ACLEntity(TYPE, ID) entity.grant(ROLE) acl = self._make_one() acl.loaded = True acl.add_entity(entity) self.assertTrue(acl.loaded) - self.assertEqual(list(acl), - [{'entity': 'type-id', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "type-id", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) def test_add_entity_miss_lazy(self): from google.cloud.storage.acl import _ACLEntity - TYPE = 'type' - ID = 'id' - ROLE = 'role' + TYPE = "type" + ID = "id" + ROLE = "role" entity = _ACLEntity(TYPE, ID) entity.grant(ROLE) acl = self._make_one() @@ -394,18 +386,17 @@ def _reload(): acl.reload = _reload acl.add_entity(entity) self.assertTrue(acl.loaded) - self.assertEqual(list(acl), - [{'entity': 'type-id', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "type-id", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) self.assertTrue(acl.loaded) def test_add_entity_hit(self): from google.cloud.storage.acl import _ACLEntity - TYPE = 'type' - ID = 'id' - ENTITY_VAL = '%s-%s' % (TYPE, ID) - ROLE = 'role' + TYPE = "type" + 
ID = "id" + ENTITY_VAL = "%s-%s" % (TYPE, ID) + ROLE = "role" entity = _ACLEntity(TYPE, ID) entity.grant(ROLE) acl = self._make_one() @@ -415,94 +406,86 @@ def test_add_entity_hit(self): self.assertTrue(acl.loaded) self.assertIsNot(acl.get_entity(ENTITY_VAL), before) self.assertIs(acl.get_entity(ENTITY_VAL), entity) - self.assertEqual(list(acl), - [{'entity': 'type-id', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "type-id", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) def test_entity_miss(self): - TYPE = 'type' - ID = 'id' - ROLE = 'role' + TYPE = "type" + ID = "id" + ROLE = "role" acl = self._make_one() acl.loaded = True entity = acl.entity(TYPE, ID) self.assertTrue(acl.loaded) entity.grant(ROLE) - self.assertEqual(list(acl), - [{'entity': 'type-id', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "type-id", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) def test_entity_hit(self): - TYPE = 'type' - ID = 'id' - ROLE = 'role' + TYPE = "type" + ID = "id" + ROLE = "role" acl = self._make_one() acl.loaded = True before = acl.entity(TYPE, ID) before.grant(ROLE) entity = acl.entity(TYPE, ID) self.assertIs(entity, before) - self.assertEqual(list(acl), - [{'entity': 'type-id', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "type-id", "role": ROLE}]) self.assertEqual(list(acl.get_entities()), [entity]) def test_user(self): - ID = 'id' - ROLE = 'role' + ID = "id" + ROLE = "role" acl = self._make_one() acl.loaded = True entity = acl.user(ID) entity.grant(ROLE) - self.assertEqual(entity.type, 'user') + self.assertEqual(entity.type, "user") self.assertEqual(entity.identifier, ID) - self.assertEqual(list(acl), - [{'entity': 'user-%s' % ID, 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "user-%s" % ID, "role": ROLE}]) def test_group(self): - ID = 'id' - ROLE = 'role' + ID = "id" + ROLE = "role" acl = self._make_one() acl.loaded = True entity = acl.group(ID) entity.grant(ROLE) - self.assertEqual(entity.type, 'group') + self.assertEqual(entity.type, "group") self.assertEqual(entity.identifier, ID) - self.assertEqual(list(acl), - [{'entity': 'group-%s' % ID, 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "group-%s" % ID, "role": ROLE}]) def test_domain(self): - ID = 'id' - ROLE = 'role' + ID = "id" + ROLE = "role" acl = self._make_one() acl.loaded = True entity = acl.domain(ID) entity.grant(ROLE) - self.assertEqual(entity.type, 'domain') + self.assertEqual(entity.type, "domain") self.assertEqual(entity.identifier, ID) - self.assertEqual(list(acl), - [{'entity': 'domain-%s' % ID, 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "domain-%s" % ID, "role": ROLE}]) def test_all(self): - ROLE = 'role' + ROLE = "role" acl = self._make_one() acl.loaded = True entity = acl.all() entity.grant(ROLE) - self.assertEqual(entity.type, 'allUsers') + self.assertEqual(entity.type, "allUsers") self.assertIsNone(entity.identifier) - self.assertEqual(list(acl), - [{'entity': 'allUsers', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "allUsers", "role": ROLE}]) def test_all_authenticated(self): - ROLE = 'role' + ROLE = "role" acl = self._make_one() acl.loaded = True entity = acl.all_authenticated() entity.grant(ROLE) - self.assertEqual(entity.type, 'allAuthenticatedUsers') + self.assertEqual(entity.type, "allAuthenticatedUsers") self.assertIsNone(entity.identifier) - self.assertEqual(list(acl), - [{'entity': 'allAuthenticatedUsers', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": 
"allAuthenticatedUsers", "role": ROLE}]) def test_get_entities_empty_eager(self): acl = self._make_one() @@ -520,8 +503,8 @@ def _reload(): self.assertTrue(acl.loaded) def test_get_entities_nonempty(self): - TYPE = 'type' - ID = 'id' + TYPE = "type" + ID = "id" acl = self._make_one() acl.loaded = True entity = acl.entity(TYPE, ID) @@ -529,31 +512,29 @@ def test_get_entities_nonempty(self): def test_reload_missing(self): # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/652 - ROLE = 'role' + ROLE = "role" connection = _Connection({}) client = _Client(connection) acl = self._make_one() - acl.reload_path = '/testing/acl' + acl.reload_path = "/testing/acl" acl.loaded = True - acl.entity('allUsers', ROLE) + acl.entity("allUsers", ROLE) acl.reload(client=client) self.assertEqual(list(acl), []) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/testing/acl', - 'query_params': {}, - }) + self.assertEqual( + kw[0], {"method": "GET", "path": "/testing/acl", "query_params": {}} + ) def test_reload_empty_result_clears_local(self): - ROLE = 'role' - connection = _Connection({'items': []}) + ROLE = "role" + connection = _Connection({"items": []}) client = _Client(connection) acl = self._make_one() - acl.reload_path = '/testing/acl' + acl.reload_path = "/testing/acl" acl.loaded = True - acl.entity('allUsers', ROLE) + acl.entity("allUsers", ROLE) acl.reload(client=client) @@ -561,40 +542,40 @@ def test_reload_empty_result_clears_local(self): self.assertEqual(list(acl), []) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/testing/acl', - 'query_params': {}, - }) + self.assertEqual( + kw[0], {"method": "GET", "path": "/testing/acl", "query_params": {}} + ) def test_reload_nonempty_result_w_user_project(self): - ROLE = 'role' - USER_PROJECT = 'user-project-123' - connection = _Connection( - {'items': [{'entity': 'allUsers', 'role': ROLE}]}) + ROLE = "role" + USER_PROJECT = "user-project-123" + connection = _Connection({"items": [{"entity": "allUsers", "role": ROLE}]}) client = _Client(connection) acl = self._make_one() - acl.reload_path = '/testing/acl' + acl.reload_path = "/testing/acl" acl.loaded = True acl.user_project = USER_PROJECT acl.reload(client=client) self.assertTrue(acl.loaded) - self.assertEqual(list(acl), [{'entity': 'allUsers', 'role': ROLE}]) + self.assertEqual(list(acl), [{"entity": "allUsers", "role": ROLE}]) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '/testing/acl', - 'query_params': {'userProject': USER_PROJECT}, - }) + self.assertEqual( + kw[0], + { + "method": "GET", + "path": "/testing/acl", + "query_params": {"userProject": USER_PROJECT}, + }, + ) def test_save_none_set_none_passed(self): connection = _Connection() client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.save(client=client) kw = connection._requested self.assertEqual(len(kw), 0) @@ -603,49 +584,52 @@ def test_save_existing_missing_none_passed(self): connection = _Connection({}) client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True acl.save(client=client) self.assertEqual(list(acl), []) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/testing') - self.assertEqual(kw[0]['data'], {'acl': []}) 
- self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/testing") + self.assertEqual(kw[0]["data"], {"acl": []}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) def test_save_no_acl(self): - ROLE = 'role' - AFTER = [{'entity': 'allUsers', 'role': ROLE}] - connection = _Connection({'acl': AFTER}) + ROLE = "role" + AFTER = [{"entity": "allUsers", "role": ROLE}] + connection = _Connection({"acl": AFTER}) client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True - acl.entity('allUsers').grant(ROLE) + acl.entity("allUsers").grant(ROLE) acl.save(client=client) self.assertEqual(list(acl), AFTER) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/testing') - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': {'projection': 'full'}, - 'data': {'acl': AFTER}, - }) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/testing") + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/testing", + "query_params": {"projection": "full"}, + "data": {"acl": AFTER}, + }, + ) def test_save_w_acl_w_user_project(self): - ROLE1 = 'role1' - ROLE2 = 'role2' - STICKY = {'entity': 'allUsers', 'role': ROLE2} - USER_PROJECT = 'user-project-123' - new_acl = [{'entity': 'allUsers', 'role': ROLE1}] - connection = _Connection({'acl': [STICKY] + new_acl}) + ROLE1 = "role1" + ROLE2 = "role2" + STICKY = {"entity": "allUsers", "role": ROLE2} + USER_PROJECT = "user-project-123" + new_acl = [{"entity": "allUsers", "role": ROLE1}] + connection = _Connection({"acl": [STICKY] + new_acl}) client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True acl.user_project = USER_PROJECT @@ -657,118 +641,123 @@ def test_save_w_acl_w_user_project(self): self.assertTrue(new_acl[0] in entries) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'userProject': USER_PROJECT, + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/testing", + "query_params": {"projection": "full", "userProject": USER_PROJECT}, + "data": {"acl": new_acl}, }, - 'data': {'acl': new_acl}, - }) + ) def test_save_prefefined_invalid(self): connection = _Connection() client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True with self.assertRaises(ValueError): - acl.save_predefined('bogus', client=client) + acl.save_predefined("bogus", client=client) def test_save_predefined_valid(self): - PREDEFINED = 'private' - connection = _Connection({'acl': []}) + PREDEFINED = "private" + connection = _Connection({"acl": []}) client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True acl.save_predefined(PREDEFINED, client=client) entries = list(acl) self.assertEqual(len(entries), 0) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'predefinedAcl': PREDEFINED, + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/testing", + "query_params": {"projection": "full", "predefinedAcl": 
PREDEFINED}, + "data": {"acl": []}, }, - 'data': {'acl': []}, - }) + ) def test_save_predefined_w_XML_alias(self): - PREDEFINED_XML = 'project-private' - PREDEFINED_JSON = 'projectPrivate' - connection = _Connection({'acl': []}) + PREDEFINED_XML = "project-private" + PREDEFINED_JSON = "projectPrivate" + connection = _Connection({"acl": []}) client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True acl.save_predefined(PREDEFINED_XML, client=client) entries = list(acl) self.assertEqual(len(entries), 0) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'predefinedAcl': PREDEFINED_JSON, + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/testing", + "query_params": { + "projection": "full", + "predefinedAcl": PREDEFINED_JSON, + }, + "data": {"acl": []}, }, - 'data': {'acl': []}, - }) + ) def test_save_predefined_valid_w_alternate_query_param(self): # Cover case where subclass overrides _PREDEFINED_QUERY_PARAM - PREDEFINED = 'publicRead' - connection = _Connection({'acl': []}) + PREDEFINED = "publicRead" + connection = _Connection({"acl": []}) client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True - acl._PREDEFINED_QUERY_PARAM = 'alternate' + acl._PREDEFINED_QUERY_PARAM = "alternate" acl.save_predefined(PREDEFINED, client=client) entries = list(acl) self.assertEqual(len(entries), 0) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': { - 'projection': 'full', - 'alternate': PREDEFINED, + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/testing", + "query_params": {"projection": "full", "alternate": PREDEFINED}, + "data": {"acl": []}, }, - 'data': {'acl': []}, - }) + ) def test_clear(self): - ROLE1 = 'role1' - ROLE2 = 'role2' - STICKY = {'entity': 'allUsers', 'role': ROLE2} - connection = _Connection({'acl': [STICKY]}) + ROLE1 = "role1" + ROLE2 = "role2" + STICKY = {"entity": "allUsers", "role": ROLE2} + connection = _Connection({"acl": [STICKY]}) client = _Client(connection) acl = self._make_one() - acl.save_path = '/testing' + acl.save_path = "/testing" acl.loaded = True - acl.entity('allUsers', ROLE1) + acl.entity("allUsers", ROLE1) acl.clear(client=client) self.assertEqual(list(acl), [STICKY]) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'PATCH', - 'path': '/testing', - 'query_params': {'projection': 'full'}, - 'data': {'acl': []}, - }) + self.assertEqual( + kw[0], + { + "method": "PATCH", + "path": "/testing", + "query_params": {"projection": "full"}, + "data": {"acl": []}, + }, + ) class Test_BucketACL(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.acl import BucketACL @@ -779,18 +768,18 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - NAME = 'name' + NAME = "name" bucket = _Bucket(NAME) acl = self._make_one(bucket) self.assertEqual(acl.entities, {}) self.assertFalse(acl.loaded) self.assertIs(acl.bucket, bucket) - self.assertEqual(acl.reload_path, '/b/%s/acl' % NAME) - self.assertEqual(acl.save_path, '/b/%s' % NAME) + self.assertEqual(acl.reload_path, "/b/%s/acl" % NAME) + self.assertEqual(acl.save_path, "/b/%s" % NAME) def test_user_project(self): - NAME = 'name' 
- USER_PROJECT = 'user-project-123' + NAME = "name" + USER_PROJECT = "user-project-123" bucket = _Bucket(NAME) acl = self._make_one(bucket) self.assertIsNone(acl.user_project) @@ -799,7 +788,6 @@ def test_user_project(self): class Test_DefaultObjectACL(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.acl import DefaultObjectACL @@ -810,18 +798,17 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - NAME = 'name' + NAME = "name" bucket = _Bucket(NAME) acl = self._make_one(bucket) self.assertEqual(acl.entities, {}) self.assertFalse(acl.loaded) self.assertIs(acl.bucket, bucket) - self.assertEqual(acl.reload_path, '/b/%s/defaultObjectAcl' % NAME) - self.assertEqual(acl.save_path, '/b/%s' % NAME) + self.assertEqual(acl.reload_path, "/b/%s/defaultObjectAcl" % NAME) + self.assertEqual(acl.save_path, "/b/%s" % NAME) class Test_ObjectACL(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.acl import ObjectACL @@ -832,21 +819,21 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): - NAME = 'name' - BLOB_NAME = 'blob-name' + NAME = "name" + BLOB_NAME = "blob-name" bucket = _Bucket(NAME) blob = _Blob(bucket, BLOB_NAME) acl = self._make_one(blob) self.assertEqual(acl.entities, {}) self.assertFalse(acl.loaded) self.assertIs(acl.blob, blob) - self.assertEqual(acl.reload_path, '/b/%s/o/%s/acl' % (NAME, BLOB_NAME)) - self.assertEqual(acl.save_path, '/b/%s/o/%s' % (NAME, BLOB_NAME)) + self.assertEqual(acl.reload_path, "/b/%s/o/%s/acl" % (NAME, BLOB_NAME)) + self.assertEqual(acl.save_path, "/b/%s/o/%s" % (NAME, BLOB_NAME)) def test_user_project(self): - NAME = 'name' - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' + NAME = "name" + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" bucket = _Bucket(NAME) blob = _Blob(bucket, BLOB_NAME) acl = self._make_one(blob) @@ -865,7 +852,7 @@ def __init__(self, bucket, blob): @property def path(self): - return '%s/o/%s' % (self.bucket.path, self.blob) + return "%s/o/%s" % (self.bucket.path, self.blob) class _Bucket(object): @@ -877,7 +864,7 @@ def __init__(self, name): @property def path(self): - return '/b/%s' % self.name + return "/b/%s" % self.name class _Connection(object): @@ -895,6 +882,5 @@ def api_request(self, **kw): class _Client(object): - def __init__(self, connection): self._connection = connection diff --git a/storage/tests/unit/test_batch.py b/storage/tests/unit/test_batch.py index 0e6fd1349abe..1c95807f0a22 100644 --- a/storage/tests/unit/test_batch.py +++ b/storage/tests/unit/test_batch.py @@ -25,7 +25,7 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_response(status=http_client.OK, content=b'', headers={}): +def _make_response(status=http_client.OK, content=b"", headers={}): response = requests.Response() response.status_code = status response._content = content @@ -41,7 +41,6 @@ def _make_requests_session(responses): class TestMIMEApplicationHTTP(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.batch import MIMEApplicationHTTP @@ -52,21 +51,18 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_body_None(self): - METHOD = 'DELETE' - PATH = '/path/to/api' - LINES = [ - "DELETE /path/to/api HTTP/1.1", - "", - ] + METHOD = "DELETE" + PATH = "/path/to/api" + LINES = ["DELETE /path/to/api HTTP/1.1", ""] mah = self._make_one(METHOD, 
PATH, {}, None) - self.assertEqual(mah.get_content_type(), 'application/http') + self.assertEqual(mah.get_content_type(), "application/http") self.assertEqual(mah.get_payload().splitlines(), LINES) def test_ctor_body_str(self): - METHOD = 'GET' - PATH = '/path/to/api' - BODY = 'ABC' - HEADERS = {'Content-Length': len(BODY), 'Content-Type': 'text/plain'} + METHOD = "GET" + PATH = "/path/to/api" + BODY = "ABC" + HEADERS = {"Content-Length": len(BODY), "Content-Type": "text/plain"} LINES = [ "GET /path/to/api HTTP/1.1", "Content-Length: 3", @@ -78,15 +74,15 @@ def test_ctor_body_str(self): self.assertEqual(mah.get_payload().splitlines(), LINES) def test_ctor_body_dict(self): - METHOD = 'GET' - PATH = '/path/to/api' - BODY = {'foo': 'bar'} + METHOD = "GET" + PATH = "/path/to/api" + BODY = {"foo": "bar"} HEADERS = {} LINES = [ - 'GET /path/to/api HTTP/1.1', - 'Content-Length: 14', - 'Content-Type: application/json', - '', + "GET /path/to/api HTTP/1.1", + "Content-Length: 14", + "Content-Type: application/json", + "", '{"foo": "bar"}', ] mah = self._make_one(METHOD, PATH, HEADERS, BODY) @@ -94,7 +90,6 @@ def test_ctor_body_dict(self): class TestBatch(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.batch import Batch @@ -116,7 +111,7 @@ def test_ctor(self): def test_current(self): from google.cloud.storage.client import Client - project = 'PROJECT' + project = "PROJECT" credentials = _make_credentials() client = Client(project=project, credentials=credentials) batch1 = self._make_one(client) @@ -132,13 +127,13 @@ def test_current(self): def test__make_request_GET_normal(self): from google.cloud.storage.batch import _FutureDict - url = 'http://example.com/api' + url = "http://example.com/api" http = _make_requests_session([]) connection = _Connection(http=http) batch = self._make_one(connection) target = _MockObject() - response = batch._make_request('GET', url, target_object=target) + response = batch._make_request("GET", url, target_object=target) # Check the respone self.assertEqual(response.status_code, 204) @@ -153,22 +148,23 @@ def test__make_request_GET_normal(self): self.assertEqual(len(batch._requests), 1) request = batch._requests[0] request_method, request_url, _, request_data = request - self.assertEqual(request_method, 'GET') + self.assertEqual(request_method, "GET") self.assertEqual(request_url, url) self.assertIsNone(request_data) def test__make_request_POST_normal(self): from google.cloud.storage.batch import _FutureDict - url = 'http://example.com/api' + url = "http://example.com/api" http = _make_requests_session([]) connection = _Connection(http=http) batch = self._make_one(connection) - data = {'foo': 1} + data = {"foo": 1} target = _MockObject() response = batch._make_request( - 'POST', url, data={'foo': 1}, target_object=target) + "POST", url, data={"foo": 1}, target_object=target + ) self.assertEqual(response.status_code, 204) self.assertIsInstance(response.content, _FutureDict) @@ -179,22 +175,23 @@ def test__make_request_POST_normal(self): request = batch._requests[0] request_method, request_url, _, request_data = request - self.assertEqual(request_method, 'POST') + self.assertEqual(request_method, "POST") self.assertEqual(request_url, url) self.assertEqual(request_data, data) def test__make_request_PATCH_normal(self): from google.cloud.storage.batch import _FutureDict - url = 'http://example.com/api' + url = "http://example.com/api" http = _make_requests_session([]) connection = _Connection(http=http) batch = 
self._make_one(connection) - data = {'foo': 1} + data = {"foo": 1} target = _MockObject() response = batch._make_request( - 'PATCH', url, data={'foo': 1}, target_object=target) + "PATCH", url, data={"foo": 1}, target_object=target + ) self.assertEqual(response.status_code, 204) self.assertIsInstance(response.content, _FutureDict) @@ -205,20 +202,20 @@ def test__make_request_PATCH_normal(self): request = batch._requests[0] request_method, request_url, _, request_data = request - self.assertEqual(request_method, 'PATCH') + self.assertEqual(request_method, "PATCH") self.assertEqual(request_url, url) self.assertEqual(request_data, data) def test__make_request_DELETE_normal(self): from google.cloud.storage.batch import _FutureDict - url = 'http://example.com/api' + url = "http://example.com/api" http = _make_requests_session([]) connection = _Connection(http=http) batch = self._make_one(connection) target = _MockObject() - response = batch._make_request('DELETE', url, target_object=target) + response = batch._make_request("DELETE", url, target_object=target) # Check the respone self.assertEqual(response.status_code, 204) @@ -232,21 +229,21 @@ def test__make_request_DELETE_normal(self): self.assertEqual(len(batch._requests), 1) request = batch._requests[0] request_method, request_url, _, request_data = request - self.assertEqual(request_method, 'DELETE') + self.assertEqual(request_method, "DELETE") self.assertEqual(request_url, url) self.assertIsNone(request_data) def test__make_request_POST_too_many_requests(self): - url = 'http://example.com/api' + url = "http://example.com/api" http = _make_requests_session([]) connection = _Connection(http=http) batch = self._make_one(connection) batch._MAX_BATCH_SIZE = 1 - batch._requests.append(('POST', url, {}, {'bar': 2})) + batch._requests.append(("POST", url, {}, {"bar": 2})) with self.assertRaises(ValueError): - batch._make_request('POST', url, data={'foo': 1}) + batch._make_request("POST", url, data={"foo": 1}) def test_finish_empty(self): http = _make_requests_session([]) @@ -257,7 +254,7 @@ def test_finish_empty(self): batch.finish() def _get_payload_chunks(self, boundary, payload): - divider = '--' + boundary[len('boundary="'):-1] + divider = "--" + boundary[len('boundary="') : -1] chunks = payload.split(divider)[1:-1] # discard prolog / epilog return chunks @@ -265,13 +262,13 @@ def _check_subrequest_no_payload(self, chunk, method, url): lines = chunk.splitlines() # blank + 2 headers + blank + request + blank + blank self.assertEqual(len(lines), 7) - self.assertEqual(lines[0], '') - self.assertEqual(lines[1], 'Content-Type: application/http') - self.assertEqual(lines[2], 'MIME-Version: 1.0') - self.assertEqual(lines[3], '') - self.assertEqual(lines[4], '%s %s HTTP/1.1' % (method, url)) - self.assertEqual(lines[5], '') - self.assertEqual(lines[6], '') + self.assertEqual(lines[0], "") + self.assertEqual(lines[1], "Content-Type: application/http") + self.assertEqual(lines[2], "MIME-Version: 1.0") + self.assertEqual(lines[3], "") + self.assertEqual(lines[4], "%s %s HTTP/1.1" % (method, url)) + self.assertEqual(lines[5], "") + self.assertEqual(lines[6], "") def _check_subrequest_payload(self, chunk, method, url, payload): import json @@ -279,148 +276,153 @@ def _check_subrequest_payload(self, chunk, method, url, payload): lines = chunk.splitlines() # blank + 2 headers + blank + request + 2 headers + blank + body payload_str = json.dumps(payload) - self.assertEqual(lines[0], '') - self.assertEqual(lines[1], 'Content-Type: application/http') - 
self.assertEqual(lines[2], 'MIME-Version: 1.0') - self.assertEqual(lines[3], '') - self.assertEqual(lines[4], '%s %s HTTP/1.1' % (method, url)) - if method == 'GET': + self.assertEqual(lines[0], "") + self.assertEqual(lines[1], "Content-Type: application/http") + self.assertEqual(lines[2], "MIME-Version: 1.0") + self.assertEqual(lines[3], "") + self.assertEqual(lines[4], "%s %s HTTP/1.1" % (method, url)) + if method == "GET": self.assertEqual(len(lines), 7) - self.assertEqual(lines[5], '') - self.assertEqual(lines[6], '') + self.assertEqual(lines[5], "") + self.assertEqual(lines[6], "") else: self.assertEqual(len(lines), 9) - self.assertEqual(lines[5], 'Content-Length: %d' % len(payload_str)) - self.assertEqual(lines[6], 'Content-Type: application/json') - self.assertEqual(lines[7], '') + self.assertEqual(lines[5], "Content-Length: %d" % len(payload_str)) + self.assertEqual(lines[6], "Content-Type: application/json") + self.assertEqual(lines[7], "") self.assertEqual(json.loads(lines[8]), payload) def _get_mutlipart_request(self, http): request_call = http.request.mock_calls[0][2] - request_headers = request_call['headers'] - request_body = request_call['data'] + request_headers = request_call["headers"] + request_body = request_call["data"] content_type, boundary = [ - value.strip() for value in - request_headers['Content-Type'].split(';')] + value.strip() for value in request_headers["Content-Type"].split(";") + ] return request_headers, request_body, content_type, boundary def test_finish_nonempty(self): - url = 'http://api.example.com/other_api' + url = "http://api.example.com/other_api" expected_response = _make_response( content=_THREE_PART_MIME_RESPONSE, - headers={'content-type': 'multipart/mixed; boundary="DEADBEEF="'}) + headers={"content-type": 'multipart/mixed; boundary="DEADBEEF="'}, + ) http = _make_requests_session([expected_response]) connection = _Connection(http=http) client = _Client(connection) batch = self._make_one(client) - batch.API_BASE_URL = 'http://api.example.com' + batch.API_BASE_URL = "http://api.example.com" - batch._do_request('POST', url, {}, {'foo': 1, 'bar': 2}, None) - batch._do_request('PATCH', url, {}, {'bar': 3}, None) - batch._do_request('DELETE', url, {}, None, None) + batch._do_request("POST", url, {}, {"foo": 1, "bar": 2}, None) + batch._do_request("PATCH", url, {}, {"bar": 3}, None) + batch._do_request("DELETE", url, {}, None, None) result = batch.finish() self.assertEqual(len(result), len(batch._requests)) response1, response2, response3 = result - self.assertEqual(response1.headers, { - 'Content-Length': '20', - 'Content-Type': 'application/json; charset=UTF-8', - }) - self.assertEqual(response1.json(), {'foo': 1, 'bar': 2}) + self.assertEqual( + response1.headers, + {"Content-Length": "20", "Content-Type": "application/json; charset=UTF-8"}, + ) + self.assertEqual(response1.json(), {"foo": 1, "bar": 2}) - self.assertEqual(response2.headers, { - 'Content-Length': '20', - 'Content-Type': 'application/json; charset=UTF-8', - }) - self.assertEqual(response2.json(), {'foo': 1, 'bar': 3}) + self.assertEqual( + response2.headers, + {"Content-Length": "20", "Content-Type": "application/json; charset=UTF-8"}, + ) + self.assertEqual(response2.json(), {"foo": 1, "bar": 3}) - self.assertEqual(response3.headers, {'Content-Length': '0'}) + self.assertEqual(response3.headers, {"Content-Length": "0"}) self.assertEqual(response3.status_code, http_client.NO_CONTENT) - expected_url = '{}/batch/storage/v1'.format(batch.API_BASE_URL) + expected_url = 
"{}/batch/storage/v1".format(batch.API_BASE_URL) http.request.assert_called_once_with( - method='POST', url=expected_url, headers=mock.ANY, data=mock.ANY) + method="POST", url=expected_url, headers=mock.ANY, data=mock.ANY + ) request_info = self._get_mutlipart_request(http) request_headers, request_body, content_type, boundary = request_info - self.assertEqual(content_type, 'multipart/mixed') + self.assertEqual(content_type, "multipart/mixed") self.assertTrue(boundary.startswith('boundary="==')) self.assertTrue(boundary.endswith('=="')) - self.assertEqual(request_headers['MIME-Version'], '1.0') + self.assertEqual(request_headers["MIME-Version"], "1.0") chunks = self._get_payload_chunks(boundary, request_body) self.assertEqual(len(chunks), 3) - self._check_subrequest_payload( - chunks[0], 'POST', url, {'foo': 1, 'bar': 2}) - self._check_subrequest_payload(chunks[1], 'PATCH', url, {'bar': 3}) - self._check_subrequest_no_payload(chunks[2], 'DELETE', url) + self._check_subrequest_payload(chunks[0], "POST", url, {"foo": 1, "bar": 2}) + self._check_subrequest_payload(chunks[1], "PATCH", url, {"bar": 3}) + self._check_subrequest_no_payload(chunks[2], "DELETE", url) def test_finish_responses_mismatch(self): - url = 'http://api.example.com/other_api' + url = "http://api.example.com/other_api" expected_response = _make_response( content=_TWO_PART_MIME_RESPONSE_WITH_FAIL, - headers={'content-type': 'multipart/mixed; boundary="DEADBEEF="'}) + headers={"content-type": 'multipart/mixed; boundary="DEADBEEF="'}, + ) http = _make_requests_session([expected_response]) connection = _Connection(http=http) client = _Client(connection) batch = self._make_one(client) - batch.API_BASE_URL = 'http://api.example.com' + batch.API_BASE_URL = "http://api.example.com" - batch._requests.append(('GET', url, {}, None)) + batch._requests.append(("GET", url, {}, None)) with self.assertRaises(ValueError): batch.finish() def test_finish_nonempty_with_status_failure(self): from google.cloud.exceptions import NotFound - url = 'http://api.example.com/other_api' + + url = "http://api.example.com/other_api" expected_response = _make_response( content=_TWO_PART_MIME_RESPONSE_WITH_FAIL, - headers={'content-type': 'multipart/mixed; boundary="DEADBEEF="'}) + headers={"content-type": 'multipart/mixed; boundary="DEADBEEF="'}, + ) http = _make_requests_session([expected_response]) connection = _Connection(http=http) client = _Client(connection) batch = self._make_one(client) - batch.API_BASE_URL = 'http://api.example.com' + batch.API_BASE_URL = "http://api.example.com" target1 = _MockObject() target2 = _MockObject() - batch._do_request('GET', url, {}, None, target1) - batch._do_request('GET', url, {}, None, target2) + batch._do_request("GET", url, {}, None, target1) + batch._do_request("GET", url, {}, None, target2) # Make sure futures are not populated. 
- self.assertEqual([future for future in batch._target_objects], - [target1, target2]) + self.assertEqual( + [future for future in batch._target_objects], [target1, target2] + ) target2_future_before = target2._properties with self.assertRaises(NotFound): batch.finish() - self.assertEqual(target1._properties, - {'foo': 1, 'bar': 2}) + self.assertEqual(target1._properties, {"foo": 1, "bar": 2}) self.assertIs(target2._properties, target2_future_before) - expected_url = '{}/batch/storage/v1'.format(batch.API_BASE_URL) + expected_url = "{}/batch/storage/v1".format(batch.API_BASE_URL) http.request.assert_called_once_with( - method='POST', url=expected_url, headers=mock.ANY, data=mock.ANY) + method="POST", url=expected_url, headers=mock.ANY, data=mock.ANY + ) _, request_body, _, boundary = self._get_mutlipart_request(http) chunks = self._get_payload_chunks(boundary, request_body) self.assertEqual(len(chunks), 2) - self._check_subrequest_payload(chunks[0], 'GET', url, {}) - self._check_subrequest_payload(chunks[1], 'GET', url, {}) + self._check_subrequest_payload(chunks[0], "GET", url, {}) + self._check_subrequest_payload(chunks[1], "GET", url, {}) def test_finish_nonempty_non_multipart_response(self): - url = 'http://api.example.com/other_api' + url = "http://api.example.com/other_api" http = _make_requests_session([_make_response()]) connection = _Connection(http=http) client = _Client(connection) batch = self._make_one(client) - batch._requests.append(('POST', url, {}, {'foo': 1, 'bar': 2})) + batch._requests.append(("POST", url, {}, {"foo": 1, "bar": 2})) with self.assertRaises(ValueError): batch.finish() @@ -428,12 +430,13 @@ def test_finish_nonempty_non_multipart_response(self): def test_as_context_mgr_wo_error(self): from google.cloud.storage.client import Client - url = 'http://example.com/api' + url = "http://example.com/api" expected_response = _make_response( content=_THREE_PART_MIME_RESPONSE, - headers={'content-type': 'multipart/mixed; boundary="DEADBEEF="'}) + headers={"content-type": 'multipart/mixed; boundary="DEADBEEF="'}, + ) http = _make_requests_session([expected_response]) - project = 'PROJECT' + project = "PROJECT" credentials = _make_credentials() client = Client(project=project, credentials=credentials) client._http_internal = http @@ -446,32 +449,30 @@ def test_as_context_mgr_wo_error(self): with self._make_one(client) as batch: self.assertEqual(list(client._batch_stack), [batch]) - batch._make_request('POST', url, {'foo': 1, 'bar': 2}, - target_object=target1) - batch._make_request('PATCH', url, {'bar': 3}, - target_object=target2) - batch._make_request('DELETE', url, target_object=target3) + batch._make_request( + "POST", url, {"foo": 1, "bar": 2}, target_object=target1 + ) + batch._make_request("PATCH", url, {"bar": 3}, target_object=target2) + batch._make_request("DELETE", url, target_object=target3) self.assertEqual(list(client._batch_stack), []) self.assertEqual(len(batch._requests), 3) - self.assertEqual(batch._requests[0][0], 'POST') - self.assertEqual(batch._requests[1][0], 'PATCH') - self.assertEqual(batch._requests[2][0], 'DELETE') + self.assertEqual(batch._requests[0][0], "POST") + self.assertEqual(batch._requests[1][0], "PATCH") + self.assertEqual(batch._requests[2][0], "DELETE") self.assertEqual(batch._target_objects, [target1, target2, target3]) - self.assertEqual(target1._properties, - {'foo': 1, 'bar': 2}) - self.assertEqual(target2._properties, - {'foo': 1, 'bar': 3}) - self.assertEqual(target3._properties, b'') + self.assertEqual(target1._properties, 
{"foo": 1, "bar": 2}) + self.assertEqual(target2._properties, {"foo": 1, "bar": 3}) + self.assertEqual(target3._properties, b"") def test_as_context_mgr_w_error(self): from google.cloud.storage.batch import _FutureDict from google.cloud.storage.client import Client - URL = 'http://example.com/api' + URL = "http://example.com/api" http = _make_requests_session([]) connection = _Connection(http=http) - project = 'PROJECT' + project = "PROJECT" credentials = _make_credentials() client = Client(project=project, credentials=credentials) client._base_connection = connection @@ -484,11 +485,11 @@ def test_as_context_mgr_w_error(self): try: with self._make_one(client) as batch: self.assertEqual(list(client._batch_stack), [batch]) - batch._make_request('POST', URL, {'foo': 1, 'bar': 2}, - target_object=target1) - batch._make_request('PATCH', URL, {'bar': 3}, - target_object=target2) - batch._make_request('DELETE', URL, target_object=target3) + batch._make_request( + "POST", URL, {"foo": 1, "bar": 2}, target_object=target1 + ) + batch._make_request("PATCH", URL, {"bar": 3}, target_object=target2) + batch._make_request("DELETE", URL, target_object=target3) raise ValueError() except ValueError: pass @@ -505,7 +506,6 @@ def test_as_context_mgr_w_error(self): class Test__unpack_batch_response(unittest.TestCase): - def _call_fut(self, headers, content): from google.cloud.storage.batch import _unpack_batch_response @@ -518,18 +518,18 @@ def _unpack_helper(self, response, content): self.assertEqual(len(result), 3) self.assertEqual(result[0].status_code, http_client.OK) - self.assertEqual(result[0].json(), {u'bar': 2, u'foo': 1}) + self.assertEqual(result[0].json(), {u"bar": 2, u"foo": 1}) self.assertEqual(result[1].status_code, http_client.OK) - self.assertEqual(result[1].json(), {u'foo': 1, u'bar': 3}) + self.assertEqual(result[1].json(), {u"foo": 1, u"bar": 3}) self.assertEqual(result[2].status_code, http_client.NO_CONTENT) def test_bytes_headers(self): - RESPONSE = {'content-type': b'multipart/mixed; boundary="DEADBEEF="'} + RESPONSE = {"content-type": b'multipart/mixed; boundary="DEADBEEF="'} CONTENT = _THREE_PART_MIME_RESPONSE self._unpack_helper(RESPONSE, CONTENT) def test_unicode_headers(self): - RESPONSE = {'content-type': u'multipart/mixed; boundary="DEADBEEF="'} + RESPONSE = {"content-type": u'multipart/mixed; boundary="DEADBEEF="'} CONTENT = _THREE_PART_MIME_RESPONSE self._unpack_helper(RESPONSE, CONTENT) @@ -591,7 +591,6 @@ def test_unicode_headers(self): class Test__FutureDict(unittest.TestCase): - def _make_one(self, *args, **kw): from google.cloud.storage.batch import _FutureDict @@ -616,14 +615,13 @@ def test___setitem__(self): class _Connection(object): - project = 'TESTING' + project = "TESTING" def __init__(self, **kw): self.__dict__.update(kw) def _make_request(self, method, url, data=None, headers=None): - return self.http.request(url=url, method=method, - headers=headers, data=data) + return self.http.request(url=url, method=method, headers=headers, data=data) class _MockObject(object): @@ -631,6 +629,5 @@ class _MockObject(object): class _Client(object): - def __init__(self, connection): self._base_connection = connection diff --git a/storage/tests/unit/test_blob.py b/storage/tests/unit/test_blob.py index 7d696c0f9c4d..5450ccd4c4e6 100644 --- a/storage/tests/unit/test_blob.py +++ b/storage/tests/unit/test_blob.py @@ -34,20 +34,19 @@ def _make_credentials(): class Test_Blob(unittest.TestCase): - @staticmethod def _make_one(*args, **kw): from google.cloud.storage.blob import Blob - 
properties = kw.pop('properties', {}) + properties = kw.pop("properties", {}) blob = Blob(*args, **kw) blob._properties.update(properties) return blob def test_ctor_wo_encryption_key(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - properties = {'key': 'value'} + properties = {"key": "value"} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertIs(blob.bucket, bucket) self.assertEqual(blob.name, BLOB_NAME) @@ -58,37 +57,36 @@ def test_ctor_wo_encryption_key(self): self.assertEqual(blob.kms_key_name, None) def test_ctor_with_encoded_unicode(self): - blob_name = b'wet \xe2\x9b\xb5' + blob_name = b"wet \xe2\x9b\xb5" blob = self._make_one(blob_name, bucket=None) - unicode_name = u'wet \N{sailboat}' + unicode_name = u"wet \N{sailboat}" self.assertNotIsInstance(blob.name, bytes) self.assertIsInstance(blob.name, six.text_type) self.assertEqual(blob.name, unicode_name) def test_ctor_w_encryption_key(self): - KEY = b'01234567890123456789012345678901' # 32 bytes - BLOB_NAME = 'blob-name' + KEY = b"01234567890123456789012345678901" # 32 bytes + BLOB_NAME = "blob-name" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=KEY) self.assertEqual(blob._encryption_key, KEY) self.assertEqual(blob.kms_key_name, None) def test_ctor_w_kms_key_name_and_encryption_key(self): - KEY = b'01234567890123456789012345678901' # 32 bytes + KEY = b"01234567890123456789012345678901" # 32 bytes KMS_RESOURCE = ( "projects/test-project-123/" "locations/us/" "keyRings/test-ring/" "cryptoKeys/test-key" ) - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() with self.assertRaises(ValueError): self._make_one( - BLOB_NAME, bucket=bucket, - encryption_key=KEY, - kms_key_name=KMS_RESOURCE) + BLOB_NAME, bucket=bucket, encryption_key=KEY, kms_key_name=KMS_RESOURCE + ) def test_ctor_w_kms_key_name(self): KMS_RESOURCE = ( @@ -97,10 +95,9 @@ def test_ctor_w_kms_key_name(self): "keyRings/test-ring/" "cryptoKeys/test-key" ) - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - blob = self._make_one( - BLOB_NAME, bucket=bucket, kms_key_name=KMS_RESOURCE) + blob = self._make_one(BLOB_NAME, bucket=bucket, kms_key_name=KMS_RESOURCE) self.assertEqual(blob._encryption_key, None) self.assertEqual(blob.kms_key_name, KMS_RESOURCE) @@ -108,53 +105,49 @@ def _set_properties_helper(self, kms_key_name=None): import datetime from google.cloud._helpers import UTC from google.cloud._helpers import _RFC3339_MICROS + now = datetime.datetime.utcnow().replace(tzinfo=UTC) NOW = now.strftime(_RFC3339_MICROS) - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" GENERATION = 12345 - BLOB_ID = 'name/{}/{}'.format(BLOB_NAME, GENERATION) - SELF_LINK = 'http://example.com/self/' + BLOB_ID = "name/{}/{}".format(BLOB_NAME, GENERATION) + SELF_LINK = "http://example.com/self/" METAGENERATION = 23456 SIZE = 12345 - MD5_HASH = 'DEADBEEF' - MEDIA_LINK = 'http://example.com/media/' - ENTITY = 'project-owner-12345' - ENTITY_ID = '23456' - CRC32C = 'FACE0DAC' + MD5_HASH = "DEADBEEF" + MEDIA_LINK = "http://example.com/media/" + ENTITY = "project-owner-12345" + ENTITY_ID = "23456" + CRC32C = "FACE0DAC" COMPONENT_COUNT = 2 - ETAG = 'ETAG' + ETAG = "ETAG" resource = { - 'id': BLOB_ID, - 'selfLink': SELF_LINK, - 'generation': GENERATION, - 'metageneration': METAGENERATION, - 'contentType': 'text/plain', - 'timeCreated': NOW, - 'updated': NOW, - 'timeDeleted': NOW, - 'storageClass': 'NEARLINE', - 'timeStorageClassUpdated': NOW, - 'size': SIZE, - 'md5Hash': 
MD5_HASH, - 'mediaLink': MEDIA_LINK, - 'contentEncoding': 'gzip', - 'contentDisposition': 'inline', - 'contentLanguage': 'en-US', - 'cacheControl': 'private', - 'metadata': { - 'foo': 'Foo', - }, - 'owner': { - 'entity': ENTITY, - 'entityId': ENTITY_ID, - }, - 'crc32c': CRC32C, - 'componentCount': COMPONENT_COUNT, - 'etag': ETAG, + "id": BLOB_ID, + "selfLink": SELF_LINK, + "generation": GENERATION, + "metageneration": METAGENERATION, + "contentType": "text/plain", + "timeCreated": NOW, + "updated": NOW, + "timeDeleted": NOW, + "storageClass": "NEARLINE", + "timeStorageClassUpdated": NOW, + "size": SIZE, + "md5Hash": MD5_HASH, + "mediaLink": MEDIA_LINK, + "contentEncoding": "gzip", + "contentDisposition": "inline", + "contentLanguage": "en-US", + "cacheControl": "private", + "metadata": {"foo": "Foo"}, + "owner": {"entity": ENTITY, "entityId": ENTITY_ID}, + "crc32c": CRC32C, + "componentCount": COMPONENT_COUNT, + "etag": ETAG, } if kms_key_name is not None: - resource['kmsKeyName'] = kms_key_name + resource["kmsKeyName"] = kms_key_name bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) @@ -165,20 +158,20 @@ def _set_properties_helper(self, kms_key_name=None): self.assertEqual(blob.self_link, SELF_LINK) self.assertEqual(blob.generation, GENERATION) self.assertEqual(blob.metageneration, METAGENERATION) - self.assertEqual(blob.content_type, 'text/plain') + self.assertEqual(blob.content_type, "text/plain") self.assertEqual(blob.time_created, now) self.assertEqual(blob.updated, now) self.assertEqual(blob.time_deleted, now) - self.assertEqual(blob.storage_class, 'NEARLINE') + self.assertEqual(blob.storage_class, "NEARLINE") self.assertEqual(blob.size, SIZE) self.assertEqual(blob.md5_hash, MD5_HASH) self.assertEqual(blob.media_link, MEDIA_LINK) - self.assertEqual(blob.content_encoding, 'gzip') - self.assertEqual(blob.content_disposition, 'inline') - self.assertEqual(blob.content_language, 'en-US') - self.assertEqual(blob.cache_control, 'private') - self.assertEqual(blob.metadata, {'foo': 'Foo'}) - self.assertEqual(blob.owner, {'entity': ENTITY, 'entityId': ENTITY_ID}) + self.assertEqual(blob.content_encoding, "gzip") + self.assertEqual(blob.content_disposition, "inline") + self.assertEqual(blob.content_language, "en-US") + self.assertEqual(blob.cache_control, "private") + self.assertEqual(blob.metadata, {"foo": "Foo"}) + self.assertEqual(blob.owner, {"entity": ENTITY, "entityId": ENTITY_ID}) self.assertEqual(blob.crc32c, CRC32C) self.assertEqual(blob.component_count, COMPONENT_COUNT) self.assertEqual(blob.etag, ETAG) @@ -203,14 +196,14 @@ def test__set_properties_w_kms_key_name(self): def test_chunk_size_ctor(self): from google.cloud.storage.blob import Blob - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" BUCKET = object() chunk_size = 10 * Blob._CHUNK_SIZE_MULTIPLE blob = self._make_one(BLOB_NAME, bucket=BUCKET, chunk_size=chunk_size) self.assertEqual(blob._chunk_size, chunk_size) def test_chunk_size_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" BUCKET = object() blob = self._make_one(BLOB_NAME, bucket=BUCKET) self.assertIsNone(blob.chunk_size) @@ -219,7 +212,7 @@ def test_chunk_size_getter(self): self.assertIs(blob.chunk_size, VALUE) def test_chunk_size_setter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" BUCKET = object() blob = self._make_one(BLOB_NAME, bucket=BUCKET) self.assertIsNone(blob._chunk_size) @@ -228,7 +221,7 @@ def test_chunk_size_setter(self): self.assertEqual(blob._chunk_size, 20) def test_chunk_size_setter_bad_value(self): - 
BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" BUCKET = object() blob = self._make_one(BLOB_NAME, bucket=BUCKET) self.assertIsNone(blob._chunk_size) @@ -240,107 +233,107 @@ def test_acl_property(self): from google.cloud.storage.acl import ObjectACL fake_bucket = _Bucket() - blob = self._make_one(u'name', bucket=fake_bucket) + blob = self._make_one(u"name", bucket=fake_bucket) acl = blob.acl self.assertIsInstance(acl, ObjectACL) self.assertIs(acl, blob._acl) def test_path_bad_bucket(self): fake_bucket = object() - name = u'blob-name' + name = u"blob-name" blob = self._make_one(name, bucket=fake_bucket) - self.assertRaises(AttributeError, getattr, blob, 'path') + self.assertRaises(AttributeError, getattr, blob, "path") def test_path_no_name(self): bucket = _Bucket() - blob = self._make_one(u'', bucket=bucket) - self.assertRaises(ValueError, getattr, blob, 'path') + blob = self._make_one(u"", bucket=bucket) + self.assertRaises(ValueError, getattr, blob, "path") def test_path_normal(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) - self.assertEqual(blob.path, '/b/name/o/%s' % BLOB_NAME) + self.assertEqual(blob.path, "/b/name/o/%s" % BLOB_NAME) def test_path_w_slash_in_name(self): - BLOB_NAME = 'parent/child' + BLOB_NAME = "parent/child" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) - self.assertEqual(blob.path, '/b/name/o/parent%2Fchild') + self.assertEqual(blob.path, "/b/name/o/parent%2Fchild") def test_path_with_non_ascii(self): - blob_name = u'Caf\xe9' + blob_name = u"Caf\xe9" bucket = _Bucket() blob = self._make_one(blob_name, bucket=bucket) - self.assertEqual(blob.path, '/b/name/o/Caf%C3%A9') + self.assertEqual(blob.path, "/b/name/o/Caf%C3%A9") def test_client(self): - blob_name = 'BLOB' + blob_name = "BLOB" bucket = _Bucket() blob = self._make_one(blob_name, bucket=bucket) self.assertIs(blob.client, bucket.client) def test_user_project(self): - user_project = 'user-project-123' - blob_name = 'BLOB' + user_project = "user-project-123" + blob_name = "BLOB" bucket = _Bucket(user_project=user_project) blob = self._make_one(blob_name, bucket=bucket) self.assertEqual(blob.user_project, user_project) def test_public_url(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) - self.assertEqual(blob.public_url, - 'https://storage.googleapis.com/name/%s' % - BLOB_NAME) + self.assertEqual( + blob.public_url, "https://storage.googleapis.com/name/%s" % BLOB_NAME + ) def test_public_url_w_slash_in_name(self): - BLOB_NAME = 'parent/child' + BLOB_NAME = "parent/child" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertEqual( - blob.public_url, - 'https://storage.googleapis.com/name/parent/child') + blob.public_url, "https://storage.googleapis.com/name/parent/child" + ) def test_public_url_with_non_ascii(self): - blob_name = u'winter \N{snowman}' + blob_name = u"winter \N{snowman}" bucket = _Bucket() blob = self._make_one(blob_name, bucket=bucket) - expected_url = 'https://storage.googleapis.com/name/winter%20%E2%98%83' + expected_url = "https://storage.googleapis.com/name/winter%20%E2%98%83" self.assertEqual(blob.public_url, expected_url) def _basic_generate_signed_url_helper(self, credentials=None): - BLOB_NAME = 'blob-name' - EXPIRATION = '2014-10-16T20:34:37.000Z' + BLOB_NAME = "blob-name" + EXPIRATION = "2014-10-16T20:34:37.000Z" connection = _Connection() client = _Client(connection) bucket = 
_Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) - URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' - '&Expiration=2014-10-16T20:34:37.000Z') + URI = ( + "http://example.com/abucket/a-blob-name?Signature=DEADBEEF" + "&Expiration=2014-10-16T20:34:37.000Z" + ) SIGNER = _Signer() - with mock.patch('google.cloud.storage.blob.generate_signed_url', - new=SIGNER): - signed_uri = blob.generate_signed_url(EXPIRATION, - credentials=credentials) + with mock.patch("google.cloud.storage.blob.generate_signed_url", new=SIGNER): + signed_uri = blob.generate_signed_url(EXPIRATION, credentials=credentials) self.assertEqual(signed_uri, URI) - PATH = '/name/%s' % (BLOB_NAME,) + PATH = "/name/%s" % (BLOB_NAME,) if credentials is None: EXPECTED_ARGS = (_Connection.credentials,) else: EXPECTED_ARGS = (credentials,) EXPECTED_KWARGS = { - 'api_access_endpoint': 'https://storage.googleapis.com', - 'expiration': EXPIRATION, - 'method': 'GET', - 'resource': PATH, - 'content_type': None, - 'response_type': None, - 'response_disposition': None, - 'generation': None, + "api_access_endpoint": "https://storage.googleapis.com", + "expiration": EXPIRATION, + "method": "GET", + "resource": PATH, + "content_type": None, + "response_type": None, + "response_disposition": None, + "generation": None, } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) @@ -348,34 +341,34 @@ def test_generate_signed_url_w_default_method(self): self._basic_generate_signed_url_helper() def test_generate_signed_url_w_content_type(self): - BLOB_NAME = 'blob-name' - EXPIRATION = '2014-10-16T20:34:37.000Z' + BLOB_NAME = "blob-name" + EXPIRATION = "2014-10-16T20:34:37.000Z" connection = _Connection() client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) - URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' - '&Expiration=2014-10-16T20:34:37.000Z') + URI = ( + "http://example.com/abucket/a-blob-name?Signature=DEADBEEF" + "&Expiration=2014-10-16T20:34:37.000Z" + ) SIGNER = _Signer() CONTENT_TYPE = "text/html" - with mock.patch('google.cloud.storage.blob.generate_signed_url', - new=SIGNER): - signed_url = blob.generate_signed_url(EXPIRATION, - content_type=CONTENT_TYPE) + with mock.patch("google.cloud.storage.blob.generate_signed_url", new=SIGNER): + signed_url = blob.generate_signed_url(EXPIRATION, content_type=CONTENT_TYPE) self.assertEqual(signed_url, URI) - PATH = '/name/%s' % (BLOB_NAME,) + PATH = "/name/%s" % (BLOB_NAME,) EXPECTED_ARGS = (_Connection.credentials,) EXPECTED_KWARGS = { - 'api_access_endpoint': 'https://storage.googleapis.com', - 'expiration': EXPIRATION, - 'method': 'GET', - 'resource': PATH, - 'content_type': CONTENT_TYPE, - 'response_type': None, - 'response_disposition': None, - 'generation': None, + "api_access_endpoint": "https://storage.googleapis.com", + "expiration": EXPIRATION, + "method": "GET", + "resource": PATH, + "content_type": CONTENT_TYPE, + "response_type": None, + "response_disposition": None, + "generation": None, } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) @@ -384,129 +377,135 @@ def test_generate_signed_url_w_credentials(self): self._basic_generate_signed_url_helper(credentials=credentials) def test_generate_signed_url_lowercase_method(self): - BLOB_NAME = 'blob-name' - EXPIRATION = '2014-10-16T20:34:37.000Z' + BLOB_NAME = "blob-name" + EXPIRATION = "2014-10-16T20:34:37.000Z" connection = _Connection() client = _Client(connection) bucket = _Bucket(client) blob = 
self._make_one(BLOB_NAME, bucket=bucket) - URI = (u'http://example.com/abucket/a-blob-name?Signature=DEADBEEF' - u'&Expiration=2014-10-16T20:34:37.000Z') + URI = ( + u"http://example.com/abucket/a-blob-name?Signature=DEADBEEF" + u"&Expiration=2014-10-16T20:34:37.000Z" + ) SIGNER = _Signer() - with mock.patch('google.cloud.storage.blob.generate_signed_url', - new=SIGNER): - signed_url = blob.generate_signed_url(EXPIRATION, method='get') + with mock.patch("google.cloud.storage.blob.generate_signed_url", new=SIGNER): + signed_url = blob.generate_signed_url(EXPIRATION, method="get") self.assertEqual(signed_url, URI) - PATH = '/name/%s' % (BLOB_NAME,) + PATH = "/name/%s" % (BLOB_NAME,) EXPECTED_ARGS = (_Connection.credentials,) EXPECTED_KWARGS = { - 'api_access_endpoint': 'https://storage.googleapis.com', - 'expiration': EXPIRATION, - 'method': 'GET', - 'resource': PATH, - 'content_type': None, - 'response_type': None, - 'response_disposition': None, - 'generation': None, + "api_access_endpoint": "https://storage.googleapis.com", + "expiration": EXPIRATION, + "method": "GET", + "resource": PATH, + "content_type": None, + "response_type": None, + "response_disposition": None, + "generation": None, } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) def test_generate_signed_url_non_ascii(self): - BLOB_NAME = u'\u0410\u043a\u043a\u043e\u0440\u0434\u044b.txt' - EXPIRATION = '2014-10-16T20:34:37.000Z' + BLOB_NAME = u"\u0410\u043a\u043a\u043e\u0440\u0434\u044b.txt" + EXPIRATION = "2014-10-16T20:34:37.000Z" connection = _Connection() client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) - URI = (u'http://example.com/abucket/a-blob-name?Signature=DEADBEEF' - u'&Expiration=2014-10-16T20:34:37.000Z') + URI = ( + u"http://example.com/abucket/a-blob-name?Signature=DEADBEEF" + u"&Expiration=2014-10-16T20:34:37.000Z" + ) SIGNER = _Signer() - with mock.patch('google.cloud.storage.blob.generate_signed_url', - new=SIGNER): + with mock.patch("google.cloud.storage.blob.generate_signed_url", new=SIGNER): signed_url = blob.generate_signed_url(EXPIRATION) self.assertEqual(signed_url, URI) EXPECTED_ARGS = (_Connection.credentials,) EXPECTED_KWARGS = { - 'api_access_endpoint': 'https://storage.googleapis.com', - 'expiration': EXPIRATION, - 'method': 'GET', - 'resource': '/name/%D0%90%D0%BA%D0%BA%D0%BE%D1%80%D0%B4%D1%8B.txt', - 'content_type': None, - 'response_type': None, - 'response_disposition': None, - 'generation': None, + "api_access_endpoint": "https://storage.googleapis.com", + "expiration": EXPIRATION, + "method": "GET", + "resource": "/name/%D0%90%D0%BA%D0%BA%D0%BE%D1%80%D0%B4%D1%8B.txt", + "content_type": None, + "response_type": None, + "response_disposition": None, + "generation": None, } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) def test_generate_signed_url_w_slash_in_name(self): - BLOB_NAME = 'parent/child' - EXPIRATION = '2014-10-16T20:34:37.000Z' + BLOB_NAME = "parent/child" + EXPIRATION = "2014-10-16T20:34:37.000Z" connection = _Connection() client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) - URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' - '&Expiration=2014-10-16T20:34:37.000Z') + URI = ( + "http://example.com/abucket/a-blob-name?Signature=DEADBEEF" + "&Expiration=2014-10-16T20:34:37.000Z" + ) SIGNER = _Signer() - with mock.patch('google.cloud.storage.blob.generate_signed_url', - new=SIGNER): + with 
mock.patch("google.cloud.storage.blob.generate_signed_url", new=SIGNER): signed_url = blob.generate_signed_url(EXPIRATION) self.assertEqual(signed_url, URI) EXPECTED_ARGS = (_Connection.credentials,) EXPECTED_KWARGS = { - 'api_access_endpoint': 'https://storage.googleapis.com', - 'expiration': EXPIRATION, - 'method': 'GET', - 'resource': '/name/parent/child', - 'content_type': None, - 'response_type': None, - 'response_disposition': None, - 'generation': None, + "api_access_endpoint": "https://storage.googleapis.com", + "expiration": EXPIRATION, + "method": "GET", + "resource": "/name/parent/child", + "content_type": None, + "response_type": None, + "response_disposition": None, + "generation": None, } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) def test_generate_signed_url_w_method_arg(self): - BLOB_NAME = 'blob-name' - EXPIRATION = '2014-10-16T20:34:37.000Z' + BLOB_NAME = "blob-name" + EXPIRATION = "2014-10-16T20:34:37.000Z" connection = _Connection() client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) - URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' - '&Expiration=2014-10-16T20:34:37.000Z') + URI = ( + "http://example.com/abucket/a-blob-name?Signature=DEADBEEF" + "&Expiration=2014-10-16T20:34:37.000Z" + ) SIGNER = _Signer() - with mock.patch('google.cloud.storage.blob.generate_signed_url', - new=SIGNER): - signed_uri = blob.generate_signed_url(EXPIRATION, method='POST') + with mock.patch("google.cloud.storage.blob.generate_signed_url", new=SIGNER): + signed_uri = blob.generate_signed_url(EXPIRATION, method="POST") self.assertEqual(signed_uri, URI) - PATH = '/name/%s' % (BLOB_NAME,) + PATH = "/name/%s" % (BLOB_NAME,) EXPECTED_ARGS = (_Connection.credentials,) EXPECTED_KWARGS = { - 'api_access_endpoint': 'https://storage.googleapis.com', - 'expiration': EXPIRATION, - 'method': 'POST', - 'resource': PATH, - 'content_type': None, - 'response_type': None, - 'response_disposition': None, - 'generation': None, + "api_access_endpoint": "https://storage.googleapis.com", + "expiration": EXPIRATION, + "method": "POST", + "resource": PATH, + "content_type": None, + "response_type": None, + "response_disposition": None, + "generation": None, } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) - @mock.patch('google.cloud.storage._signing.get_signed_query_params', - return_value={ - 'GoogleAccessId': 'service-account-name', - 'Expires': 12345, - 'Signature': 'signed-data', - }) + @mock.patch( + "google.cloud.storage._signing.get_signed_query_params", + return_value={ + "GoogleAccessId": "service-account-name", + "Expires": 12345, + "Signature": "signed-data", + }, + ) def test_generate_resumable_signed_url(self, mock_get_signed_query_params): """ Verify correct behavior of resumable upload URL generation @@ -517,36 +516,39 @@ def test_generate_resumable_signed_url(self, mock_get_signed_query_params): expiry = get_expiration_seconds(datetime.timedelta(hours=1)) signed_url = generate_signed_url( - _make_credentials(), 'a-bucket', expiry, method='RESUMABLE' + _make_credentials(), "a-bucket", expiry, method="RESUMABLE" ) self.assertTrue(mock_get_signed_query_params.called) self.assertGreater(len(signed_url), 0) - self.assertIn('a-bucket', signed_url) - self.assertIn('GoogleAccessId', signed_url) - self.assertIn('Expires', signed_url) - self.assertIn('Signature', signed_url) + self.assertIn("a-bucket", signed_url) + self.assertIn("GoogleAccessId", signed_url) + self.assertIn("Expires", 
signed_url) + self.assertIn("Signature", signed_url) def test_exists_miss(self): - NONESUCH = 'nonesuch' - not_found_response = ({'status': http_client.NOT_FOUND}, b'') + NONESUCH = "nonesuch" + not_found_response = ({"status": http_client.NOT_FOUND}, b"") connection = _Connection(not_found_response) client = _Client(connection) bucket = _Bucket(client) blob = self._make_one(NONESUCH, bucket=bucket) self.assertFalse(blob.exists()) self.assertEqual(len(connection._requested), 1) - self.assertEqual(connection._requested[0], { - 'method': 'GET', - 'path': '/b/name/o/{}'.format(NONESUCH), - 'query_params': {'fields': 'name'}, - '_target_object': None, - }) + self.assertEqual( + connection._requested[0], + { + "method": "GET", + "path": "/b/name/o/{}".format(NONESUCH), + "query_params": {"fields": "name"}, + "_target_object": None, + }, + ) def test_exists_hit_w_user_project(self): - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - found_response = ({'status': http_client.OK}, b'') + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" + found_response = ({"status": http_client.OK}, b"") connection = _Connection(found_response) client = _Client(connection) bucket = _Bucket(client, user_project=USER_PROJECT) @@ -554,16 +556,19 @@ def test_exists_hit_w_user_project(self): bucket._blobs[BLOB_NAME] = 1 self.assertTrue(blob.exists()) self.assertEqual(len(connection._requested), 1) - self.assertEqual(connection._requested[0], { - 'method': 'GET', - 'path': '/b/name/o/{}'.format(BLOB_NAME), - 'query_params': {'fields': 'name', 'userProject': USER_PROJECT}, - '_target_object': None, - }) + self.assertEqual( + connection._requested[0], + { + "method": "GET", + "path": "/b/name/o/{}".format(BLOB_NAME), + "query_params": {"fields": "name", "userProject": USER_PROJECT}, + "_target_object": None, + }, + ) def test_delete(self): - BLOB_NAME = 'blob-name' - not_found_response = ({'status': http_client.NOT_FOUND}, b'') + BLOB_NAME = "blob-name" + not_found_response = ({"status": http_client.NOT_FOUND}, b"") connection = _Connection(not_found_response) client = _Client(connection) bucket = _Bucket(client) @@ -574,77 +579,80 @@ def test_delete(self): self.assertEqual(bucket._deleted, [(BLOB_NAME, None)]) def test__get_transport(self): - client = mock.Mock(spec=[u'_credentials', '_http']) + client = mock.Mock(spec=[u"_credentials", "_http"]) client._http = mock.sentinel.transport - blob = self._make_one(u'blob-name', bucket=None) + blob = self._make_one(u"blob-name", bucket=None) transport = blob._get_transport(client) self.assertIs(transport, mock.sentinel.transport) def test__get_download_url_with_media_link(self): - blob_name = 'something.txt' - bucket = _Bucket(name='IRRELEVANT') + blob_name = "something.txt" + bucket = _Bucket(name="IRRELEVANT") blob = self._make_one(blob_name, bucket=bucket) - media_link = 'http://test.invalid' + media_link = "http://test.invalid" # Set the media link on the blob - blob._properties['mediaLink'] = media_link + blob._properties["mediaLink"] = media_link download_url = blob._get_download_url() self.assertEqual(download_url, media_link) def test__get_download_url_with_media_link_w_user_project(self): - blob_name = 'something.txt' - user_project = 'user-project-123' - bucket = _Bucket(name='IRRELEVANT', user_project=user_project) + blob_name = "something.txt" + user_project = "user-project-123" + bucket = _Bucket(name="IRRELEVANT", user_project=user_project) blob = self._make_one(blob_name, bucket=bucket) - media_link = 'http://test.invalid' + media_link = 
"http://test.invalid" # Set the media link on the blob - blob._properties['mediaLink'] = media_link + blob._properties["mediaLink"] = media_link download_url = blob._get_download_url() self.assertEqual( - download_url, '{}?userProject={}'.format(media_link, user_project)) + download_url, "{}?userProject={}".format(media_link, user_project) + ) def test__get_download_url_on_the_fly(self): - blob_name = 'bzzz-fly.txt' - bucket = _Bucket(name='buhkit') + blob_name = "bzzz-fly.txt" + bucket = _Bucket(name="buhkit") blob = self._make_one(blob_name, bucket=bucket) self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - 'https://www.googleapis.com/download/storage/v1/b/' - 'buhkit/o/bzzz-fly.txt?alt=media') + "https://www.googleapis.com/download/storage/v1/b/" + "buhkit/o/bzzz-fly.txt?alt=media" + ) self.assertEqual(download_url, expected_url) def test__get_download_url_on_the_fly_with_generation(self): - blob_name = 'pretend.txt' - bucket = _Bucket(name='fictional') + blob_name = "pretend.txt" + bucket = _Bucket(name="fictional") blob = self._make_one(blob_name, bucket=bucket) generation = 1493058489532987 # Set the media link on the blob - blob._properties['generation'] = str(generation) + blob._properties["generation"] = str(generation) self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - 'https://www.googleapis.com/download/storage/v1/b/' - 'fictional/o/pretend.txt?alt=media&generation=1493058489532987') + "https://www.googleapis.com/download/storage/v1/b/" + "fictional/o/pretend.txt?alt=media&generation=1493058489532987" + ) self.assertEqual(download_url, expected_url) def test__get_download_url_on_the_fly_with_user_project(self): - blob_name = 'pretend.txt' - user_project = 'user-project-123' - bucket = _Bucket(name='fictional', user_project=user_project) + blob_name = "pretend.txt" + user_project = "user-project-123" + bucket = _Bucket(name="fictional", user_project=user_project) blob = self._make_one(blob_name, bucket=bucket) self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - 'https://www.googleapis.com/download/storage/v1/b/' - 'fictional/o/pretend.txt?alt=media&userProject={}'.format( - user_project)) + "https://www.googleapis.com/download/storage/v1/b/" + "fictional/o/pretend.txt?alt=media&userProject={}".format(user_project) + ) self.assertEqual(download_url, expected_url) def test__get_download_url_on_the_fly_with_kms_key_name(self): @@ -654,21 +662,20 @@ def test__get_download_url_on_the_fly_with_kms_key_name(self): "keyRings/test-ring/" "cryptoKeys/test-key" ) - blob_name = 'bzzz-fly.txt' - bucket = _Bucket(name='buhkit') - blob = self._make_one( - blob_name, bucket=bucket, kms_key_name=kms_resource) + blob_name = "bzzz-fly.txt" + bucket = _Bucket(name="buhkit") + blob = self._make_one(blob_name, bucket=bucket, kms_key_name=kms_resource) self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - 'https://www.googleapis.com/download/storage/v1/b/' - 'buhkit/o/bzzz-fly.txt?alt=media') + "https://www.googleapis.com/download/storage/v1/b/" + "buhkit/o/bzzz-fly.txt?alt=media" + ) self.assertEqual(download_url, expected_url) @staticmethod - def _mock_requests_response( - status_code, headers, content=b'', stream=False): + def _mock_requests_response(status_code, headers, content=b"", stream=False): import requests response = requests.Response() @@ -683,40 +690,42 @@ def _mock_requests_response( response.raw = None response._content = 
content - response.request = requests.Request( - 'POST', 'http://example.com').prepare() + response.request = requests.Request("POST", "http://example.com").prepare() return response def _mock_download_transport(self): - fake_transport = mock.Mock(spec=['request']) + fake_transport = mock.Mock(spec=["request"]) # Give the transport two fake responses. chunk1_response = self._mock_requests_response( http_client.PARTIAL_CONTENT, - {'content-length': '3', 'content-range': 'bytes 0-2/6'}, - content=b'abc') + {"content-length": "3", "content-range": "bytes 0-2/6"}, + content=b"abc", + ) chunk2_response = self._mock_requests_response( http_client.PARTIAL_CONTENT, - {'content-length': '3', 'content-range': 'bytes 3-5/6'}, - content=b'def') + {"content-length": "3", "content-range": "bytes 3-5/6"}, + content=b"def", + ) fake_transport.request.side_effect = [chunk1_response, chunk2_response] return fake_transport def _mock_download_transport_range(self): - fake_transport = mock.Mock(spec=['request']) + fake_transport = mock.Mock(spec=["request"]) # Give the transport two fake responses. chunk1_response = self._mock_requests_response( http_client.PARTIAL_CONTENT, - {'content-length': '2', 'content-range': 'bytes 1-2/6'}, - content=b'bc') + {"content-length": "2", "content-range": "bytes 1-2/6"}, + content=b"bc", + ) chunk2_response = self._mock_requests_response( http_client.PARTIAL_CONTENT, - {'content-length': '2', 'content-range': 'bytes 3-4/6'}, - content=b'de') + {"content-length": "2", "content-range": "bytes 3-4/6"}, + content=b"de", + ) fake_transport.request.side_effect = [chunk1_response, chunk2_response] return fake_transport - def _check_session_mocks(self, client, transport, - expected_url, headers=None): + def _check_session_mocks(self, client, transport, expected_url, headers=None): # Check that the transport was called exactly twice. self.assertEqual(transport.request.call_count, 2) if headers is None: @@ -724,75 +733,72 @@ def _check_session_mocks(self, client, transport, # NOTE: bytes=0-2 never shows up because the mock was called with # **MUTABLE** headers and it was mutated before the # second request. - headers['range'] = 'bytes=3-5' - headers['accept-encoding'] = 'gzip' - call = mock.call( - 'GET', expected_url, data=None, headers=headers) + headers["range"] = "bytes=3-5" + headers["accept-encoding"] = "gzip" + call = mock.call("GET", expected_url, data=None, headers=headers) self.assertEqual(transport.request.mock_calls, [call, call]) def test__do_download_simple(self): - blob_name = 'blob-name' + blob_name = "blob-name" # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock( - _credentials=_make_credentials(), spec=['_credentials']) + client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) # Make sure this will not be chunked. self.assertIsNone(blob.chunk_size) - transport = mock.Mock(spec=['request']) + transport = mock.Mock(spec=["request"]) transport.request.return_value = self._mock_requests_response( http_client.OK, - {'content-length': '6', 'content-range': 'bytes 0-5/6'}, - content=b'abcdef', + {"content-length": "6", "content-range": "bytes 0-5/6"}, + content=b"abcdef", stream=True, ) file_obj = io.BytesIO() - download_url = 'http://test.invalid' + download_url = "http://test.invalid" headers = {} blob._do_download(transport, file_obj, download_url, headers) # Make sure the download was as expected. 
- self.assertEqual(file_obj.getvalue(), b'abcdef') + self.assertEqual(file_obj.getvalue(), b"abcdef") transport.request.assert_called_once_with( - 'GET', download_url, data=None, headers=headers, stream=True) + "GET", download_url, data=None, headers=headers, stream=True + ) def test__do_download_simple_with_range(self): - blob_name = 'blob-name' + blob_name = "blob-name" # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock( - _credentials=_make_credentials(), spec=['_credentials']) + client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) # Make sure this will not be chunked. self.assertIsNone(blob.chunk_size) - transport = mock.Mock(spec=['request']) + transport = mock.Mock(spec=["request"]) transport.request.return_value = self._mock_requests_response( http_client.OK, - {'content-length': '3', 'content-range': 'bytes 1-3'}, - content=b'bcd', + {"content-length": "3", "content-range": "bytes 1-3"}, + content=b"bcd", stream=True, ) file_obj = io.BytesIO() - download_url = 'http://test.invalid' + download_url = "http://test.invalid" headers = {} - blob._do_download( - transport, file_obj, download_url, headers, start=1, end=3) + blob._do_download(transport, file_obj, download_url, headers, start=1, end=3) # Make sure the download was as expected. - self.assertEqual(file_obj.getvalue(), b'bcd') - self.assertEqual(headers['range'], 'bytes=1-3') + self.assertEqual(file_obj.getvalue(), b"bcd") + self.assertEqual(headers["range"], "bytes=1-3") transport.request.assert_called_once_with( - 'GET', download_url, data=None, headers=headers, stream=True) + "GET", download_url, data=None, headers=headers, stream=True + ) def test__do_download_chunked(self): - blob_name = 'blob-name' + blob_name = "blob-name" # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock( - _credentials=_make_credentials(), spec=['_credentials']) + client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) @@ -802,25 +808,23 @@ def test__do_download_chunked(self): transport = self._mock_download_transport() file_obj = io.BytesIO() - download_url = 'http://test.invalid' + download_url = "http://test.invalid" headers = {} blob._do_download(transport, file_obj, download_url, headers) # Make sure the download was as expected. - self.assertEqual(file_obj.getvalue(), b'abcdef') + self.assertEqual(file_obj.getvalue(), b"abcdef") # Check that the transport was called exactly twice. self.assertEqual(transport.request.call_count, 2) # ``headers`` was modified (in place) once for each API call. - self.assertEqual(headers, {'range': 'bytes=3-5'}) - call = mock.call( - 'GET', download_url, data=None, headers=headers) + self.assertEqual(headers, {"range": "bytes=3-5"}) + call = mock.call("GET", download_url, data=None, headers=headers) self.assertEqual(transport.request.mock_calls, [call, call]) def test__do_download_chunked_with_range(self): - blob_name = 'blob-name' + blob_name = "blob-name" # Create a fake client/bucket and use them in the Blob() constructor. 
- client = mock.Mock( - _credentials=_make_credentials(), spec=['_credentials']) + client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) @@ -830,38 +834,37 @@ def test__do_download_chunked_with_range(self): transport = self._mock_download_transport_range() file_obj = io.BytesIO() - download_url = 'http://test.invalid' + download_url = "http://test.invalid" headers = {} - blob._do_download( - transport, file_obj, download_url, headers, start=1, end=4) + blob._do_download(transport, file_obj, download_url, headers, start=1, end=4) # Make sure the download was as expected. - self.assertEqual(file_obj.getvalue(), b'bcde') + self.assertEqual(file_obj.getvalue(), b"bcde") # Check that the transport was called exactly twice. self.assertEqual(transport.request.call_count, 2) # ``headers`` was modified (in place) once for each API call. - self.assertEqual(headers, {'range': 'bytes=3-4'}) - call = mock.call( - 'GET', download_url, data=None, headers=headers) + self.assertEqual(headers, {"range": "bytes=3-4"}) + call = mock.call("GET", download_url, data=None, headers=headers) self.assertEqual(transport.request.mock_calls, [call, call]) def test_download_to_file_with_failure(self): from google.cloud import exceptions - blob_name = 'blob-name' - transport = mock.Mock(spec=['request']) + blob_name = "blob-name" + transport = mock.Mock(spec=["request"]) bad_response_headers = { - 'Content-Length': '9', - 'Content-Type': 'text/html; charset=UTF-8', + "Content-Length": "9", + "Content-Type": "text/html; charset=UTF-8", } transport.request.return_value = self._mock_requests_response( - http_client.NOT_FOUND, bad_response_headers, content=b'Not found') + http_client.NOT_FOUND, bad_response_headers, content=b"Not found" + ) # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=[u'_http']) + client = mock.Mock(_http=transport, spec=[u"_http"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) # Set the media link on the blob - blob._properties['mediaLink'] = 'http://test.invalid' + blob._properties["mediaLink"] = "http://test.invalid" file_obj = io.BytesIO() with self.assertRaises(exceptions.NotFound): @@ -870,14 +873,18 @@ def test_download_to_file_with_failure(self): self.assertEqual(file_obj.tell(), 0) # Check that the transport was called once. transport.request.assert_called_once_with( - 'GET', blob.media_link, data=None, - headers={'accept-encoding': 'gzip'}, stream=True) + "GET", + blob.media_link, + data=None, + headers={"accept-encoding": "gzip"}, + stream=True, + ) def test_download_to_file_wo_media_link(self): - blob_name = 'blob-name' + blob_name = "blob-name" transport = self._mock_download_transport() # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=[u'_http']) + client = mock.Mock(_http=transport, spec=[u"_http"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) # Modify the blob so there there will be 2 chunks of size 3. @@ -886,23 +893,24 @@ def test_download_to_file_wo_media_link(self): file_obj = io.BytesIO() blob.download_to_file(file_obj) - self.assertEqual(file_obj.getvalue(), b'abcdef') + self.assertEqual(file_obj.getvalue(), b"abcdef") # Make sure the media link is still unknown. 
self.assertIsNone(blob.media_link) expected_url = ( - 'https://www.googleapis.com/download/storage/v1/b/' - 'name/o/blob-name?alt=media') + "https://www.googleapis.com/download/storage/v1/b/" + "name/o/blob-name?alt=media" + ) self._check_session_mocks(client, transport, expected_url) def _download_to_file_helper(self, use_chunks=False): - blob_name = 'blob-name' + blob_name = "blob-name" transport = self._mock_download_transport() # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=[u'_http']) + client = mock.Mock(_http=transport, spec=[u"_http"]) bucket = _Bucket(client) - media_link = 'http://example.com/media/' - properties = {'mediaLink': media_link} + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link} blob = self._make_one(blob_name, bucket=bucket, properties=properties) if use_chunks: # Modify the blob so there there will be 2 chunks of size 3. @@ -912,22 +920,26 @@ def _download_to_file_helper(self, use_chunks=False): # Modify the response. single_chunk_response = self._mock_requests_response( http_client.OK, - {'content-length': '6', 'content-range': 'bytes 0-5/6'}, - content=b'abcdef', + {"content-length": "6", "content-range": "bytes 0-5/6"}, + content=b"abcdef", stream=True, ) transport.request.side_effect = [single_chunk_response] file_obj = io.BytesIO() blob.download_to_file(file_obj) - self.assertEqual(file_obj.getvalue(), b'abcdef') + self.assertEqual(file_obj.getvalue(), b"abcdef") if use_chunks: self._check_session_mocks(client, transport, media_link) else: transport.request.assert_called_once_with( - 'GET', media_link, data=None, - headers={'accept-encoding': 'gzip'}, stream=True) + "GET", + media_link, + data=None, + headers={"accept-encoding": "gzip"}, + stream=True, + ) def test_download_to_file_default(self): self._download_to_file_helper() @@ -940,15 +952,15 @@ def _download_to_filename_helper(self, updated=None): import time from google.cloud._testing import _NamedTemporaryFile - blob_name = 'blob-name' + blob_name = "blob-name" transport = self._mock_download_transport() # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=['_http']) + client = mock.Mock(_http=transport, spec=["_http"]) bucket = _Bucket(client) - media_link = 'http://example.com/media/' - properties = {'mediaLink': media_link} + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link} if updated is not None: - properties['updated'] = updated + properties["updated"] = updated blob = self._make_one(blob_name, bucket=bucket, properties=properties) # Modify the blob so there there will be 2 chunks of size 3. 
@@ -957,7 +969,7 @@ def _download_to_filename_helper(self, updated=None): with _NamedTemporaryFile() as temp: blob.download_to_filename(temp.name) - with open(temp.name, 'rb') as file_obj: + with open(temp.name, "rb") as file_obj: wrote = file_obj.read() if updated is None: self.assertIsNone(blob.updated) @@ -966,12 +978,12 @@ def _download_to_filename_helper(self, updated=None): updated_time = time.mktime(blob.updated.timetuple()) self.assertEqual(mtime, updated_time) - self.assertEqual(wrote, b'abcdef') + self.assertEqual(wrote, b"abcdef") self._check_session_mocks(client, transport, media_link) def test_download_to_filename(self): - updated = '2014-12-06T13:13:50.690Z' + updated = "2014-12-06T13:13:50.690Z" self._download_to_filename_helper(updated=updated) def test_download_to_filename_wo_updated(self): @@ -981,49 +993,45 @@ def test_download_to_filename_corrupted(self): from google.resumable_media import DataCorruption from google.resumable_media.requests.download import _CHECKSUM_MISMATCH - blob_name = 'blob-name' - transport = mock.Mock(spec=['request']) - empty_hash = base64.b64encode( - hashlib.md5(b'').digest()).decode(u'utf-8') - headers = {'x-goog-hash': 'md5=' + empty_hash} - mock_raw = mock.Mock(headers=headers, spec=['headers']) + blob_name = "blob-name" + transport = mock.Mock(spec=["request"]) + empty_hash = base64.b64encode(hashlib.md5(b"").digest()).decode(u"utf-8") + headers = {"x-goog-hash": "md5=" + empty_hash} + mock_raw = mock.Mock(headers=headers, spec=["headers"]) response = mock.MagicMock( headers=headers, status_code=http_client.OK, raw=mock_raw, spec=[ - '__enter__', - '__exit__', - 'headers', - 'iter_content', - 'status_code', - 'raw', + "__enter__", + "__exit__", + "headers", + "iter_content", + "status_code", + "raw", ], ) # i.e. context manager returns ``self``. response.__enter__.return_value = response response.__exit__.return_value = None - chunks = (b'noms1', b'coooookies2') + chunks = (b"noms1", b"coooookies2") response.iter_content.return_value = iter(chunks) transport.request.return_value = response # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=['_http']) + client = mock.Mock(_http=transport, spec=["_http"]) bucket = mock.Mock( - client=client, - user_project=None, - spec=['client', 'user_project'], + client=client, user_project=None, spec=["client", "user_project"] ) - media_link = 'http://example.com/media/' - properties = {'mediaLink': media_link} + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link} blob = self._make_one(blob_name, bucket=bucket, properties=properties) # Make sure the download is **not** chunked. self.assertIsNone(blob.chunk_size) # Make sure the hash will be wrong. 
- content = b''.join(chunks) - expected_hash = base64.b64encode( - hashlib.md5(content).digest()).decode(u'utf-8') + content = b"".join(chunks) + expected_hash = base64.b64encode(hashlib.md5(content).digest()).decode(u"utf-8") self.assertNotEqual(empty_hash, expected_hash) # Try to download into a temporary file (don't use @@ -1043,12 +1051,13 @@ def test_download_to_filename_corrupted(self): response.__enter__.assert_called_once_with() response.__exit__.assert_called_once_with(None, None, None) response.iter_content.assert_called_once_with( - chunk_size=8192, decode_unicode=False) + chunk_size=8192, decode_unicode=False + ) transport.request.assert_called_once_with( - 'GET', + "GET", media_link, data=None, - headers={'accept-encoding': 'gzip'}, + headers={"accept-encoding": "gzip"}, stream=True, ) @@ -1057,173 +1066,179 @@ def test_download_to_filename_w_key(self): import time from google.cloud._testing import _NamedTemporaryFile - blob_name = 'blob-name' + blob_name = "blob-name" transport = self._mock_download_transport() # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=['_http']) + client = mock.Mock(_http=transport, spec=["_http"]) bucket = _Bucket(client) - media_link = 'http://example.com/media/' - properties = {'mediaLink': media_link, - 'updated': '2014-12-06T13:13:50.690Z'} - key = b'aa426195405adee2c8081bb9e7e74b19' + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link, "updated": "2014-12-06T13:13:50.690Z"} + key = b"aa426195405adee2c8081bb9e7e74b19" blob = self._make_one( - blob_name, bucket=bucket, properties=properties, - encryption_key=key) + blob_name, bucket=bucket, properties=properties, encryption_key=key + ) # Modify the blob so there there will be 2 chunks of size 3. blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 with _NamedTemporaryFile() as temp: blob.download_to_filename(temp.name) - with open(temp.name, 'rb') as file_obj: + with open(temp.name, "rb") as file_obj: wrote = file_obj.read() mtime = os.path.getmtime(temp.name) updated_time = time.mktime(blob.updated.timetuple()) - self.assertEqual(wrote, b'abcdef') + self.assertEqual(wrote, b"abcdef") self.assertEqual(mtime, updated_time) - header_key_value = 'YWE0MjYxOTU0MDVhZGVlMmM4MDgxYmI5ZTdlNzRiMTk=' - header_key_hash_value = 'V3Kwe46nKc3xLv96+iJ707YfZfFvlObta8TQcx2gpm0=' + header_key_value = "YWE0MjYxOTU0MDVhZGVlMmM4MDgxYmI5ZTdlNzRiMTk=" + header_key_hash_value = "V3Kwe46nKc3xLv96+iJ707YfZfFvlObta8TQcx2gpm0=" key_headers = { - 'X-Goog-Encryption-Key-Sha256': header_key_hash_value, - 'X-Goog-Encryption-Algorithm': 'AES256', - 'X-Goog-Encryption-Key': header_key_value, - 'accept-encoding': 'gzip', + "X-Goog-Encryption-Key-Sha256": header_key_hash_value, + "X-Goog-Encryption-Algorithm": "AES256", + "X-Goog-Encryption-Key": header_key_value, + "accept-encoding": "gzip", } - self._check_session_mocks( - client, transport, media_link, headers=key_headers) + self._check_session_mocks(client, transport, media_link, headers=key_headers) def test_download_as_string(self): - blob_name = 'blob-name' + blob_name = "blob-name" transport = self._mock_download_transport() # Create a fake client/bucket and use them in the Blob() constructor. 
- client = mock.Mock(_http=transport, spec=['_http']) + client = mock.Mock(_http=transport, spec=["_http"]) bucket = _Bucket(client) - media_link = 'http://example.com/media/' - properties = {'mediaLink': media_link} + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link} blob = self._make_one(blob_name, bucket=bucket, properties=properties) # Modify the blob so there there will be 2 chunks of size 3. blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 fetched = blob.download_as_string() - self.assertEqual(fetched, b'abcdef') + self.assertEqual(fetched, b"abcdef") self._check_session_mocks(client, transport, media_link) def test__get_content_type_explicit(self): - blob = self._make_one(u'blob-name', bucket=None) + blob = self._make_one(u"blob-name", bucket=None) - content_type = u'text/plain' + content_type = u"text/plain" return_value = blob._get_content_type(content_type) self.assertEqual(return_value, content_type) def test__get_content_type_from_blob(self): - blob = self._make_one(u'blob-name', bucket=None) - blob.content_type = u'video/mp4' + blob = self._make_one(u"blob-name", bucket=None) + blob.content_type = u"video/mp4" return_value = blob._get_content_type(None) self.assertEqual(return_value, blob.content_type) def test__get_content_type_from_filename(self): - blob = self._make_one(u'blob-name', bucket=None) + blob = self._make_one(u"blob-name", bucket=None) - return_value = blob._get_content_type(None, filename='archive.tar') - self.assertEqual(return_value, 'application/x-tar') + return_value = blob._get_content_type(None, filename="archive.tar") + self.assertEqual(return_value, "application/x-tar") def test__get_content_type_default(self): - blob = self._make_one(u'blob-name', bucket=None) + blob = self._make_one(u"blob-name", bucket=None) return_value = blob._get_content_type(None) - self.assertEqual(return_value, u'application/octet-stream') + self.assertEqual(return_value, u"application/octet-stream") def test__get_writable_metadata_no_changes(self): - name = u'blob-name' + name = u"blob-name" blob = self._make_one(name, bucket=None) object_metadata = blob._get_writable_metadata() - expected = {'name': name} + expected = {"name": name} self.assertEqual(object_metadata, expected) def test__get_writable_metadata_with_changes(self): - name = u'blob-name' + name = u"blob-name" blob = self._make_one(name, bucket=None) - blob.storage_class = 'NEARLINE' - blob.cache_control = 'max-age=3600' - blob.metadata = {'color': 'red'} + blob.storage_class = "NEARLINE" + blob.cache_control = "max-age=3600" + blob.metadata = {"color": "red"} object_metadata = blob._get_writable_metadata() expected = { - 'cacheControl': blob.cache_control, - 'metadata': blob.metadata, - 'name': name, - 'storageClass': blob.storage_class, + "cacheControl": blob.cache_control, + "metadata": blob.metadata, + "name": name, + "storageClass": blob.storage_class, } self.assertEqual(object_metadata, expected) def test__get_writable_metadata_unwritable_field(self): - name = u'blob-name' - properties = {'updated': '2016-10-16T18:18:18.181Z'} + name = u"blob-name" + properties = {"updated": "2016-10-16T18:18:18.181Z"} blob = self._make_one(name, bucket=None, properties=properties) # Fake that `updated` is in changes. 
- blob._changes.add('updated') + blob._changes.add("updated") object_metadata = blob._get_writable_metadata() - expected = {'name': name} + expected = {"name": name} self.assertEqual(object_metadata, expected) def test__get_upload_arguments(self): - name = u'blob-name' - key = b'[pXw@,p@@AfBfrR3x-2b2SCHR,.?YwRO' + name = u"blob-name" + key = b"[pXw@,p@@AfBfrR3x-2b2SCHR,.?YwRO" blob = self._make_one(name, bucket=None, encryption_key=key) - blob.content_disposition = 'inline' + blob.content_disposition = "inline" - content_type = u'image/jpeg' + content_type = u"image/jpeg" info = blob._get_upload_arguments(content_type) headers, object_metadata, new_content_type = info - header_key_value = 'W3BYd0AscEBAQWZCZnJSM3gtMmIyU0NIUiwuP1l3Uk8=' - header_key_hash_value = 'G0++dxF4q5rG4o9kE8gvEKn15RH6wLm0wXV1MgAlXOg=' + header_key_value = "W3BYd0AscEBAQWZCZnJSM3gtMmIyU0NIUiwuP1l3Uk8=" + header_key_hash_value = "G0++dxF4q5rG4o9kE8gvEKn15RH6wLm0wXV1MgAlXOg=" expected_headers = { - 'X-Goog-Encryption-Algorithm': 'AES256', - 'X-Goog-Encryption-Key': header_key_value, - 'X-Goog-Encryption-Key-Sha256': header_key_hash_value, + "X-Goog-Encryption-Algorithm": "AES256", + "X-Goog-Encryption-Key": header_key_value, + "X-Goog-Encryption-Key-Sha256": header_key_hash_value, } self.assertEqual(headers, expected_headers) expected_metadata = { - 'contentDisposition': blob.content_disposition, - 'name': name, + "contentDisposition": blob.content_disposition, + "name": name, } self.assertEqual(object_metadata, expected_metadata) self.assertEqual(new_content_type, content_type) - def _mock_transport(self, status_code, headers, content=b''): - fake_transport = mock.Mock(spec=['request']) + def _mock_transport(self, status_code, headers, content=b""): + fake_transport = mock.Mock(spec=["request"]) fake_response = self._mock_requests_response( - status_code, headers, content=content) + status_code, headers, content=content + ) fake_transport.request.return_value = fake_response return fake_transport - def _do_multipart_success(self, mock_get_boundary, size=None, - num_retries=None, user_project=None, - predefined_acl=None, kms_key_name=None): + def _do_multipart_success( + self, + mock_get_boundary, + size=None, + num_retries=None, + user_project=None, + predefined_acl=None, + kms_key_name=None, + ): from six.moves.urllib.parse import urlencode - bucket = _Bucket(name='w00t', user_project=user_project) - blob = self._make_one( - u'blob-name', bucket=bucket, kms_key_name=kms_key_name) + + bucket = _Bucket(name="w00t", user_project=user_project) + blob = self._make_one(u"blob-name", bucket=bucket, kms_key_name=kms_key_name) self.assertIsNone(blob.chunk_size) # Create mocks to be checked for doing transport. transport = self._mock_transport(http_client.OK, {}) # Create some mock arguments. - client = mock.Mock(_http=transport, spec=['_http']) - data = b'data here hear hier' + client = mock.Mock(_http=transport, spec=["_http"]) + data = b"data here hear hier" stream = io.BytesIO(data) - content_type = u'application/xml' + content_type = u"application/xml" response = blob._do_multipart_upload( - client, stream, content_type, size, num_retries, predefined_acl) + client, stream, content_type, size, num_retries, predefined_acl + ) # Check the mocks and the returned value. 
self.assertIs(response, transport.request.return_value) @@ -1236,53 +1251,49 @@ def _do_multipart_success(self, mock_get_boundary, size=None, mock_get_boundary.assert_called_once_with() - upload_url = ( - 'https://www.googleapis.com/upload/storage/v1' + bucket.path + '/o') + upload_url = "https://www.googleapis.com/upload/storage/v1" + bucket.path + "/o" - qs_params = [('uploadType', 'multipart')] + qs_params = [("uploadType", "multipart")] if user_project is not None: - qs_params.append(('userProject', user_project)) + qs_params.append(("userProject", user_project)) if predefined_acl is not None: - qs_params.append(('predefinedAcl', predefined_acl)) + qs_params.append(("predefinedAcl", predefined_acl)) if kms_key_name is not None: - qs_params.append(('kmsKeyName', kms_key_name)) + qs_params.append(("kmsKeyName", kms_key_name)) - upload_url += '?' + urlencode(qs_params) + upload_url += "?" + urlencode(qs_params) payload = ( - b'--==0==\r\n' - + b'content-type: application/json; charset=UTF-8\r\n\r\n' + b"--==0==\r\n" + + b"content-type: application/json; charset=UTF-8\r\n\r\n" + b'{"name": "blob-name"}\r\n' - + b'--==0==\r\n' - + b'content-type: application/xml\r\n\r\n' + + b"--==0==\r\n" + + b"content-type: application/xml\r\n\r\n" + data_read - + b'\r\n--==0==--') - headers = {'content-type': b'multipart/related; boundary="==0=="'} + + b"\r\n--==0==--" + ) + headers = {"content-type": b'multipart/related; boundary="==0=="'} transport.request.assert_called_once_with( - 'POST', upload_url, data=payload, headers=headers) + "POST", upload_url, data=payload, headers=headers + ) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_no_size(self, mock_get_boundary): - self._do_multipart_success(mock_get_boundary, predefined_acl='private') + self._do_multipart_success(mock_get_boundary, predefined_acl="private") - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_size(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, size=10) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_user_project(self, mock_get_boundary): - user_project = 'user-project-123' - self._do_multipart_success( - mock_get_boundary, user_project=user_project) + user_project = "user-project-123" + self._do_multipart_success(mock_get_boundary, user_project=user_project) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_kms(self, mock_get_boundary): kms_resource = ( "projects/test-project-123/" @@ -1290,18 +1301,16 @@ def test__do_multipart_upload_with_kms(self, mock_get_boundary): "keyRings/test-ring/" "cryptoKeys/test-key" ) - self._do_multipart_success( - mock_get_boundary, kms_key_name=kms_resource) + self._do_multipart_success(mock_get_boundary, kms_key_name=kms_resource) - @mock.patch(u'google.resumable_media._upload.get_boundary', - return_value=b'==0==') + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_retry(self, 
mock_get_boundary): self._do_multipart_success(mock_get_boundary, num_retries=8) def test__do_multipart_upload_bad_size(self): - blob = self._make_one(u'blob-name', bucket=None) + blob = self._make_one(u"blob-name", bucket=None) - data = b'data here hear hier' + data = b"data here hear hier" stream = io.BytesIO(data) size = 50 self.assertGreater(size, len(data)) @@ -1310,21 +1319,26 @@ def test__do_multipart_upload_bad_size(self): blob._do_multipart_upload(None, stream, None, size, None, None) exc_contents = str(exc_info.exception) - self.assertIn( - 'was specified but the file-like object only had', exc_contents) + self.assertIn("was specified but the file-like object only had", exc_contents) self.assertEqual(stream.tell(), len(data)) def _initiate_resumable_helper( - self, size=None, extra_headers=None, chunk_size=None, - num_retries=None, user_project=None, predefined_acl=None, - blob_chunk_size=786432, kms_key_name=None): + self, + size=None, + extra_headers=None, + chunk_size=None, + num_retries=None, + user_project=None, + predefined_acl=None, + blob_chunk_size=786432, + kms_key_name=None, + ): from six.moves.urllib.parse import urlencode from google.resumable_media.requests import ResumableUpload - bucket = _Bucket(name='whammy', user_project=user_project) - blob = self._make_one( - u'blob-name', bucket=bucket, kms_key_name=kms_key_name) - blob.metadata = {'rook': 'takes knight'} + bucket = _Bucket(name="whammy", user_project=user_project) + blob = self._make_one(u"blob-name", bucket=bucket, kms_key_name=kms_key_name) + blob.metadata = {"rook": "takes knight"} blob.chunk_size = blob_chunk_size if blob_chunk_size is not None: self.assertIsNotNone(blob.chunk_size) @@ -1334,41 +1348,45 @@ def _initiate_resumable_helper( # Need to make sure **same** dict is used because ``json.dumps()`` # will depend on the hash order. object_metadata = blob._get_writable_metadata() - blob._get_writable_metadata = mock.Mock( - return_value=object_metadata, spec=[]) + blob._get_writable_metadata = mock.Mock(return_value=object_metadata, spec=[]) # Create mocks to be checked for doing transport. - resumable_url = 'http://test.invalid?upload_id=hey-you' - response_headers = {'location': resumable_url} + resumable_url = "http://test.invalid?upload_id=hey-you" + response_headers = {"location": resumable_url} transport = self._mock_transport(http_client.OK, response_headers) # Create some mock arguments and call the method under test. - client = mock.Mock(_http=transport, spec=[u'_http']) - data = b'hello hallo halo hi-low' + client = mock.Mock(_http=transport, spec=[u"_http"]) + data = b"hello hallo halo hi-low" stream = io.BytesIO(data) - content_type = u'text/plain' + content_type = u"text/plain" upload, transport = blob._initiate_resumable_upload( - client, stream, content_type, size, num_retries, + client, + stream, + content_type, + size, + num_retries, extra_headers=extra_headers, - chunk_size=chunk_size, predefined_acl=predefined_acl) + chunk_size=chunk_size, + predefined_acl=predefined_acl, + ) # Check the returned values. 
self.assertIsInstance(upload, ResumableUpload) - upload_url = ( - 'https://www.googleapis.com/upload/storage/v1' + bucket.path + '/o') - qs_params = [('uploadType', 'resumable')] + upload_url = "https://www.googleapis.com/upload/storage/v1" + bucket.path + "/o" + qs_params = [("uploadType", "resumable")] if user_project is not None: - qs_params.append(('userProject', user_project)) + qs_params.append(("userProject", user_project)) if predefined_acl is not None: - qs_params.append(('predefinedAcl', predefined_acl)) + qs_params.append(("predefinedAcl", predefined_acl)) if kms_key_name is not None: - qs_params.append(('kmsKeyName', kms_key_name)) + qs_params.append(("kmsKeyName", kms_key_name)) - upload_url += '?' + urlencode(qs_params) + upload_url += "?" + urlencode(qs_params) self.assertEqual(upload.upload_url, upload_url) if extra_headers is None: @@ -1379,8 +1397,9 @@ def _initiate_resumable_helper( self.assertFalse(upload.finished) if chunk_size is None: if blob_chunk_size is None: - self.assertEqual(upload._chunk_size, - google.cloud.storage.blob._DEFAULT_CHUNKSIZE) + self.assertEqual( + upload._chunk_size, google.cloud.storage.blob._DEFAULT_CHUNKSIZE + ) else: self.assertEqual(upload._chunk_size, blob.chunk_size) else: @@ -1407,17 +1426,18 @@ def _initiate_resumable_helper( # Check the mocks. blob._get_writable_metadata.assert_called_once_with() - payload = json.dumps(object_metadata).encode('utf-8') + payload = json.dumps(object_metadata).encode("utf-8") expected_headers = { - 'content-type': 'application/json; charset=UTF-8', - 'x-upload-content-type': content_type, + "content-type": "application/json; charset=UTF-8", + "x-upload-content-type": content_type, } if size is not None: - expected_headers['x-upload-content-length'] = str(size) + expected_headers["x-upload-content-length"] = str(size) if extra_headers is not None: expected_headers.update(extra_headers) transport.request.assert_called_once_with( - 'POST', upload_url, data=payload, headers=expected_headers) + "POST", upload_url, data=payload, headers=expected_headers + ) def test__initiate_resumable_upload_no_size(self): self._initiate_resumable_helper() @@ -1426,7 +1446,7 @@ def test__initiate_resumable_upload_with_size(self): self._initiate_resumable_helper(size=10000) def test__initiate_resumable_upload_with_user_project(self): - user_project = 'user-project-123' + user_project = "user-project-123" self._initiate_resumable_helper(user_project=user_project) def test__initiate_resumable_upload_with_kms(self): @@ -1446,97 +1466,94 @@ def test__initiate_resumable_upload_with_chunk_size(self): self._initiate_resumable_helper(chunk_size=one_mb) def test__initiate_resumable_upload_with_extra_headers(self): - extra_headers = {'origin': 'http://not-in-kansas-anymore.invalid'} + extra_headers = {"origin": "http://not-in-kansas-anymore.invalid"} self._initiate_resumable_helper(extra_headers=extra_headers) def test__initiate_resumable_upload_with_retry(self): self._initiate_resumable_helper(num_retries=11) def test__initiate_resumable_upload_with_predefined_acl(self): - self._initiate_resumable_helper(predefined_acl='private') + self._initiate_resumable_helper(predefined_acl="private") - def _make_resumable_transport(self, headers1, headers2, - headers3, total_bytes): + def _make_resumable_transport(self, headers1, headers2, headers3, total_bytes): from google import resumable_media - fake_transport = mock.Mock(spec=['request']) + fake_transport = mock.Mock(spec=["request"]) - fake_response1 = self._mock_requests_response( - 
http_client.OK, headers1) + fake_response1 = self._mock_requests_response(http_client.OK, headers1) fake_response2 = self._mock_requests_response( - resumable_media.PERMANENT_REDIRECT, headers2) + resumable_media.PERMANENT_REDIRECT, headers2 + ) json_body = '{{"size": "{:d}"}}'.format(total_bytes) fake_response3 = self._mock_requests_response( - http_client.OK, headers3, - content=json_body.encode('utf-8')) + http_client.OK, headers3, content=json_body.encode("utf-8") + ) responses = [fake_response1, fake_response2, fake_response3] fake_transport.request.side_effect = responses return fake_transport, responses @staticmethod - def _do_resumable_upload_call0( - blob, content_type, size=None, predefined_acl=None): + def _do_resumable_upload_call0(blob, content_type, size=None, predefined_acl=None): # First mock transport.request() does initiates upload. upload_url = ( - 'https://www.googleapis.com/upload/storage/v1' + "https://www.googleapis.com/upload/storage/v1" + blob.bucket.path - + '/o?uploadType=resumable') + + "/o?uploadType=resumable" + ) if predefined_acl is not None: - upload_url += '&predefinedAcl={}'.format(predefined_acl) + upload_url += "&predefinedAcl={}".format(predefined_acl) expected_headers = { - 'content-type': 'application/json; charset=UTF-8', - 'x-upload-content-type': content_type, + "content-type": "application/json; charset=UTF-8", + "x-upload-content-type": content_type, } if size is not None: - expected_headers['x-upload-content-length'] = str(size) - payload = json.dumps({'name': blob.name}).encode('utf-8') - return mock.call( - 'POST', upload_url, data=payload, headers=expected_headers) + expected_headers["x-upload-content-length"] = str(size) + payload = json.dumps({"name": blob.name}).encode("utf-8") + return mock.call("POST", upload_url, data=payload, headers=expected_headers) @staticmethod def _do_resumable_upload_call1( - blob, content_type, data, resumable_url, size=None, - predefined_acl=None): + blob, content_type, data, resumable_url, size=None, predefined_acl=None + ): # Second mock transport.request() does sends first chunk. if size is None: - content_range = 'bytes 0-{:d}/*'.format(blob.chunk_size - 1) + content_range = "bytes 0-{:d}/*".format(blob.chunk_size - 1) else: - content_range = 'bytes 0-{:d}/{:d}'.format( - blob.chunk_size - 1, size) + content_range = "bytes 0-{:d}/{:d}".format(blob.chunk_size - 1, size) expected_headers = { - 'content-type': content_type, - 'content-range': content_range, + "content-type": content_type, + "content-range": content_range, } - payload = data[:blob.chunk_size] - return mock.call( - 'PUT', resumable_url, data=payload, headers=expected_headers) + payload = data[: blob.chunk_size] + return mock.call("PUT", resumable_url, data=payload, headers=expected_headers) @staticmethod def _do_resumable_upload_call2( - blob, content_type, data, resumable_url, total_bytes, - predefined_acl=None): + blob, content_type, data, resumable_url, total_bytes, predefined_acl=None + ): # Third mock transport.request() does sends last chunk. 
- content_range = 'bytes {:d}-{:d}/{:d}'.format( - blob.chunk_size, total_bytes - 1, total_bytes) + content_range = "bytes {:d}-{:d}/{:d}".format( + blob.chunk_size, total_bytes - 1, total_bytes + ) expected_headers = { - 'content-type': content_type, - 'content-range': content_range, + "content-type": content_type, + "content-range": content_range, } - payload = data[blob.chunk_size:] - return mock.call( - 'PUT', resumable_url, data=payload, headers=expected_headers) + payload = data[blob.chunk_size :] + return mock.call("PUT", resumable_url, data=payload, headers=expected_headers) def _do_resumable_helper( - self, use_size=False, num_retries=None, predefined_acl=None): - bucket = _Bucket(name='yesterday') - blob = self._make_one(u'blob-name', bucket=bucket) + self, use_size=False, num_retries=None, predefined_acl=None + ): + bucket = _Bucket(name="yesterday") + blob = self._make_one(u"blob-name", bucket=bucket) blob.chunk_size = blob._CHUNK_SIZE_MULTIPLE self.assertIsNotNone(blob.chunk_size) # Data to be uploaded. - data = b'' + (b'A' * blob.chunk_size) + b'' + data = b"" + (b"A" * blob.chunk_size) + b"" total_bytes = len(data) if use_size: size = total_bytes @@ -1544,18 +1561,20 @@ def _do_resumable_helper( size = None # Create mocks to be checked for doing transport. - resumable_url = 'http://test.invalid?upload_id=and-then-there-was-1' - headers1 = {'location': resumable_url} - headers2 = {'range': 'bytes=0-{:d}'.format(blob.chunk_size - 1)} + resumable_url = "http://test.invalid?upload_id=and-then-there-was-1" + headers1 = {"location": resumable_url} + headers2 = {"range": "bytes=0-{:d}".format(blob.chunk_size - 1)} transport, responses = self._make_resumable_transport( - headers1, headers2, {}, total_bytes) + headers1, headers2, {}, total_bytes + ) # Create some mock arguments and call the method under test. - client = mock.Mock(_http=transport, spec=['_http']) + client = mock.Mock(_http=transport, spec=["_http"]) stream = io.BytesIO(data) - content_type = u'text/html' + content_type = u"text/html" response = blob._do_resumable_upload( - client, stream, content_type, size, num_retries, predefined_acl) + client, stream, content_type, size, num_retries, predefined_acl + ) # Check the returned values. self.assertIs(response, responses[2]) @@ -1563,15 +1582,25 @@ def _do_resumable_helper( # Check the mocks. 
call0 = self._do_resumable_upload_call0( - blob, content_type, size=size, predefined_acl=predefined_acl) + blob, content_type, size=size, predefined_acl=predefined_acl + ) call1 = self._do_resumable_upload_call1( - blob, content_type, data, resumable_url, size=size, - predefined_acl=predefined_acl) + blob, + content_type, + data, + resumable_url, + size=size, + predefined_acl=predefined_acl, + ) call2 = self._do_resumable_upload_call2( - blob, content_type, data, resumable_url, total_bytes, - predefined_acl=predefined_acl) - self.assertEqual( - transport.request.mock_calls, [call0, call1, call2]) + blob, + content_type, + data, + resumable_url, + total_bytes, + predefined_acl=predefined_acl, + ) + self.assertEqual(transport.request.mock_calls, [call0, call1, call2]) def test__do_resumable_upload_no_size(self): self._do_resumable_helper() @@ -1583,15 +1612,15 @@ def test__do_resumable_upload_with_retry(self): self._do_resumable_helper(num_retries=6) def test__do_resumable_upload_with_predefined_acl(self): - self._do_resumable_helper(predefined_acl='private') + self._do_resumable_helper(predefined_acl="private") def _do_upload_helper( - self, chunk_size=None, num_retries=None, predefined_acl=None, - size=None): - blob = self._make_one(u'blob-name', bucket=None) + self, chunk_size=None, num_retries=None, predefined_acl=None, size=None + ): + blob = self._make_one(u"blob-name", bucket=None) # Create a fake response. - response = mock.Mock(spec=[u'json']) + response = mock.Mock(spec=[u"json"]) response.json.return_value = mock.sentinel.json # Mock **both** helpers. blob._do_multipart_upload = mock.Mock(return_value=response, spec=[]) @@ -1605,34 +1634,34 @@ def _do_upload_helper( client = mock.sentinel.client stream = mock.sentinel.stream - content_type = u'video/mp4' + content_type = u"video/mp4" if size is None: size = 12345654321 # Make the request and check the mocks. 
created_json = blob._do_upload( - client, stream, content_type, size, num_retries, predefined_acl) + client, stream, content_type, size, num_retries, predefined_acl + ) self.assertIs(created_json, mock.sentinel.json) response.json.assert_called_once_with() - if size is not None and \ - size <= google.cloud.storage.blob._MAX_MULTIPART_SIZE: + if size is not None and size <= google.cloud.storage.blob._MAX_MULTIPART_SIZE: blob._do_multipart_upload.assert_called_once_with( - client, stream, content_type, size, num_retries, - predefined_acl) + client, stream, content_type, size, num_retries, predefined_acl + ) blob._do_resumable_upload.assert_not_called() else: blob._do_multipart_upload.assert_not_called() blob._do_resumable_upload.assert_called_once_with( - client, stream, content_type, size, num_retries, - predefined_acl) + client, stream, content_type, size, num_retries, predefined_acl + ) def test__do_upload_uses_multipart(self): - self._do_upload_helper( - size=google.cloud.storage.blob._MAX_MULTIPART_SIZE) + self._do_upload_helper(size=google.cloud.storage.blob._MAX_MULTIPART_SIZE) def test__do_upload_uses_resumable(self): self._do_upload_helper( chunk_size=256 * 1024, # 256KB - size=google.cloud.storage.blob._MAX_MULTIPART_SIZE + 1) + size=google.cloud.storage.blob._MAX_MULTIPART_SIZE + 1, + ) def test__do_upload_with_retry(self): self._do_upload_helper(num_retries=20) @@ -1640,51 +1669,49 @@ def test__do_upload_with_retry(self): def _upload_from_file_helper(self, side_effect=None, **kwargs): from google.cloud._helpers import UTC - blob = self._make_one('blob-name', bucket=None) + blob = self._make_one("blob-name", bucket=None) # Mock low-level upload helper on blob (it is tested elsewhere). - created_json = {'updated': '2017-01-01T09:09:09.081Z'} + created_json = {"updated": "2017-01-01T09:09:09.081Z"} blob._do_upload = mock.Mock(return_value=created_json, spec=[]) if side_effect is not None: blob._do_upload.side_effect = side_effect # Make sure `updated` is empty before the request. self.assertIsNone(blob.updated) - data = b'data is here' + data = b"data is here" stream = io.BytesIO(data) stream.seek(2) # Not at zero. - content_type = u'font/woff' + content_type = u"font/woff" client = mock.sentinel.client - predefined_acl = kwargs.get('predefined_acl', None) + predefined_acl = kwargs.get("predefined_acl", None) ret_val = blob.upload_from_file( - stream, size=len(data), content_type=content_type, - client=client, **kwargs) + stream, size=len(data), content_type=content_type, client=client, **kwargs + ) # Check the response and side-effects. self.assertIsNone(ret_val) - new_updated = datetime.datetime( - 2017, 1, 1, 9, 9, 9, 81000, tzinfo=UTC) + new_updated = datetime.datetime(2017, 1, 1, 9, 9, 9, 81000, tzinfo=UTC) self.assertEqual(blob.updated, new_updated) # Check the mock. 
- num_retries = kwargs.get('num_retries') + num_retries = kwargs.get("num_retries") blob._do_upload.assert_called_once_with( - client, stream, content_type, - len(data), num_retries, predefined_acl) + client, stream, content_type, len(data), num_retries, predefined_acl + ) return stream def test_upload_from_file_success(self): - stream = self._upload_from_file_helper(predefined_acl='private') + stream = self._upload_from_file_helper(predefined_acl="private") assert stream.tell() == 2 - @mock.patch('warnings.warn') + @mock.patch("warnings.warn") def test_upload_from_file_with_retries(self, mock_warn): from google.cloud.storage import blob as blob_module self._upload_from_file_helper(num_retries=20) mock_warn.assert_called_once_with( - blob_module._NUM_RETRIES_MESSAGE, - DeprecationWarning, - stacklevel=2) + blob_module._NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2 + ) def test_upload_from_file_with_rewind(self): stream = self._upload_from_file_helper(rewind=True) @@ -1696,11 +1723,10 @@ def test_upload_from_file_failure(self): from google.resumable_media import InvalidResponse from google.cloud import exceptions - message = 'Someone is already in this spot.' + message = "Someone is already in this spot." response = requests.Response() response.status_code = http_client.CONFLICT - response.request = requests.Request( - 'POST', 'http://example.com').prepare() + response.request = requests.Request("POST", "http://example.com").prepare() side_effect = InvalidResponse(response, message) with self.assertRaises(exceptions.Conflict) as exc_info: @@ -1713,7 +1739,7 @@ def _do_upload_mock_call_helper(self, blob, client, content_type, size): self.assertEqual(blob._do_upload.call_count, 1) mock_call = blob._do_upload.mock_calls[0] call_name, pos_args, kwargs = mock_call - self.assertEqual(call_name, '') + self.assertEqual(call_name, "") self.assertEqual(len(pos_args), 6) self.assertEqual(pos_args[0], client) self.assertEqual(pos_args[2], content_type) @@ -1727,41 +1753,41 @@ def _do_upload_mock_call_helper(self, blob, client, content_type, size): def test_upload_from_filename(self): from google.cloud._testing import _NamedTemporaryFile - blob = self._make_one('blob-name', bucket=None) + blob = self._make_one("blob-name", bucket=None) # Mock low-level upload helper on blob (it is tested elsewhere). - created_json = {'metadata': {'mint': 'ice-cream'}} + created_json = {"metadata": {"mint": "ice-cream"}} blob._do_upload = mock.Mock(return_value=created_json, spec=[]) # Make sure `metadata` is empty before the request. self.assertIsNone(blob.metadata) - data = b'soooo much data' - content_type = u'image/svg+xml' + data = b"soooo much data" + content_type = u"image/svg+xml" client = mock.sentinel.client with _NamedTemporaryFile() as temp: - with open(temp.name, 'wb') as file_obj: + with open(temp.name, "wb") as file_obj: file_obj.write(data) ret_val = blob.upload_from_filename( - temp.name, content_type=content_type, client=client) + temp.name, content_type=content_type, client=client + ) # Check the response and side-effects. self.assertIsNone(ret_val) - self.assertEqual(blob.metadata, created_json['metadata']) + self.assertEqual(blob.metadata, created_json["metadata"]) # Check the mock. 
- stream = self._do_upload_mock_call_helper( - blob, client, content_type, len(data)) + stream = self._do_upload_mock_call_helper(blob, client, content_type, len(data)) self.assertTrue(stream.closed) - self.assertEqual(stream.mode, 'rb') + self.assertEqual(stream.mode, "rb") self.assertEqual(stream.name, temp.name) def _upload_from_string_helper(self, data, **kwargs): from google.cloud._helpers import _to_bytes - blob = self._make_one('blob-name', bucket=None) + blob = self._make_one("blob-name", bucket=None) # Mock low-level upload helper on blob (it is tested elsewhere). - created_json = {'componentCount': '5'} + created_json = {"componentCount": "5"} blob._do_upload = mock.Mock(return_value=created_json, spec=[]) # Make sure `metadata` is empty before the request. self.assertIsNone(blob.component_count) @@ -1774,42 +1800,41 @@ def _upload_from_string_helper(self, data, **kwargs): self.assertEqual(blob.component_count, 5) # Check the mock. - payload = _to_bytes(data, encoding='utf-8') + payload = _to_bytes(data, encoding="utf-8") stream = self._do_upload_mock_call_helper( - blob, client, 'text/plain', len(payload)) + blob, client, "text/plain", len(payload) + ) self.assertIsInstance(stream, io.BytesIO) self.assertEqual(stream.getvalue(), payload) def test_upload_from_string_w_bytes(self): - data = b'XB]jb\xb8tad\xe0' + data = b"XB]jb\xb8tad\xe0" self._upload_from_string_helper(data) def test_upload_from_string_w_text(self): - data = u'\N{snowman} \N{sailboat}' + data = u"\N{snowman} \N{sailboat}" self._upload_from_string_helper(data) - def _create_resumable_upload_session_helper(self, origin=None, - side_effect=None): - bucket = _Bucket(name='alex-trebek') - blob = self._make_one('blob-name', bucket=bucket) + def _create_resumable_upload_session_helper(self, origin=None, side_effect=None): + bucket = _Bucket(name="alex-trebek") + blob = self._make_one("blob-name", bucket=bucket) chunk_size = 99 * blob._CHUNK_SIZE_MULTIPLE blob.chunk_size = chunk_size # Create mocks to be checked for doing transport. - resumable_url = 'http://test.invalid?upload_id=clean-up-everybody' - response_headers = {'location': resumable_url} - transport = self._mock_transport( - http_client.OK, response_headers) + resumable_url = "http://test.invalid?upload_id=clean-up-everybody" + response_headers = {"location": resumable_url} + transport = self._mock_transport(http_client.OK, response_headers) if side_effect is not None: transport.request.side_effect = side_effect # Create some mock arguments and call the method under test. - content_type = u'text/plain' + content_type = u"text/plain" size = 10000 - client = mock.Mock(_http=transport, spec=[u'_http']) + client = mock.Mock(_http=transport, spec=[u"_http"]) new_url = blob.create_resumable_upload_session( - content_type=content_type, size=size, - origin=origin, client=client) + content_type=content_type, size=size, origin=origin, client=client + ) # Check the returned value and (lack of) side-effect. self.assertEqual(new_url, resumable_url) @@ -1817,39 +1842,40 @@ def _create_resumable_upload_session_helper(self, origin=None, # Check the mocks. 
upload_url = ( - 'https://www.googleapis.com/upload/storage/v1' + "https://www.googleapis.com/upload/storage/v1" + bucket.path - + '/o?uploadType=resumable') + + "/o?uploadType=resumable" + ) payload = b'{"name": "blob-name"}' expected_headers = { - 'content-type': 'application/json; charset=UTF-8', - 'x-upload-content-length': str(size), - 'x-upload-content-type': content_type, + "content-type": "application/json; charset=UTF-8", + "x-upload-content-length": str(size), + "x-upload-content-type": content_type, } if origin is not None: - expected_headers['Origin'] = origin + expected_headers["Origin"] = origin transport.request.assert_called_once_with( - 'POST', upload_url, data=payload, headers=expected_headers) + "POST", upload_url, data=payload, headers=expected_headers + ) def test_create_resumable_upload_session(self): self._create_resumable_upload_session_helper() def test_create_resumable_upload_session_with_origin(self): - self._create_resumable_upload_session_helper( - origin='http://google.com') + self._create_resumable_upload_session_helper(origin="http://google.com") def test_create_resumable_upload_session_with_failure(self): from google.resumable_media import InvalidResponse from google.cloud import exceptions - message = '5-oh-3 woe is me.' + message = "5-oh-3 woe is me." response = self._mock_requests_response( - status_code=http_client.SERVICE_UNAVAILABLE, headers={}) + status_code=http_client.SERVICE_UNAVAILABLE, headers={} + ) side_effect = InvalidResponse(response, message) with self.assertRaises(exceptions.ServiceUnavailable) as exc_info: - self._create_resumable_upload_session_helper( - side_effect=side_effect) + self._create_resumable_upload_session_helper(side_effect=side_effect) self.assertIn(message, exc_info.exception.message) self.assertEqual(exc_info.exception.errors, []) @@ -1860,30 +1886,30 @@ def test_get_iam_policy(self): from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.cloud.iam import Policy - BLOB_NAME = 'blob-name' - PATH = '/b/name/o/%s' % (BLOB_NAME,) - ETAG = 'DEADBEEF' + BLOB_NAME = "blob-name" + PATH = "/b/name/o/%s" % (BLOB_NAME,) + ETAG = "DEADBEEF" VERSION = 17 - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' + OWNER1 = "user:phred@example.com" + OWNER2 = "group:cloud-logs@google.com" + EDITOR1 = "domain:google.com" + EDITOR2 = "user:phred@example.com" + VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" + VIEWER2 = "user:phred@example.com" RETURNED = { - 'resourceId': PATH, - 'etag': ETAG, - 'version': VERSION, - 'bindings': [ - {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + "resourceId": PATH, + "etag": ETAG, + "version": VERSION, + "bindings": [ + {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, ], } - after = ({'status': http_client.OK}, RETURNED) + after = ({"status": http_client.OK}, RETURNED) EXPECTED = { - binding['role']: set(binding['members']) - for binding in RETURNED['bindings']} + binding["role"]: set(binding["members"]) for binding in RETURNED["bindings"] + } connection = _Connection(after) client = _Client(connection) bucket = 
_Bucket(client=client) @@ -1892,34 +1918,37 @@ def test_get_iam_policy(self): policy = blob.get_iam_policy() self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED['etag']) - self.assertEqual(policy.version, RETURNED['version']) + self.assertEqual(policy.etag, RETURNED["etag"]) + self.assertEqual(policy.version, RETURNED["version"]) self.assertEqual(dict(policy), EXPECTED) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '%s/iam' % (PATH,), - 'query_params': {}, - '_target_object': None, - }) + self.assertEqual( + kw[0], + { + "method": "GET", + "path": "%s/iam" % (PATH,), + "query_params": {}, + "_target_object": None, + }, + ) def test_get_iam_policy_w_user_project(self): from google.cloud.iam import Policy - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - PATH = '/b/name/o/%s' % (BLOB_NAME,) - ETAG = 'DEADBEEF' + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" + PATH = "/b/name/o/%s" % (BLOB_NAME,) + ETAG = "DEADBEEF" VERSION = 17 RETURNED = { - 'resourceId': PATH, - 'etag': ETAG, - 'version': VERSION, - 'bindings': [], + "resourceId": PATH, + "etag": ETAG, + "version": VERSION, + "bindings": [], } - after = ({'status': http_client.OK}, RETURNED) + after = ({"status": http_client.OK}, RETURNED) EXPECTED = {} connection = _Connection(after) client = _Client(connection) @@ -1929,18 +1958,21 @@ def test_get_iam_policy_w_user_project(self): policy = blob.get_iam_policy() self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED['etag']) - self.assertEqual(policy.version, RETURNED['version']) + self.assertEqual(policy.etag, RETURNED["etag"]) + self.assertEqual(policy.version, RETURNED["version"]) self.assertEqual(dict(policy), EXPECTED) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'GET', - 'path': '%s/iam' % (PATH,), - 'query_params': {'userProject': USER_PROJECT}, - '_target_object': None, - }) + self.assertEqual( + kw[0], + { + "method": "GET", + "path": "%s/iam" % (PATH,), + "query_params": {"userProject": USER_PROJECT}, + "_target_object": None, + }, + ) def test_set_iam_policy(self): import operator @@ -1949,30 +1981,26 @@ def test_set_iam_policy(self): from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.cloud.iam import Policy - BLOB_NAME = 'blob-name' - PATH = '/b/name/o/%s' % (BLOB_NAME,) - ETAG = 'DEADBEEF' + BLOB_NAME = "blob-name" + PATH = "/b/name/o/%s" % (BLOB_NAME,) + ETAG = "DEADBEEF" VERSION = 17 - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' + OWNER1 = "user:phred@example.com" + OWNER2 = "group:cloud-logs@google.com" + EDITOR1 = "domain:google.com" + EDITOR2 = "user:phred@example.com" + VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" + VIEWER2 = "user:phred@example.com" BINDINGS = [ - {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, ] - RETURNED = { - 'etag': ETAG, - 'version': VERSION, - 'bindings': BINDINGS, - } - after = ({'status': 
http_client.OK}, RETURNED) + RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} + after = ({"status": http_client.OK}, RETURNED) policy = Policy() for binding in BINDINGS: - policy[binding['role']] = binding['members'] + policy[binding["role"]] = binding["members"] connection = _Connection(after) client = _Client(connection) @@ -1987,35 +2015,30 @@ def test_set_iam_policy(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {}) - sent = kw[0]['data'] - self.assertEqual(sent['resourceId'], PATH) - self.assertEqual(len(sent['bindings']), len(BINDINGS)) - key = operator.itemgetter('role') + self.assertEqual(kw[0]["method"], "PUT") + self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) + self.assertEqual(kw[0]["query_params"], {}) + sent = kw[0]["data"] + self.assertEqual(sent["resourceId"], PATH) + self.assertEqual(len(sent["bindings"]), len(BINDINGS)) + key = operator.itemgetter("role") for found, expected in zip( - sorted(sent['bindings'], key=key), - sorted(BINDINGS, key=key)): - self.assertEqual(found['role'], expected['role']) - self.assertEqual( - sorted(found['members']), sorted(expected['members'])) + sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key) + ): + self.assertEqual(found["role"], expected["role"]) + self.assertEqual(sorted(found["members"]), sorted(expected["members"])) def test_set_iam_policy_w_user_project(self): from google.cloud.iam import Policy - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - PATH = '/b/name/o/%s' % (BLOB_NAME,) - ETAG = 'DEADBEEF' + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" + PATH = "/b/name/o/%s" % (BLOB_NAME,) + ETAG = "DEADBEEF" VERSION = 17 BINDINGS = [] - RETURNED = { - 'etag': ETAG, - 'version': VERSION, - 'bindings': BINDINGS, - } - after = ({'status': http_client.OK}, RETURNED) + RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} + after = ({"status": http_client.OK}, RETURNED) policy = Policy() connection = _Connection(after) @@ -2031,26 +2054,26 @@ def test_set_iam_policy_w_user_project(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) - self.assertEqual(kw[0]['data'], {'resourceId': PATH}) + self.assertEqual(kw[0]["method"], "PUT") + self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) + self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) + self.assertEqual(kw[0]["data"], {"resourceId": PATH}) def test_test_iam_permissions(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - BLOB_NAME = 'blob-name' - PATH = '/b/name/o/%s' % (BLOB_NAME,) + BLOB_NAME = "blob-name" + PATH = "/b/name/o/%s" % (BLOB_NAME,) PERMISSIONS = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] ALLOWED = PERMISSIONS[1:] - RETURNED = {'permissions': ALLOWED} - after = ({'status': http_client.OK}, RETURNED) + RETURNED = {"permissions": ALLOWED} + after = ({"status": http_client.OK}, RETURNED) connection = _Connection(after) client = _Client(connection) bucket = _Bucket(client=client) @@ -2062,26 +2085,26 @@ def test_test_iam_permissions(self): kw = connection._requested self.assertEqual(len(kw), 1) - 
self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'permissions': PERMISSIONS}) + self.assertEqual(kw[0]["method"], "GET") + self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) + self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS}) def test_test_iam_permissions_w_user_project(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - PATH = '/b/name/o/%s' % (BLOB_NAME,) + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" + PATH = "/b/name/o/%s" % (BLOB_NAME,) PERMISSIONS = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] ALLOWED = PERMISSIONS[1:] - RETURNED = {'permissions': ALLOWED} - after = ({'status': http_client.OK}, RETURNED) + RETURNED = {"permissions": ALLOWED} + after = ({"status": http_client.OK}, RETURNED) connection = _Connection(after) client = _Client(connection) bucket = _Bucket(client=client, user_project=USER_PROJECT) @@ -2093,18 +2116,19 @@ def test_test_iam_permissions_w_user_project(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) + self.assertEqual(kw[0]["method"], "GET") + self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) self.assertEqual( - kw[0]['query_params'], - {'permissions': PERMISSIONS, 'userProject': USER_PROJECT}) + kw[0]["query_params"], + {"permissions": PERMISSIONS, "userProject": USER_PROJECT}, + ) def test_make_public(self): from google.cloud.storage.acl import _ACLEntity - BLOB_NAME = 'blob-name' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after = ({'status': http_client.OK}, {'acl': permissive}) + BLOB_NAME = "blob-name" + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + after = ({"status": http_client.OK}, {"acl": permissive}) connection = _Connection(after) client = _Client(connection) bucket = _Bucket(client=client) @@ -2114,15 +2138,15 @@ def test_make_public(self): self.assertEqual(list(blob.acl), permissive) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % BLOB_NAME) - self.assertEqual(kw[0]['data'], {'acl': permissive}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/name/o/%s" % BLOB_NAME) + self.assertEqual(kw[0]["data"], {"acl": permissive}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) def test_make_private(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" no_permissions = [] - after = ({'status': http_client.OK}, {'acl': no_permissions}) + after = ({"status": http_client.OK}, {"acl": no_permissions}) connection = _Connection(after) client = _Client(connection) bucket = _Bucket(client=client) @@ -2132,17 +2156,17 @@ def test_make_private(self): self.assertEqual(list(blob.acl), no_permissions) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % BLOB_NAME) - self.assertEqual(kw[0]['data'], {'acl': no_permissions}) - self.assertEqual(kw[0]['query_params'], {'projection': 
'full'}) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/name/o/%s" % BLOB_NAME) + self.assertEqual(kw[0]["data"], {"acl": no_permissions}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) def test_compose_wo_content_type_set(self): - SOURCE_1 = 'source-1' - SOURCE_2 = 'source-2' - DESTINATION = 'destinaton' + SOURCE_1 = "source-1" + SOURCE_2 = "source-2" + DESTINATION = "destinaton" RESOURCE = {} - after = ({'status': http_client.OK}, RESOURCE) + after = ({"status": http_client.OK}, RESOURCE) connection = _Connection(after) client = _Client(connection) bucket = _Bucket(client=client) @@ -2157,115 +2181,107 @@ def test_compose_wo_content_type_set(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'POST', - 'path': '/b/name/o/%s/compose' % DESTINATION, - 'query_params': {}, - 'data': { - 'sourceObjects': [ - {'name': source_1.name}, - {'name': source_2.name}, - ], - 'destination': {}, + self.assertEqual( + kw[0], + { + "method": "POST", + "path": "/b/name/o/%s/compose" % DESTINATION, + "query_params": {}, + "data": { + "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], + "destination": {}, + }, + "_target_object": destination, }, - '_target_object': destination, - }) + ) def test_compose_minimal_w_user_project(self): - SOURCE_1 = 'source-1' - SOURCE_2 = 'source-2' - DESTINATION = 'destinaton' - RESOURCE = { - 'etag': 'DEADBEEF' - } - USER_PROJECT = 'user-project-123' - after = ({'status': http_client.OK}, RESOURCE) + SOURCE_1 = "source-1" + SOURCE_2 = "source-2" + DESTINATION = "destinaton" + RESOURCE = {"etag": "DEADBEEF"} + USER_PROJECT = "user-project-123" + after = ({"status": http_client.OK}, RESOURCE) connection = _Connection(after) client = _Client(connection) bucket = _Bucket(client=client, user_project=USER_PROJECT) source_1 = self._make_one(SOURCE_1, bucket=bucket) source_2 = self._make_one(SOURCE_2, bucket=bucket) destination = self._make_one(DESTINATION, bucket=bucket) - destination.content_type = 'text/plain' + destination.content_type = "text/plain" destination.compose(sources=[source_1, source_2]) - self.assertEqual(destination.etag, 'DEADBEEF') + self.assertEqual(destination.etag, "DEADBEEF") kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'POST', - 'path': '/b/name/o/%s/compose' % DESTINATION, - 'query_params': {'userProject': USER_PROJECT}, - 'data': { - 'sourceObjects': [ - {'name': source_1.name}, - {'name': source_2.name}, - ], - 'destination': { - 'contentType': 'text/plain', + self.assertEqual( + kw[0], + { + "method": "POST", + "path": "/b/name/o/%s/compose" % DESTINATION, + "query_params": {"userProject": USER_PROJECT}, + "data": { + "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], + "destination": {"contentType": "text/plain"}, }, + "_target_object": destination, }, - '_target_object': destination, - }) + ) def test_compose_w_additional_property_changes(self): - SOURCE_1 = 'source-1' - SOURCE_2 = 'source-2' - DESTINATION = 'destinaton' - RESOURCE = { - 'etag': 'DEADBEEF' - } - after = ({'status': http_client.OK}, RESOURCE) + SOURCE_1 = "source-1" + SOURCE_2 = "source-2" + DESTINATION = "destinaton" + RESOURCE = {"etag": "DEADBEEF"} + after = ({"status": http_client.OK}, RESOURCE) connection = _Connection(after) client = _Client(connection) bucket = _Bucket(client=client) source_1 = self._make_one(SOURCE_1, bucket=bucket) source_2 = self._make_one(SOURCE_2, bucket=bucket) 
destination = self._make_one(DESTINATION, bucket=bucket) - destination.content_type = 'text/plain' - destination.content_language = 'en-US' - destination.metadata = {'my-key': 'my-value'} + destination.content_type = "text/plain" + destination.content_language = "en-US" + destination.metadata = {"my-key": "my-value"} destination.compose(sources=[source_1, source_2]) - self.assertEqual(destination.etag, 'DEADBEEF') + self.assertEqual(destination.etag, "DEADBEEF") kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0], { - 'method': 'POST', - 'path': '/b/name/o/%s/compose' % DESTINATION, - 'query_params': {}, - 'data': { - 'sourceObjects': [ - {'name': source_1.name}, - {'name': source_2.name}, - ], - 'destination': { - 'contentType': 'text/plain', - 'contentLanguage': 'en-US', - 'metadata': { - 'my-key': 'my-value', - } + self.assertEqual( + kw[0], + { + "method": "POST", + "path": "/b/name/o/%s/compose" % DESTINATION, + "query_params": {}, + "data": { + "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], + "destination": { + "contentType": "text/plain", + "contentLanguage": "en-US", + "metadata": {"my-key": "my-value"}, + }, }, + "_target_object": destination, }, - '_target_object': destination, - }) + ) def test_rewrite_response_without_resource(self): - SOURCE_BLOB = 'source' - DEST_BLOB = 'dest' - DEST_BUCKET = 'other-bucket' - TOKEN = 'TOKEN' + SOURCE_BLOB = "source" + DEST_BLOB = "dest" + DEST_BUCKET = "other-bucket" + TOKEN = "TOKEN" RESPONSE = { - 'totalBytesRewritten': 33, - 'objectSize': 42, - 'done': False, - 'rewriteToken': TOKEN, + "totalBytesRewritten": 33, + "objectSize": 42, + "done": False, + "rewriteToken": TOKEN, } - response = ({'status': http_client.OK}, RESPONSE) + response = ({"status": http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) source_bucket = _Bucket(client=client) @@ -2280,18 +2296,18 @@ def test_rewrite_response_without_resource(self): self.assertEqual(size, 42) def test_rewrite_other_bucket_other_name_no_encryption_partial(self): - SOURCE_BLOB = 'source' - DEST_BLOB = 'dest' - DEST_BUCKET = 'other-bucket' - TOKEN = 'TOKEN' + SOURCE_BLOB = "source" + DEST_BLOB = "dest" + DEST_BUCKET = "other-bucket" + TOKEN = "TOKEN" RESPONSE = { - 'totalBytesRewritten': 33, - 'objectSize': 42, - 'done': False, - 'rewriteToken': TOKEN, - 'resource': {'etag': 'DEADBEEF'}, + "totalBytesRewritten": 33, + "objectSize": 42, + "done": False, + "rewriteToken": TOKEN, + "resource": {"etag": "DEADBEEF"}, } - response = ({'status': http_client.OK}, RESPONSE) + response = ({"status": http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) source_bucket = _Bucket(client=client) @@ -2307,46 +2323,47 @@ def test_rewrite_other_bucket_other_name_no_encryption_partial(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'POST') - PATH = '/b/name/o/%s/rewriteTo/b/%s/o/%s' % ( - SOURCE_BLOB, DEST_BUCKET, DEST_BLOB) - self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {}) + self.assertEqual(kw[0]["method"], "POST") + PATH = "/b/name/o/%s/rewriteTo/b/%s/o/%s" % ( + SOURCE_BLOB, + DEST_BUCKET, + DEST_BLOB, + ) + self.assertEqual(kw[0]["path"], PATH) + self.assertEqual(kw[0]["query_params"], {}) SENT = {} - self.assertEqual(kw[0]['data'], SENT) + self.assertEqual(kw[0]["data"], SENT) - headers = { - key.title(): str(value) for key, value in kw[0]['headers'].items()} - 
self.assertNotIn('X-Goog-Copy-Source-Encryption-Algorithm', headers) - self.assertNotIn('X-Goog-Copy-Source-Encryption-Key', headers) - self.assertNotIn('X-Goog-Copy-Source-Encryption-Key-Sha256', headers) - self.assertNotIn('X-Goog-Encryption-Algorithm', headers) - self.assertNotIn('X-Goog-Encryption-Key', headers) - self.assertNotIn('X-Goog-Encryption-Key-Sha256', headers) + headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} + self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) + self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) + self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) + self.assertNotIn("X-Goog-Encryption-Algorithm", headers) + self.assertNotIn("X-Goog-Encryption-Key", headers) + self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): import base64 import hashlib - KEY = b'01234567890123456789012345678901' # 32 bytes - KEY_B64 = base64.b64encode(KEY).rstrip().decode('ascii') + KEY = b"01234567890123456789012345678901" # 32 bytes + KEY_B64 = base64.b64encode(KEY).rstrip().decode("ascii") KEY_HASH = hashlib.sha256(KEY).digest() - KEY_HASH_B64 = base64.b64encode(KEY_HASH).rstrip().decode('ascii') - BLOB_NAME = 'blob' - USER_PROJECT = 'user-project-123' + KEY_HASH_B64 = base64.b64encode(KEY_HASH).rstrip().decode("ascii") + BLOB_NAME = "blob" + USER_PROJECT = "user-project-123" RESPONSE = { - 'totalBytesRewritten': 42, - 'objectSize': 42, - 'done': True, - 'resource': {'etag': 'DEADBEEF'}, + "totalBytesRewritten": 42, + "objectSize": 42, + "done": True, + "resource": {"etag": "DEADBEEF"}, } - response = ({'status': http_client.OK}, RESPONSE) + response = ({"status": http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) bucket = _Bucket(client=client, user_project=USER_PROJECT) plain = self._make_one(BLOB_NAME, bucket=bucket) - encrypted = self._make_one(BLOB_NAME, bucket=bucket, - encryption_key=KEY) + encrypted = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=KEY) token, rewritten, size = encrypted.rewrite(plain) @@ -2356,52 +2373,47 @@ def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'POST') - PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) + self.assertEqual(kw[0]["method"], "POST") + PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) + self.assertEqual(kw[0]["path"], PATH) + self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) SENT = {} - self.assertEqual(kw[0]['data'], SENT) + self.assertEqual(kw[0]["data"], SENT) - headers = { - key.title(): str(value) for key, value in kw[0]['headers'].items()} - self.assertNotIn('X-Goog-Copy-Source-Encryption-Algorithm', headers) - self.assertNotIn('X-Goog-Copy-Source-Encryption-Key', headers) - self.assertNotIn('X-Goog-Copy-Source-Encryption-Key-Sha256', headers) - self.assertEqual(headers['X-Goog-Encryption-Algorithm'], 'AES256') - self.assertEqual(headers['X-Goog-Encryption-Key'], KEY_B64) - self.assertEqual(headers['X-Goog-Encryption-Key-Sha256'], KEY_HASH_B64) + headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} + self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) + 
self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) + self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) + self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") + self.assertEqual(headers["X-Goog-Encryption-Key"], KEY_B64) + self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], KEY_HASH_B64) def test_rewrite_same_name_no_key_new_key_w_token(self): import base64 import hashlib - SOURCE_KEY = b'01234567890123456789012345678901' # 32 bytes - SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode('ascii') + SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes + SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() - SOURCE_KEY_HASH_B64 = base64.b64encode( - SOURCE_KEY_HASH).rstrip().decode('ascii') - DEST_KEY = b'90123456789012345678901234567890' # 32 bytes - DEST_KEY_B64 = base64.b64encode(DEST_KEY).rstrip().decode('ascii') + SOURCE_KEY_HASH_B64 = base64.b64encode(SOURCE_KEY_HASH).rstrip().decode("ascii") + DEST_KEY = b"90123456789012345678901234567890" # 32 bytes + DEST_KEY_B64 = base64.b64encode(DEST_KEY).rstrip().decode("ascii") DEST_KEY_HASH = hashlib.sha256(DEST_KEY).digest() - DEST_KEY_HASH_B64 = base64.b64encode( - DEST_KEY_HASH).rstrip().decode('ascii') - BLOB_NAME = 'blob' - TOKEN = 'TOKEN' + DEST_KEY_HASH_B64 = base64.b64encode(DEST_KEY_HASH).rstrip().decode("ascii") + BLOB_NAME = "blob" + TOKEN = "TOKEN" RESPONSE = { - 'totalBytesRewritten': 42, - 'objectSize': 42, - 'done': True, - 'resource': {'etag': 'DEADBEEF'}, + "totalBytesRewritten": 42, + "objectSize": 42, + "done": True, + "resource": {"etag": "DEADBEEF"}, } - response = ({'status': http_client.OK}, RESPONSE) + response = ({"status": http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) bucket = _Bucket(client=client) - source = self._make_one( - BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) - dest = self._make_one(BLOB_NAME, bucket=bucket, - encryption_key=DEST_KEY) + source = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) + dest = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=DEST_KEY) token, rewritten, size = dest.rewrite(source, token=TOKEN) @@ -2411,59 +2423,50 @@ def test_rewrite_same_name_no_key_new_key_w_token(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'POST') - PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {'rewriteToken': TOKEN}) + self.assertEqual(kw[0]["method"], "POST") + PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) + self.assertEqual(kw[0]["path"], PATH) + self.assertEqual(kw[0]["query_params"], {"rewriteToken": TOKEN}) SENT = {} - self.assertEqual(kw[0]['data'], SENT) + self.assertEqual(kw[0]["data"], SENT) - headers = { - key.title(): str(value) for key, value in kw[0]['headers'].items()} + headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} + self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") + self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], SOURCE_KEY_B64) self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Algorithm'], 'AES256') - self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Key'], SOURCE_KEY_B64) - self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Key-Sha256'], - SOURCE_KEY_HASH_B64) - self.assertEqual( - 
headers['X-Goog-Encryption-Algorithm'], 'AES256') - self.assertEqual( - headers['X-Goog-Encryption-Key'], DEST_KEY_B64) - self.assertEqual( - headers['X-Goog-Encryption-Key-Sha256'], DEST_KEY_HASH_B64) + headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], SOURCE_KEY_HASH_B64 + ) + self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") + self.assertEqual(headers["X-Goog-Encryption-Key"], DEST_KEY_B64) + self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], DEST_KEY_HASH_B64) def test_rewrite_same_name_w_old_key_new_kms_key(self): import base64 import hashlib - SOURCE_KEY = b'01234567890123456789012345678901' # 32 bytes - SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode('ascii') + SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes + SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() - SOURCE_KEY_HASH_B64 = base64.b64encode( - SOURCE_KEY_HASH).rstrip().decode('ascii') + SOURCE_KEY_HASH_B64 = base64.b64encode(SOURCE_KEY_HASH).rstrip().decode("ascii") DEST_KMS_RESOURCE = ( "projects/test-project-123/" "locations/us/" "keyRings/test-ring/" "cryptoKeys/test-key" ) - BLOB_NAME = 'blob' + BLOB_NAME = "blob" RESPONSE = { - 'totalBytesRewritten': 42, - 'objectSize': 42, - 'done': True, - 'resource': {'etag': 'DEADBEEF'}, + "totalBytesRewritten": 42, + "objectSize": 42, + "done": True, + "resource": {"etag": "DEADBEEF"}, } - response = ({'status': http_client.OK}, RESPONSE) + response = ({"status": http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) bucket = _Bucket(client=client) - source = self._make_one( - BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) - dest = self._make_one(BLOB_NAME, bucket=bucket, - kms_key_name=DEST_KMS_RESOURCE) + source = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) + dest = self._make_one(BLOB_NAME, bucket=bucket, kms_key_name=DEST_KMS_RESOURCE) token, rewritten, size = dest.rewrite(source) @@ -2473,131 +2476,114 @@ def test_rewrite_same_name_w_old_key_new_kms_key(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'POST') - PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], - {'destinationKmsKeyName': DEST_KMS_RESOURCE}) - SENT = { - 'kmsKeyName': DEST_KMS_RESOURCE, - } - self.assertEqual(kw[0]['data'], SENT) - - headers = { - key.title(): str(value) for key, value in kw[0]['headers'].items()} - self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Algorithm'], 'AES256') + self.assertEqual(kw[0]["method"], "POST") + PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) + self.assertEqual(kw[0]["path"], PATH) self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Key'], SOURCE_KEY_B64) + kw[0]["query_params"], {"destinationKmsKeyName": DEST_KMS_RESOURCE} + ) + SENT = {"kmsKeyName": DEST_KMS_RESOURCE} + self.assertEqual(kw[0]["data"], SENT) + + headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} + self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") + self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], SOURCE_KEY_B64) self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Key-Sha256'], - SOURCE_KEY_HASH_B64) + headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], SOURCE_KEY_HASH_B64 + ) def test_update_storage_class_invalid(self): - BLOB_NAME = 'blob-name' + 
BLOB_NAME = "blob-name" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) with self.assertRaises(ValueError): - blob.update_storage_class(u'BOGUS') + blob.update_storage_class(u"BOGUS") def test_update_storage_class_wo_encryption_key(self): - BLOB_NAME = 'blob-name' - STORAGE_CLASS = u'NEARLINE' - RESPONSE = { - 'resource': {'storageClass': STORAGE_CLASS}, - } - response = ({'status': http_client.OK}, RESPONSE) + BLOB_NAME = "blob-name" + STORAGE_CLASS = u"NEARLINE" + RESPONSE = {"resource": {"storageClass": STORAGE_CLASS}} + response = ({"status": http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) bucket = _Bucket(client=client) blob = self._make_one(BLOB_NAME, bucket=bucket) - blob.update_storage_class('NEARLINE') + blob.update_storage_class("NEARLINE") - self.assertEqual(blob.storage_class, 'NEARLINE') + self.assertEqual(blob.storage_class, "NEARLINE") kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'POST') - PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {}) - SENT = {'storageClass': STORAGE_CLASS} - self.assertEqual(kw[0]['data'], SENT) - - headers = { - key.title(): str(value) for key, value in kw[0]['headers'].items()} + self.assertEqual(kw[0]["method"], "POST") + PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) + self.assertEqual(kw[0]["path"], PATH) + self.assertEqual(kw[0]["query_params"], {}) + SENT = {"storageClass": STORAGE_CLASS} + self.assertEqual(kw[0]["data"], SENT) + + headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} # Blob has no key, and therefore the relevant headers are not sent. 
- self.assertNotIn('X-Goog-Copy-Source-Encryption-Algorithm', headers) - self.assertNotIn('X-Goog-Copy-Source-Encryption-Key', headers) - self.assertNotIn('X-Goog-Copy-Source-Encryption-Key-Sha256', headers) - self.assertNotIn('X-Goog-Encryption-Algorithm', headers) - self.assertNotIn('X-Goog-Encryption-Key', headers) - self.assertNotIn('X-Goog-Encryption-Key-Sha256', headers) + self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) + self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) + self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) + self.assertNotIn("X-Goog-Encryption-Algorithm", headers) + self.assertNotIn("X-Goog-Encryption-Key", headers) + self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) def test_update_storage_class_w_encryption_key_w_user_project(self): import base64 import hashlib - BLOB_NAME = 'blob-name' - BLOB_KEY = b'01234567890123456789012345678901' # 32 bytes - BLOB_KEY_B64 = base64.b64encode(BLOB_KEY).rstrip().decode('ascii') + BLOB_NAME = "blob-name" + BLOB_KEY = b"01234567890123456789012345678901" # 32 bytes + BLOB_KEY_B64 = base64.b64encode(BLOB_KEY).rstrip().decode("ascii") BLOB_KEY_HASH = hashlib.sha256(BLOB_KEY).digest() - BLOB_KEY_HASH_B64 = base64.b64encode( - BLOB_KEY_HASH).rstrip().decode('ascii') - STORAGE_CLASS = u'NEARLINE' - USER_PROJECT = 'user-project-123' - RESPONSE = { - 'resource': {'storageClass': STORAGE_CLASS}, - } - response = ({'status': http_client.OK}, RESPONSE) + BLOB_KEY_HASH_B64 = base64.b64encode(BLOB_KEY_HASH).rstrip().decode("ascii") + STORAGE_CLASS = u"NEARLINE" + USER_PROJECT = "user-project-123" + RESPONSE = {"resource": {"storageClass": STORAGE_CLASS}} + response = ({"status": http_client.OK}, RESPONSE) connection = _Connection(response) client = _Client(connection) bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one( - BLOB_NAME, bucket=bucket, encryption_key=BLOB_KEY) + blob = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=BLOB_KEY) - blob.update_storage_class('NEARLINE') + blob.update_storage_class("NEARLINE") - self.assertEqual(blob.storage_class, 'NEARLINE') + self.assertEqual(blob.storage_class, "NEARLINE") kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'POST') - PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]['path'], PATH) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) - SENT = {'storageClass': STORAGE_CLASS} - self.assertEqual(kw[0]['data'], SENT) - - headers = { - key.title(): str(value) for key, value in kw[0]['headers'].items()} + self.assertEqual(kw[0]["method"], "POST") + PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) + self.assertEqual(kw[0]["path"], PATH) + self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) + SENT = {"storageClass": STORAGE_CLASS} + self.assertEqual(kw[0]["data"], SENT) + + headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} # Blob has key, and therefore the relevant headers are sent. 
+ self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") + self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], BLOB_KEY_B64) self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Algorithm'], 'AES256') - self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Key'], BLOB_KEY_B64) - self.assertEqual( - headers['X-Goog-Copy-Source-Encryption-Key-Sha256'], - BLOB_KEY_HASH_B64) - self.assertEqual( - headers['X-Goog-Encryption-Algorithm'], 'AES256') - self.assertEqual( - headers['X-Goog-Encryption-Key'], BLOB_KEY_B64) - self.assertEqual( - headers['X-Goog-Encryption-Key-Sha256'], BLOB_KEY_HASH_B64) + headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], BLOB_KEY_HASH_B64 + ) + self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") + self.assertEqual(headers["X-Goog-Encryption-Key"], BLOB_KEY_B64) + self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], BLOB_KEY_HASH_B64) def test_cache_control_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - CACHE_CONTROL = 'no-cache' - properties = {'cacheControl': CACHE_CONTROL} + CACHE_CONTROL = "no-cache" + properties = {"cacheControl": CACHE_CONTROL} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.cache_control, CACHE_CONTROL) def test_cache_control_setter(self): - BLOB_NAME = 'blob-name' - CACHE_CONTROL = 'no-cache' + BLOB_NAME = "blob-name" + CACHE_CONTROL = "no-cache" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.cache_control) @@ -2607,34 +2593,37 @@ def test_cache_control_setter(self): def test_component_count(self): BUCKET = object() COMPONENT_COUNT = 42 - blob = self._make_one('blob-name', bucket=BUCKET, - properties={'componentCount': COMPONENT_COUNT}) + blob = self._make_one( + "blob-name", bucket=BUCKET, properties={"componentCount": COMPONENT_COUNT} + ) self.assertEqual(blob.component_count, COMPONENT_COUNT) def test_component_count_unset(self): BUCKET = object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.component_count) def test_component_count_string_val(self): BUCKET = object() COMPONENT_COUNT = 42 blob = self._make_one( - 'blob-name', bucket=BUCKET, - properties={'componentCount': str(COMPONENT_COUNT)}) + "blob-name", + bucket=BUCKET, + properties={"componentCount": str(COMPONENT_COUNT)}, + ) self.assertEqual(blob.component_count, COMPONENT_COUNT) def test_content_disposition_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - CONTENT_DISPOSITION = 'Attachment; filename=example.jpg' - properties = {'contentDisposition': CONTENT_DISPOSITION} + CONTENT_DISPOSITION = "Attachment; filename=example.jpg" + properties = {"contentDisposition": CONTENT_DISPOSITION} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.content_disposition, CONTENT_DISPOSITION) def test_content_disposition_setter(self): - BLOB_NAME = 'blob-name' - CONTENT_DISPOSITION = 'Attachment; filename=example.jpg' + BLOB_NAME = "blob-name" + CONTENT_DISPOSITION = "Attachment; filename=example.jpg" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.content_disposition) @@ -2642,16 +2631,16 @@ def test_content_disposition_setter(self): self.assertEqual(blob.content_disposition, CONTENT_DISPOSITION) def test_content_encoding_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() 
- CONTENT_ENCODING = 'gzip' - properties = {'contentEncoding': CONTENT_ENCODING} + CONTENT_ENCODING = "gzip" + properties = {"contentEncoding": CONTENT_ENCODING} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.content_encoding, CONTENT_ENCODING) def test_content_encoding_setter(self): - BLOB_NAME = 'blob-name' - CONTENT_ENCODING = 'gzip' + BLOB_NAME = "blob-name" + CONTENT_ENCODING = "gzip" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.content_encoding) @@ -2659,16 +2648,16 @@ def test_content_encoding_setter(self): self.assertEqual(blob.content_encoding, CONTENT_ENCODING) def test_content_language_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - CONTENT_LANGUAGE = 'pt-BR' - properties = {'contentLanguage': CONTENT_LANGUAGE} + CONTENT_LANGUAGE = "pt-BR" + properties = {"contentLanguage": CONTENT_LANGUAGE} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.content_language, CONTENT_LANGUAGE) def test_content_language_setter(self): - BLOB_NAME = 'blob-name' - CONTENT_LANGUAGE = 'pt-BR' + BLOB_NAME = "blob-name" + CONTENT_LANGUAGE = "pt-BR" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.content_language) @@ -2676,16 +2665,16 @@ def test_content_language_setter(self): self.assertEqual(blob.content_language, CONTENT_LANGUAGE) def test_content_type_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - CONTENT_TYPE = 'image/jpeg' - properties = {'contentType': CONTENT_TYPE} + CONTENT_TYPE = "image/jpeg" + properties = {"contentType": CONTENT_TYPE} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.content_type, CONTENT_TYPE) def test_content_type_setter(self): - BLOB_NAME = 'blob-name' - CONTENT_TYPE = 'image/jpeg' + BLOB_NAME = "blob-name" + CONTENT_TYPE = "image/jpeg" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.content_type) @@ -2693,16 +2682,16 @@ def test_content_type_setter(self): self.assertEqual(blob.content_type, CONTENT_TYPE) def test_crc32c_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - CRC32C = 'DEADBEEF' - properties = {'crc32c': CRC32C} + CRC32C = "DEADBEEF" + properties = {"crc32c": CRC32C} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.crc32c, CRC32C) def test_crc32c_setter(self): - BLOB_NAME = 'blob-name' - CRC32C = 'DEADBEEF' + BLOB_NAME = "blob-name" + CRC32C = "DEADBEEF" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.crc32c) @@ -2710,36 +2699,36 @@ def test_crc32c_setter(self): self.assertEqual(blob.crc32c, CRC32C) def test_etag(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - ETAG = 'ETAG' - properties = {'etag': ETAG} + ETAG = "ETAG" + properties = {"etag": ETAG} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.etag, ETAG) def test_event_based_hold_getter_missing(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() properties = {} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertIsNone(blob.event_based_hold) def test_event_based_hold_getter_false(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - properties = {'eventBasedHold': False} + properties = 
{"eventBasedHold": False} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertFalse(blob.event_based_hold) def test_event_based_hold_getter_true(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - properties = {'eventBasedHold': True} + properties = {"eventBasedHold": True} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertTrue(blob.event_based_hold) def test_event_based_hold_setter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.event_based_hold) @@ -2749,41 +2738,43 @@ def test_event_based_hold_setter(self): def test_generation(self): BUCKET = object() GENERATION = 42 - blob = self._make_one('blob-name', bucket=BUCKET, - properties={'generation': GENERATION}) + blob = self._make_one( + "blob-name", bucket=BUCKET, properties={"generation": GENERATION} + ) self.assertEqual(blob.generation, GENERATION) def test_generation_unset(self): BUCKET = object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.generation) def test_generation_string_val(self): BUCKET = object() GENERATION = 42 - blob = self._make_one('blob-name', bucket=BUCKET, - properties={'generation': str(GENERATION)}) + blob = self._make_one( + "blob-name", bucket=BUCKET, properties={"generation": str(GENERATION)} + ) self.assertEqual(blob.generation, GENERATION) def test_id(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - ID = 'ID' - properties = {'id': ID} + ID = "ID" + properties = {"id": ID} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.id, ID) def test_md5_hash_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - MD5_HASH = 'DEADBEEF' - properties = {'md5Hash': MD5_HASH} + MD5_HASH = "DEADBEEF" + properties = {"md5Hash": MD5_HASH} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.md5_hash, MD5_HASH) def test_md5_hash_setter(self): - BLOB_NAME = 'blob-name' - MD5_HASH = 'DEADBEEF' + BLOB_NAME = "blob-name" + MD5_HASH = "DEADBEEF" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.md5_hash) @@ -2791,24 +2782,24 @@ def test_md5_hash_setter(self): self.assertEqual(blob.md5_hash, MD5_HASH) def test_media_link(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - MEDIA_LINK = 'http://example.com/media/' - properties = {'mediaLink': MEDIA_LINK} + MEDIA_LINK = "http://example.com/media/" + properties = {"mediaLink": MEDIA_LINK} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.media_link, MEDIA_LINK) def test_metadata_getter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - METADATA = {'foo': 'Foo'} - properties = {'metadata': METADATA} + METADATA = {"foo": "Foo"} + properties = {"metadata": METADATA} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.metadata, METADATA) def test_metadata_setter(self): - BLOB_NAME = 'blob-name' - METADATA = {'foo': 'Foo'} + BLOB_NAME = "blob-name" + METADATA = {"foo": "Foo"} bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.metadata) @@ -2818,118 +2809,121 @@ def test_metadata_setter(self): def test_metageneration(self): BUCKET = object() METAGENERATION = 42 - blob = 
self._make_one('blob-name', bucket=BUCKET, - properties={'metageneration': METAGENERATION}) + blob = self._make_one( + "blob-name", bucket=BUCKET, properties={"metageneration": METAGENERATION} + ) self.assertEqual(blob.metageneration, METAGENERATION) def test_metageneration_unset(self): BUCKET = object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.metageneration) def test_metageneration_string_val(self): BUCKET = object() METAGENERATION = 42 blob = self._make_one( - 'blob-name', bucket=BUCKET, - properties={'metageneration': str(METAGENERATION)}) + "blob-name", + bucket=BUCKET, + properties={"metageneration": str(METAGENERATION)}, + ) self.assertEqual(blob.metageneration, METAGENERATION) def test_owner(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - OWNER = {'entity': 'project-owner-12345', 'entityId': '23456'} - properties = {'owner': OWNER} + OWNER = {"entity": "project-owner-12345", "entityId": "23456"} + properties = {"owner": OWNER} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) owner = blob.owner - self.assertEqual(owner['entity'], 'project-owner-12345') - self.assertEqual(owner['entityId'], '23456') + self.assertEqual(owner["entity"], "project-owner-12345") + self.assertEqual(owner["entityId"], "23456") def test_retention_expiration_time(self): from google.cloud._helpers import _RFC3339_MICROS from google.cloud._helpers import UTC - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) TIME_CREATED = TIMESTAMP.strftime(_RFC3339_MICROS) - properties = {'retentionExpirationTime': TIME_CREATED} + properties = {"retentionExpirationTime": TIME_CREATED} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.retention_expiration_time, TIMESTAMP) def test_retention_expiration_time_unset(self): BUCKET = object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.retention_expiration_time) def test_self_link(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - SELF_LINK = 'http://example.com/self/' - properties = {'selfLink': SELF_LINK} + SELF_LINK = "http://example.com/self/" + properties = {"selfLink": SELF_LINK} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.self_link, SELF_LINK) def test_size(self): BUCKET = object() SIZE = 42 - blob = self._make_one('blob-name', bucket=BUCKET, - properties={'size': SIZE}) + blob = self._make_one("blob-name", bucket=BUCKET, properties={"size": SIZE}) self.assertEqual(blob.size, SIZE) def test_size_unset(self): BUCKET = object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.size) def test_size_string_val(self): BUCKET = object() SIZE = 42 - blob = self._make_one('blob-name', bucket=BUCKET, - properties={'size': str(SIZE)}) + blob = self._make_one( + "blob-name", bucket=BUCKET, properties={"size": str(SIZE)} + ) self.assertEqual(blob.size, SIZE) def test_storage_class_getter(self): - blob_name = 'blob-name' + blob_name = "blob-name" bucket = _Bucket() - storage_class = 'MULTI_REGIONAL' - properties = {'storageClass': storage_class} + storage_class = "MULTI_REGIONAL" + properties = {"storageClass": storage_class} blob = self._make_one(blob_name, bucket=bucket, 
properties=properties) self.assertEqual(blob.storage_class, storage_class) def test_storage_class_setter(self): - blob_name = 'blob-name' + blob_name = "blob-name" bucket = _Bucket() - storage_class = 'COLDLINE' + storage_class = "COLDLINE" blob = self._make_one(blob_name, bucket=bucket) self.assertIsNone(blob.storage_class) blob.storage_class = storage_class self.assertEqual(blob.storage_class, storage_class) - self.assertEqual(blob._properties, {'storageClass': storage_class}) + self.assertEqual(blob._properties, {"storageClass": storage_class}) def test_temporary_hold_getter_missing(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() properties = {} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertIsNone(blob.temporary_hold) def test_temporary_hold_getter_false(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - properties = {'temporaryHold': False} + properties = {"temporaryHold": False} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertFalse(blob.temporary_hold) def test_temporary_hold_getter_true(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() - properties = {'temporaryHold': True} + properties = {"temporaryHold": True} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertTrue(blob.temporary_hold) def test_temporary_hold_setter(self): - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket) self.assertIsNone(blob.temporary_hold) @@ -2940,56 +2934,55 @@ def test_time_deleted(self): from google.cloud._helpers import _RFC3339_MICROS from google.cloud._helpers import UTC - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) TIME_DELETED = TIMESTAMP.strftime(_RFC3339_MICROS) - properties = {'timeDeleted': TIME_DELETED} + properties = {"timeDeleted": TIME_DELETED} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.time_deleted, TIMESTAMP) def test_time_deleted_unset(self): BUCKET = object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.time_deleted) def test_time_created(self): from google.cloud._helpers import _RFC3339_MICROS from google.cloud._helpers import UTC - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) TIME_CREATED = TIMESTAMP.strftime(_RFC3339_MICROS) - properties = {'timeCreated': TIME_CREATED} + properties = {"timeCreated": TIME_CREATED} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.time_created, TIMESTAMP) def test_time_created_unset(self): BUCKET = object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.time_created) def test_updated(self): from google.cloud._helpers import _RFC3339_MICROS from google.cloud._helpers import UTC - BLOB_NAME = 'blob-name' + BLOB_NAME = "blob-name" bucket = _Bucket() TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) UPDATED = TIMESTAMP.strftime(_RFC3339_MICROS) - properties = {'updated': UPDATED} + properties = {"updated": UPDATED} blob = self._make_one(BLOB_NAME, bucket=bucket, properties=properties) self.assertEqual(blob.updated, TIMESTAMP) def test_updated_unset(self): BUCKET 
= object() - blob = self._make_one('blob-name', bucket=BUCKET) + blob = self._make_one("blob-name", bucket=BUCKET) self.assertIsNone(blob.updated) class Test__quote(unittest.TestCase): - @staticmethod def _call_fut(value): from google.cloud.storage.blob import _quote @@ -2997,13 +2990,13 @@ def _call_fut(value): return _quote(value) def test_bytes(self): - quoted = self._call_fut(b'\xDE\xAD\xBE\xEF') - self.assertEqual(quoted, '%DE%AD%BE%EF') + quoted = self._call_fut(b"\xDE\xAD\xBE\xEF") + self.assertEqual(quoted, "%DE%AD%BE%EF") def test_unicode(self): - helicopter = u'\U0001f681' + helicopter = u"\U0001f681" quoted = self._call_fut(helicopter) - self.assertEqual(quoted, '%F0%9F%9A%81') + self.assertEqual(quoted, "%F0%9F%9A%81") def test_bad_type(self): with self.assertRaises(TypeError): @@ -3011,7 +3004,6 @@ def test_bad_type(self): class Test__maybe_rewind(unittest.TestCase): - @staticmethod def _call_fut(*args, **kwargs): from google.cloud.storage.blob import _maybe_rewind @@ -3019,21 +3011,21 @@ def _call_fut(*args, **kwargs): return _maybe_rewind(*args, **kwargs) def test_default(self): - stream = mock.Mock(spec=[u'seek']) + stream = mock.Mock(spec=[u"seek"]) ret_val = self._call_fut(stream) self.assertIsNone(ret_val) stream.seek.assert_not_called() def test_do_not_rewind(self): - stream = mock.Mock(spec=[u'seek']) + stream = mock.Mock(spec=[u"seek"]) ret_val = self._call_fut(stream, rewind=False) self.assertIsNone(ret_val) stream.seek.assert_not_called() def test_do_rewind(self): - stream = mock.Mock(spec=[u'seek']) + stream = mock.Mock(spec=[u"seek"]) ret_val = self._call_fut(stream, rewind=True) self.assertIsNone(ret_val) @@ -3041,7 +3033,6 @@ def test_do_rewind(self): class Test__raise_from_invalid_response(unittest.TestCase): - @staticmethod def _call_fut(error): from google.cloud.storage.blob import _raise_from_invalid_response @@ -3055,8 +3046,7 @@ def _helper(self, message, code=http_client.BAD_REQUEST, args=()): from google.api_core import exceptions response = requests.Response() - response.request = requests.Request( - 'GET', 'http://example.com').prepare() + response.request = requests.Request("GET", "http://example.com").prepare() response.status_code = code error = InvalidResponse(response, message, *args) @@ -3066,24 +3056,22 @@ def _helper(self, message, code=http_client.BAD_REQUEST, args=()): return exc_info def test_default(self): - message = 'Failure' + message = "Failure" exc_info = self._helper(message) - expected = 'GET http://example.com/: {}'.format(message) + expected = "GET http://example.com/: {}".format(message) self.assertEqual(exc_info.exception.message, expected) self.assertEqual(exc_info.exception.errors, []) def test_w_206_and_args(self): - message = 'Failure' - args = ('one', 'two') - exc_info = self._helper( - message, code=http_client.PARTIAL_CONTENT, args=args) - expected = 'GET http://example.com/: {}'.format((message,) + args) + message = "Failure" + args = ("one", "two") + exc_info = self._helper(message, code=http_client.PARTIAL_CONTENT, args=args) + expected = "GET http://example.com/: {}".format((message,) + args) self.assertEqual(exc_info.exception.message, expected) self.assertEqual(exc_info.exception.errors, []) class Test__add_query_parameters(unittest.TestCase): - @staticmethod def _call_fut(*args, **kwargs): from google.cloud.storage.blob import _add_query_parameters @@ -3091,32 +3079,30 @@ def _call_fut(*args, **kwargs): return _add_query_parameters(*args, **kwargs) def test_w_empty_list(self): - BASE_URL = 
'https://test.example.com/base' + BASE_URL = "https://test.example.com/base" self.assertEqual(self._call_fut(BASE_URL, []), BASE_URL) def test_wo_existing_qs(self): - BASE_URL = 'https://test.example.com/base' - NV_LIST = [('one', 'One'), ('two', 'Two')] - expected = '&'.join([ - '{}={}'.format(name, value) for name, value in NV_LIST]) + BASE_URL = "https://test.example.com/base" + NV_LIST = [("one", "One"), ("two", "Two")] + expected = "&".join(["{}={}".format(name, value) for name, value in NV_LIST]) self.assertEqual( - self._call_fut(BASE_URL, NV_LIST), - '{}?{}'.format(BASE_URL, expected)) + self._call_fut(BASE_URL, NV_LIST), "{}?{}".format(BASE_URL, expected) + ) def test_w_existing_qs(self): - BASE_URL = 'https://test.example.com/base?one=Three' - NV_LIST = [('one', 'One'), ('two', 'Two')] - expected = '&'.join([ - '{}={}'.format(name, value) for name, value in NV_LIST]) + BASE_URL = "https://test.example.com/base?one=Three" + NV_LIST = [("one", "One"), ("two", "Two")] + expected = "&".join(["{}={}".format(name, value) for name, value in NV_LIST]) self.assertEqual( - self._call_fut(BASE_URL, NV_LIST), - '{}&{}'.format(BASE_URL, expected)) + self._call_fut(BASE_URL, NV_LIST), "{}&{}".format(BASE_URL, expected) + ) class _Connection(object): - API_BASE_URL = 'http://example.com' - USER_AGENT = 'testing 1.2.3' + API_BASE_URL = "http://example.com" + USER_AGENT = "testing 1.2.3" credentials = object() def __init__(self, *responses): @@ -3133,14 +3119,13 @@ def api_request(self, **kw): from google.cloud.exceptions import NotFound info, content = self._respond(**kw) - if info.get('status') == http_client.NOT_FOUND: + if info.get("status") == http_client.NOT_FOUND: raise NotFound(info) return content class _Bucket(object): - - def __init__(self, client=None, name='name', user_project=None): + def __init__(self, client=None, name="name", user_project=None): if client is None: connection = _Connection() client = _Client(connection) @@ -3149,7 +3134,7 @@ def __init__(self, client=None, name='name', user_project=None): self._copied = [] self._deleted = [] self.name = name - self.path = '/b/' + name + self.path = "/b/" + name self.user_project = user_project def delete_blob(self, blob_name, client=None): @@ -3158,18 +3143,18 @@ def delete_blob(self, blob_name, client=None): class _Signer(object): - def __init__(self): self._signed = [] def __call__(self, *args, **kwargs): self._signed.append((args, kwargs)) - return ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' - '&Expiration=%s' % kwargs.get('expiration')) + return ( + "http://example.com/abucket/a-blob-name?Signature=DEADBEEF" + "&Expiration=%s" % kwargs.get("expiration") + ) class _Client(object): - def __init__(self, connection): self._base_connection = connection diff --git a/storage/tests/unit/test_bucket.py b/storage/tests/unit/test_bucket.py index d9295991fe0f..8cb1523536de 100644 --- a/storage/tests/unit/test_bucket.py +++ b/storage/tests/unit/test_bucket.py @@ -22,8 +22,8 @@ def _create_signing_credentials(): import google.auth.credentials class _SigningCredentials( - google.auth.credentials.Credentials, - google.auth.credentials.Signing): + google.auth.credentials.Credentials, google.auth.credentials.Signing + ): pass credentials = mock.Mock(spec=_SigningCredentials) @@ -32,10 +32,10 @@ class _SigningCredentials( class Test_LifecycleRuleConditions(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.bucket import LifecycleRuleConditions + return LifecycleRuleConditions def 
_make_one(self, **kw): @@ -46,17 +46,13 @@ def test_ctor_wo_conditions(self): self._make_one() def test_ctor_w_age_and_matches_storage_class(self): - conditions = self._make_one( - age=10, matches_storage_class=['REGIONAL']) - expected = { - 'age': 10, - 'matchesStorageClass': ['REGIONAL'], - } + conditions = self._make_one(age=10, matches_storage_class=["REGIONAL"]) + expected = {"age": 10, "matchesStorageClass": ["REGIONAL"]} self.assertEqual(dict(conditions), expected) self.assertEqual(conditions.age, 10) self.assertIsNone(conditions.created_before) self.assertIsNone(conditions.is_live) - self.assertEqual(conditions.matches_storage_class, ['REGIONAL']) + self.assertEqual(conditions.matches_storage_class, ["REGIONAL"]) self.assertIsNone(conditions.number_of_newer_versions) def test_ctor_w_created_before_and_is_live(self): @@ -64,10 +60,7 @@ def test_ctor_w_created_before_and_is_live(self): before = datetime.date(2018, 8, 1) conditions = self._make_one(created_before=before, is_live=False) - expected = { - 'createdBefore': '2018-08-01', - 'isLive': False, - } + expected = {"createdBefore": "2018-08-01", "isLive": False} self.assertEqual(dict(conditions), expected) self.assertIsNone(conditions.age) self.assertEqual(conditions.created_before, before) @@ -77,9 +70,7 @@ def test_ctor_w_created_before_and_is_live(self): def test_ctor_w_number_of_newer_versions(self): conditions = self._make_one(number_of_newer_versions=3) - expected = { - 'numNewerVersions': 3, - } + expected = {"numNewerVersions": 3} self.assertEqual(dict(conditions), expected) self.assertIsNone(conditions.age) self.assertIsNone(conditions.created_before) @@ -93,25 +84,25 @@ def test_from_api_repr(self): before = datetime.date(2018, 8, 1) klass = self._get_target_class() resource = { - 'age': 10, - 'createdBefore': '2018-08-01', - 'isLive': True, - 'matchesStorageClass': ['REGIONAL'], - 'numNewerVersions': 3, + "age": 10, + "createdBefore": "2018-08-01", + "isLive": True, + "matchesStorageClass": ["REGIONAL"], + "numNewerVersions": 3, } conditions = klass.from_api_repr(resource) self.assertEqual(conditions.age, 10) self.assertEqual(conditions.created_before, before) self.assertEqual(conditions.is_live, True) - self.assertEqual(conditions.matches_storage_class, ['REGIONAL']) + self.assertEqual(conditions.matches_storage_class, ["REGIONAL"]) self.assertEqual(conditions.number_of_newer_versions, 3) class Test_LifecycleRuleDelete(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.bucket import LifecycleRuleDelete + return LifecycleRuleDelete def _make_one(self, **kw): @@ -122,43 +113,32 @@ def test_ctor_wo_conditions(self): self._make_one() def test_ctor_w_condition(self): - rule = self._make_one(age=10, matches_storage_class=['REGIONAL']) + rule = self._make_one(age=10, matches_storage_class=["REGIONAL"]) expected = { - 'action': { - 'type': 'Delete', - }, - 'condition': { - 'age': 10, - 'matchesStorageClass': ['REGIONAL'], - } + "action": {"type": "Delete"}, + "condition": {"age": 10, "matchesStorageClass": ["REGIONAL"]}, } self.assertEqual(dict(rule), expected) def test_from_api_repr(self): klass = self._get_target_class() conditions = { - 'age': 10, - 'createdBefore': '2018-08-01', - 'isLive': True, - 'matchesStorageClass': ['REGIONAL'], - 'numNewerVersions': 3, - } - resource = { - 'action': { - 'type': 'Delete', - }, - 'condition': conditions, + "age": 10, + "createdBefore": "2018-08-01", + "isLive": True, + "matchesStorageClass": ["REGIONAL"], + "numNewerVersions": 3, } + resource = 
{"action": {"type": "Delete"}, "condition": conditions} rule = klass.from_api_repr(resource) self.assertEqual(dict(rule), resource) class Test_LifecycleRuleSetStorageClass(unittest.TestCase): - @staticmethod def _get_target_class(): - from google.cloud.storage.bucket import ( - LifecycleRuleSetStorageClass) + from google.cloud.storage.bucket import LifecycleRuleSetStorageClass + return LifecycleRuleSetStorageClass def _make_one(self, **kw): @@ -166,54 +146,43 @@ def _make_one(self, **kw): def test_ctor_wo_conditions(self): with self.assertRaises(ValueError): - self._make_one(storage_class='REGIONAL') + self._make_one(storage_class="REGIONAL") def test_ctor_w_condition(self): rule = self._make_one( - storage_class='NEARLINE', - age=10, - matches_storage_class=['REGIONAL']) + storage_class="NEARLINE", age=10, matches_storage_class=["REGIONAL"] + ) expected = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': 'NEARLINE', - }, - 'condition': { - 'age': 10, - 'matchesStorageClass': ['REGIONAL'], - } + "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "condition": {"age": 10, "matchesStorageClass": ["REGIONAL"]}, } self.assertEqual(dict(rule), expected) def test_from_api_repr(self): klass = self._get_target_class() conditions = { - 'age': 10, - 'createdBefore': '2018-08-01', - 'isLive': True, - 'matchesStorageClass': ['REGIONAL'], - 'numNewerVersions': 3, + "age": 10, + "createdBefore": "2018-08-01", + "isLive": True, + "matchesStorageClass": ["REGIONAL"], + "numNewerVersions": 3, } resource = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': 'NEARLINE', - }, - 'condition': conditions, + "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "condition": conditions, } rule = klass.from_api_repr(resource) self.assertEqual(dict(rule), resource) class Test_Bucket(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.bucket import Bucket + return Bucket - def _make_one( - self, client=None, name=None, properties=None, user_project=None): + def _make_one(self, client=None, name=None, properties=None, user_project=None): if client is None: connection = _Connection() client = _Client(connection) @@ -221,13 +190,14 @@ def _make_one( bucket = self._get_target_class()(client, name=name) else: bucket = self._get_target_class()( - client, name=name, user_project=user_project) + client, name=name, user_project=user_project + ) bucket._properties = properties or {} return bucket def test_ctor(self): - NAME = 'name' - properties = {'key': 'value'} + NAME = "name" + properties = {"key": "value"} bucket = self._make_one(name=NAME, properties=properties) self.assertEqual(bucket.name, NAME) self.assertEqual(bucket._properties, properties) @@ -238,8 +208,8 @@ def test_ctor(self): self.assertIsNone(bucket.user_project) def test_ctor_w_user_project(self): - NAME = 'name' - USER_PROJECT = 'user-project-123' + NAME = "name" + USER_PROJECT = "user-project-123" connection = _Connection() client = _Client(connection) bucket = self._make_one(client, name=NAME, user_project=USER_PROJECT) @@ -254,13 +224,12 @@ def test_ctor_w_user_project(self): def test_blob_wo_keys(self): from google.cloud.storage.blob import Blob - BUCKET_NAME = 'BUCKET_NAME' - BLOB_NAME = 'BLOB_NAME' + BUCKET_NAME = "BUCKET_NAME" + BLOB_NAME = "BLOB_NAME" CHUNK_SIZE = 1024 * 1024 bucket = self._make_one(name=BUCKET_NAME) - blob = bucket.blob( - BLOB_NAME, chunk_size=CHUNK_SIZE) + blob = bucket.blob(BLOB_NAME, chunk_size=CHUNK_SIZE) self.assertIsInstance(blob, Blob) 
self.assertIs(blob.bucket, bucket) self.assertIs(blob.client, bucket.client) @@ -272,14 +241,13 @@ def test_blob_wo_keys(self): def test_blob_w_encryption_key(self): from google.cloud.storage.blob import Blob - BUCKET_NAME = 'BUCKET_NAME' - BLOB_NAME = 'BLOB_NAME' + BUCKET_NAME = "BUCKET_NAME" + BLOB_NAME = "BLOB_NAME" CHUNK_SIZE = 1024 * 1024 - KEY = b'01234567890123456789012345678901' # 32 bytes + KEY = b"01234567890123456789012345678901" # 32 bytes bucket = self._make_one(name=BUCKET_NAME) - blob = bucket.blob( - BLOB_NAME, chunk_size=CHUNK_SIZE, encryption_key=KEY) + blob = bucket.blob(BLOB_NAME, chunk_size=CHUNK_SIZE, encryption_key=KEY) self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) self.assertIs(blob.client, bucket.client) @@ -291,8 +259,8 @@ def test_blob_w_encryption_key(self): def test_blob_w_kms_key_name(self): from google.cloud.storage.blob import Blob - BUCKET_NAME = 'BUCKET_NAME' - BLOB_NAME = 'BLOB_NAME' + BUCKET_NAME = "BUCKET_NAME" + BLOB_NAME = "BLOB_NAME" CHUNK_SIZE = 1024 * 1024 KMS_RESOURCE = ( "projects/test-project-123/" @@ -302,8 +270,7 @@ def test_blob_w_kms_key_name(self): ) bucket = self._make_one(name=BUCKET_NAME) - blob = bucket.blob( - BLOB_NAME, chunk_size=CHUNK_SIZE, kms_key_name=KMS_RESOURCE) + blob = bucket.blob(BLOB_NAME, chunk_size=CHUNK_SIZE, kms_key_name=KMS_RESOURCE) self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) self.assertIs(blob.client, bucket.client) @@ -316,9 +283,9 @@ def test_notification_defaults(self): from google.cloud.storage.notification import BucketNotification from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT - PROJECT = 'PROJECT' - BUCKET_NAME = 'BUCKET_NAME' - TOPIC_NAME = 'TOPIC_NAME' + PROJECT = "PROJECT" + BUCKET_NAME = "BUCKET_NAME" + TOPIC_NAME = "TOPIC_NAME" client = _Client(_Connection(), project=PROJECT) bucket = self._make_one(client, name=BUCKET_NAME) @@ -337,18 +304,16 @@ def test_notification_explicit(self): BucketNotification, OBJECT_FINALIZE_EVENT_TYPE, OBJECT_DELETE_EVENT_TYPE, - JSON_API_V1_PAYLOAD_FORMAT) - - PROJECT = 'PROJECT' - BUCKET_NAME = 'BUCKET_NAME' - TOPIC_NAME = 'TOPIC_NAME' - TOPIC_ALT_PROJECT = 'topic-project-456' - CUSTOM_ATTRIBUTES = { - 'attr1': 'value1', - 'attr2': 'value2', - } + JSON_API_V1_PAYLOAD_FORMAT, + ) + + PROJECT = "PROJECT" + BUCKET_NAME = "BUCKET_NAME" + TOPIC_NAME = "TOPIC_NAME" + TOPIC_ALT_PROJECT = "topic-project-456" + CUSTOM_ATTRIBUTES = {"attr1": "value1", "attr2": "value2"} EVENT_TYPES = [OBJECT_FINALIZE_EVENT_TYPE, OBJECT_DELETE_EVENT_TYPE] - BLOB_NAME_PREFIX = 'blob-name-prefix/' + BLOB_NAME_PREFIX = "blob-name-prefix/" client = _Client(_Connection(), project=PROJECT) bucket = self._make_one(client, name=BUCKET_NAME) @@ -367,24 +332,23 @@ def test_notification_explicit(self): self.assertEqual(notification.custom_attributes, CUSTOM_ATTRIBUTES) self.assertEqual(notification.event_types, EVENT_TYPES) self.assertEqual(notification.blob_name_prefix, BLOB_NAME_PREFIX) - self.assertEqual( - notification.payload_format, JSON_API_V1_PAYLOAD_FORMAT) + self.assertEqual(notification.payload_format, JSON_API_V1_PAYLOAD_FORMAT) def test_bucket_name_value(self): - BUCKET_NAME = 'bucket-name' + BUCKET_NAME = "bucket-name" self._make_one(name=BUCKET_NAME) - bad_start_bucket_name = '/testing123' + bad_start_bucket_name = "/testing123" with self.assertRaises(ValueError): self._make_one(name=bad_start_bucket_name) - bad_end_bucket_name = 'testing123/' + bad_end_bucket_name = "testing123/" with self.assertRaises(ValueError): 
self._make_one(name=bad_end_bucket_name) def test_user_project(self): - BUCKET_NAME = 'name' - USER_PROJECT = 'user-project-123' + BUCKET_NAME = "name" + USER_PROJECT = "user-project-123" bucket = self._make_one(name=BUCKET_NAME) bucket._user_project = USER_PROJECT self.assertEqual(bucket.user_project, USER_PROJECT) @@ -401,23 +365,21 @@ def api_request(cls, *args, **kwargs): cls._called_with.append((args, kwargs)) raise NotFound(args) - BUCKET_NAME = 'bucket-name' + BUCKET_NAME = "bucket-name" bucket = self._make_one(name=BUCKET_NAME) client = _Client(_FakeConnection) self.assertFalse(bucket.exists(client=client)) expected_called_kwargs = { - 'method': 'GET', - 'path': bucket.path, - 'query_params': { - 'fields': 'name', - }, - '_target_object': None, + "method": "GET", + "path": bucket.path, + "query_params": {"fields": "name"}, + "_target_object": None, } expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) def test_exists_hit_w_user_project(self): - USER_PROJECT = 'user-project-123' + USER_PROJECT = "user-project-123" class _FakeConnection(object): @@ -429,26 +391,23 @@ def api_request(cls, *args, **kwargs): # exists() does not use the return value return object() - BUCKET_NAME = 'bucket-name' + BUCKET_NAME = "bucket-name" bucket = self._make_one(name=BUCKET_NAME, user_project=USER_PROJECT) client = _Client(_FakeConnection) self.assertTrue(bucket.exists(client=client)) expected_called_kwargs = { - 'method': 'GET', - 'path': bucket.path, - 'query_params': { - 'fields': 'name', - 'userProject': USER_PROJECT, - }, - '_target_object': None, + "method": "GET", + "path": bucket.path, + "query_params": {"fields": "name", "userProject": USER_PROJECT}, + "_target_object": None, } expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) def test_create_w_user_project(self): - PROJECT = 'PROJECT' - BUCKET_NAME = 'bucket-name' - USER_PROJECT = 'user-project-123' + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + USER_PROJECT = "user-project-123" connection = _Connection() client = _Client(connection, project=PROJECT) bucket = self._make_one(client, BUCKET_NAME, user_project=USER_PROJECT) @@ -457,7 +416,7 @@ def test_create_w_user_project(self): bucket.create() def test_create_w_missing_client_project(self): - BUCKET_NAME = 'bucket-name' + BUCKET_NAME = "bucket-name" connection = _Connection() client = _Client(connection, project=None) bucket = self._make_one(client, BUCKET_NAME) @@ -466,10 +425,10 @@ def test_create_w_missing_client_project(self): bucket.create() def test_create_w_explicit_project(self): - PROJECT = 'PROJECT' - BUCKET_NAME = 'bucket-name' - OTHER_PROJECT = 'other-project-123' - DATA = {'name': BUCKET_NAME} + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + OTHER_PROJECT = "other-project-123" + DATA = {"name": BUCKET_NAME} connection = _Connection(DATA) client = _Client(connection, project=PROJECT) bucket = self._make_one(client, BUCKET_NAME) @@ -477,70 +436,69 @@ def test_create_w_explicit_project(self): bucket.create(project=OTHER_PROJECT) kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], '/b') - self.assertEqual(kw['query_params'], {'project': OTHER_PROJECT}) - self.assertEqual(kw['data'], DATA) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b") + self.assertEqual(kw["query_params"], {"project": OTHER_PROJECT}) + self.assertEqual(kw["data"], DATA) def test_create_w_explicit_location(self): - PROJECT 
= 'PROJECT' - BUCKET_NAME = 'bucket-name' - LOCATION = 'us-central1' - DATA = {'location': LOCATION, 'name': BUCKET_NAME} + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + LOCATION = "us-central1" + DATA = {"location": LOCATION, "name": BUCKET_NAME} connection = _Connection( - DATA, - "{'location': 'us-central1', 'name': 'bucket-name'}") + DATA, "{'location': 'us-central1', 'name': 'bucket-name'}" + ) client = _Client(connection, project=PROJECT) bucket = self._make_one(client, BUCKET_NAME) bucket.create(location=LOCATION) kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], '/b') - self.assertEqual(kw['data'], DATA) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b") + self.assertEqual(kw["data"], DATA) self.assertEqual(bucket.location, LOCATION) def test_create_hit(self): - PROJECT = 'PROJECT' - BUCKET_NAME = 'bucket-name' - DATA = {'name': BUCKET_NAME} + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + DATA = {"name": BUCKET_NAME} connection = _Connection(DATA) client = _Client(connection, project=PROJECT) bucket = self._make_one(client=client, name=BUCKET_NAME) bucket.create() kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], '/b') - self.assertEqual(kw['query_params'], {'project': PROJECT}) - self.assertEqual(kw['data'], DATA) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b") + self.assertEqual(kw["query_params"], {"project": PROJECT}) + self.assertEqual(kw["data"], DATA) def test_create_w_extra_properties(self): - BUCKET_NAME = 'bucket-name' - PROJECT = 'PROJECT' - CORS = [{ - 'maxAgeSeconds': 60, - 'methods': ['*'], - 'origin': ['https://example.com/frontend'], - 'responseHeader': ['X-Custom-Header'], - }] - LIFECYCLE_RULES = [{ - "action": {"type": "Delete"}, - "condition": {"age": 365} - }] - LOCATION = 'eu' - LABELS = {'color': 'red', 'flavor': 'cherry'} - STORAGE_CLASS = 'NEARLINE' + BUCKET_NAME = "bucket-name" + PROJECT = "PROJECT" + CORS = [ + { + "maxAgeSeconds": 60, + "methods": ["*"], + "origin": ["https://example.com/frontend"], + "responseHeader": ["X-Custom-Header"], + } + ] + LIFECYCLE_RULES = [{"action": {"type": "Delete"}, "condition": {"age": 365}}] + LOCATION = "eu" + LABELS = {"color": "red", "flavor": "cherry"} + STORAGE_CLASS = "NEARLINE" DATA = { - 'name': BUCKET_NAME, - 'cors': CORS, - 'lifecycle': {'rule': LIFECYCLE_RULES}, - 'location': LOCATION, - 'storageClass': STORAGE_CLASS, - 'versioning': {'enabled': True}, - 'billing': {'requesterPays': True}, - 'labels': LABELS, + "name": BUCKET_NAME, + "cors": CORS, + "lifecycle": {"rule": LIFECYCLE_RULES}, + "location": LOCATION, + "storageClass": STORAGE_CLASS, + "versioning": {"enabled": True}, + "billing": {"requesterPays": True}, + "labels": LABELS, } connection = _Connection(DATA) client = _Client(connection, project=PROJECT) @@ -554,10 +512,10 @@ def test_create_w_extra_properties(self): bucket.create(location=LOCATION) kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], '/b') - self.assertEqual(kw['query_params'], {'project': PROJECT}) - self.assertEqual(kw['data'], DATA) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b") + self.assertEqual(kw["query_params"], {"project": PROJECT}) + self.assertEqual(kw["data"], DATA) def test_acl_property(self): from google.cloud.storage.acl import BucketACL @@ -577,49 +535,49 @@ def test_default_object_acl_property(self): def 
test_path_no_name(self): bucket = self._make_one() - self.assertRaises(ValueError, getattr, bucket, 'path') + self.assertRaises(ValueError, getattr, bucket, "path") def test_path_w_name(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) - self.assertEqual(bucket.path, '/b/%s' % NAME) + self.assertEqual(bucket.path, "/b/%s" % NAME) def test_get_blob_miss(self): - NAME = 'name' - NONESUCH = 'nonesuch' + NAME = "name" + NONESUCH = "nonesuch" connection = _Connection() client = _Client(connection) bucket = self._make_one(name=NAME) result = bucket.get_blob(NONESUCH, client=client) self.assertIsNone(result) kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH)) + self.assertEqual(kw["method"], "GET") + self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) def test_get_blob_hit_w_user_project(self): - NAME = 'name' - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' - connection = _Connection({'name': BLOB_NAME}) + NAME = "name" + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" + connection = _Connection({"name": BLOB_NAME}) client = _Client(connection) bucket = self._make_one(name=NAME, user_project=USER_PROJECT) blob = bucket.get_blob(BLOB_NAME, client=client) self.assertIs(blob.bucket, bucket) self.assertEqual(blob.name, BLOB_NAME) kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw['query_params'], {'userProject': USER_PROJECT}) + self.assertEqual(kw["method"], "GET") + self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) + self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT}) def test_get_blob_hit_with_kwargs(self): from google.cloud.storage.blob import _get_encryption_headers - NAME = 'name' - BLOB_NAME = 'blob-name' + NAME = "name" + BLOB_NAME = "blob-name" CHUNK_SIZE = 1024 * 1024 - KEY = b'01234567890123456789012345678901' # 32 bytes + KEY = b"01234567890123456789012345678901" # 32 bytes - connection = _Connection({'name': BLOB_NAME}) + connection = _Connection({"name": BLOB_NAME}) client = _Client(connection) bucket = self._make_one(name=NAME) blob = bucket.get_blob( @@ -628,46 +586,46 @@ def test_get_blob_hit_with_kwargs(self): self.assertIs(blob.bucket, bucket) self.assertEqual(blob.name, BLOB_NAME) kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw['headers'], _get_encryption_headers(KEY)) + self.assertEqual(kw["method"], "GET") + self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) + self.assertEqual(kw["headers"], _get_encryption_headers(KEY)) self.assertEqual(blob.chunk_size, CHUNK_SIZE) self.assertEqual(blob._encryption_key, KEY) def test_list_blobs_defaults(self): - NAME = 'name' - connection = _Connection({'items': []}) + NAME = "name" + connection = _Connection({"items": []}) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) iterator = bucket.list_blobs() blobs = list(iterator) self.assertEqual(blobs, []) kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], '/b/%s/o' % NAME) - self.assertEqual(kw['query_params'], {'projection': 'noAcl'}) + self.assertEqual(kw["method"], "GET") + self.assertEqual(kw["path"], "/b/%s/o" % NAME) + self.assertEqual(kw["query_params"], {"projection": "noAcl"}) def 
test_list_blobs_w_all_arguments_and_user_project(self): - NAME = 'name' - USER_PROJECT = 'user-project-123' + NAME = "name" + USER_PROJECT = "user-project-123" MAX_RESULTS = 10 - PAGE_TOKEN = 'ABCD' - PREFIX = 'subfolder' - DELIMITER = '/' + PAGE_TOKEN = "ABCD" + PREFIX = "subfolder" + DELIMITER = "/" VERSIONS = True - PROJECTION = 'full' - FIELDS = 'items/contentLanguage,nextPageToken' + PROJECTION = "full" + FIELDS = "items/contentLanguage,nextPageToken" EXPECTED = { - 'maxResults': 10, - 'pageToken': PAGE_TOKEN, - 'prefix': PREFIX, - 'delimiter': DELIMITER, - 'versions': VERSIONS, - 'projection': PROJECTION, - 'fields': FIELDS, - 'userProject': USER_PROJECT, + "maxResults": 10, + "pageToken": PAGE_TOKEN, + "prefix": PREFIX, + "delimiter": DELIMITER, + "versions": VERSIONS, + "projection": PROJECTION, + "fields": FIELDS, + "userProject": USER_PROJECT, } - connection = _Connection({'items': []}) + connection = _Connection({"items": []}) client = _Client(connection) bucket = self._make_one(name=NAME, user_project=USER_PROJECT) iterator = bucket.list_blobs( @@ -683,50 +641,52 @@ def test_list_blobs_w_all_arguments_and_user_project(self): blobs = list(iterator) self.assertEqual(blobs, []) kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], '/b/%s/o' % NAME) - self.assertEqual(kw['query_params'], EXPECTED) + self.assertEqual(kw["method"], "GET") + self.assertEqual(kw["path"], "/b/%s/o" % NAME) + self.assertEqual(kw["query_params"], EXPECTED) def test_list_blobs(self): - NAME = 'name' - connection = _Connection({'items': []}) + NAME = "name" + connection = _Connection({"items": []}) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) iterator = bucket.list_blobs() blobs = list(iterator) self.assertEqual(blobs, []) kw, = connection._requested - self.assertEqual(kw['method'], 'GET') - self.assertEqual(kw['path'], '/b/%s/o' % NAME) - self.assertEqual(kw['query_params'], {'projection': 'noAcl'}) + self.assertEqual(kw["method"], "GET") + self.assertEqual(kw["path"], "/b/%s/o" % NAME) + self.assertEqual(kw["query_params"], {"projection": "noAcl"}) def test_list_notifications(self): from google.cloud.storage.notification import BucketNotification from google.cloud.storage.notification import _TOPIC_REF_FMT from google.cloud.storage.notification import ( - JSON_API_V1_PAYLOAD_FORMAT, NONE_PAYLOAD_FORMAT) + JSON_API_V1_PAYLOAD_FORMAT, + NONE_PAYLOAD_FORMAT, + ) - NAME = 'name' + NAME = "name" - topic_refs = [ - ('my-project-123', 'topic-1'), - ('other-project-456', 'topic-2'), - ] + topic_refs = [("my-project-123", "topic-1"), ("other-project-456", "topic-2")] - resources = [{ - 'topic': _TOPIC_REF_FMT.format(*topic_refs[0]), - 'id': '1', - 'etag': 'DEADBEEF', - 'selfLink': 'https://example.com/notification/1', - 'payload_format': NONE_PAYLOAD_FORMAT, - }, { - 'topic': _TOPIC_REF_FMT.format(*topic_refs[1]), - 'id': '2', - 'etag': 'FACECABB', - 'selfLink': 'https://example.com/notification/2', - 'payload_format': JSON_API_V1_PAYLOAD_FORMAT, - }] - connection = _Connection({'items': resources}) + resources = [ + { + "topic": _TOPIC_REF_FMT.format(*topic_refs[0]), + "id": "1", + "etag": "DEADBEEF", + "selfLink": "https://example.com/notification/1", + "payload_format": NONE_PAYLOAD_FORMAT, + }, + { + "topic": _TOPIC_REF_FMT.format(*topic_refs[1]), + "id": "2", + "etag": "FACECABB", + "selfLink": "https://example.com/notification/2", + "payload_format": JSON_API_V1_PAYLOAD_FORMAT, + }, + ] + connection = _Connection({"items": 
resources}) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) @@ -734,89 +694,89 @@ def test_list_notifications(self): self.assertEqual(len(notifications), len(resources)) for notification, resource, topic_ref in zip( - notifications, resources, topic_refs): + notifications, resources, topic_refs + ): self.assertIsInstance(notification, BucketNotification) self.assertEqual(notification.topic_project, topic_ref[0]) self.assertEqual(notification.topic_name, topic_ref[1]) - self.assertEqual(notification.notification_id, resource['id']) - self.assertEqual(notification.etag, resource['etag']) - self.assertEqual(notification.self_link, resource['selfLink']) - self.assertEqual( - notification.custom_attributes, - resource.get('custom_attributes')) + self.assertEqual(notification.notification_id, resource["id"]) + self.assertEqual(notification.etag, resource["etag"]) + self.assertEqual(notification.self_link, resource["selfLink"]) self.assertEqual( - notification.event_types, resource.get('event_types')) + notification.custom_attributes, resource.get("custom_attributes") + ) + self.assertEqual(notification.event_types, resource.get("event_types")) self.assertEqual( - notification.blob_name_prefix, - resource.get('blob_name_prefix')) + notification.blob_name_prefix, resource.get("blob_name_prefix") + ) self.assertEqual( - notification.payload_format, resource.get('payload_format')) + notification.payload_format, resource.get("payload_format") + ) def test_delete_miss(self): from google.cloud.exceptions import NotFound - NAME = 'name' + NAME = "name" connection = _Connection() client = _Client(connection) bucket = self._make_one(client=client, name=NAME) self.assertRaises(NotFound, bucket.delete) - expected_cw = [{ - 'method': 'DELETE', - 'path': bucket.path, - 'query_params': {}, - '_target_object': None, - }] + expected_cw = [ + { + "method": "DELETE", + "path": bucket.path, + "query_params": {}, + "_target_object": None, + } + ] self.assertEqual(connection._deleted_buckets, expected_cw) def test_delete_hit_with_user_project(self): - NAME = 'name' - USER_PROJECT = 'user-project-123' - GET_BLOBS_RESP = {'items': []} + NAME = "name" + USER_PROJECT = "user-project-123" + GET_BLOBS_RESP = {"items": []} connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True client = _Client(connection) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) result = bucket.delete(force=True) self.assertIsNone(result) - expected_cw = [{ - 'method': 'DELETE', - 'path': bucket.path, - '_target_object': None, - 'query_params': {'userProject': USER_PROJECT}, - }] + expected_cw = [ + { + "method": "DELETE", + "path": bucket.path, + "_target_object": None, + "query_params": {"userProject": USER_PROJECT}, + } + ] self.assertEqual(connection._deleted_buckets, expected_cw) def test_delete_force_delete_blobs(self): - NAME = 'name' - BLOB_NAME1 = 'blob-name1' - BLOB_NAME2 = 'blob-name2' - GET_BLOBS_RESP = { - 'items': [ - {'name': BLOB_NAME1}, - {'name': BLOB_NAME2}, - ], - } + NAME = "name" + BLOB_NAME1 = "blob-name1" + BLOB_NAME2 = "blob-name2" + GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} DELETE_BLOB1_RESP = DELETE_BLOB2_RESP = {} - connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP, - DELETE_BLOB2_RESP) + connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP, DELETE_BLOB2_RESP) connection._delete_bucket = True client = 
_Client(connection) bucket = self._make_one(client=client, name=NAME) result = bucket.delete(force=True) self.assertIsNone(result) - expected_cw = [{ - 'method': 'DELETE', - 'path': bucket.path, - 'query_params': {}, - '_target_object': None, - }] + expected_cw = [ + { + "method": "DELETE", + "path": bucket.path, + "query_params": {}, + "_target_object": None, + } + ] self.assertEqual(connection._deleted_buckets, expected_cw) def test_delete_force_miss_blobs(self): - NAME = 'name' - BLOB_NAME = 'blob-name1' - GET_BLOBS_RESP = {'items': [{'name': BLOB_NAME}]} + NAME = "name" + BLOB_NAME = "blob-name1" + GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME}]} # Note the connection does not have a response for the blob. connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True @@ -824,24 +784,21 @@ def test_delete_force_miss_blobs(self): bucket = self._make_one(client=client, name=NAME) result = bucket.delete(force=True) self.assertIsNone(result) - expected_cw = [{ - 'method': 'DELETE', - 'path': bucket.path, - 'query_params': {}, - '_target_object': None, - }] + expected_cw = [ + { + "method": "DELETE", + "path": bucket.path, + "query_params": {}, + "_target_object": None, + } + ] self.assertEqual(connection._deleted_buckets, expected_cw) def test_delete_too_many(self): - NAME = 'name' - BLOB_NAME1 = 'blob-name1' - BLOB_NAME2 = 'blob-name2' - GET_BLOBS_RESP = { - 'items': [ - {'name': BLOB_NAME1}, - {'name': BLOB_NAME2}, - ], - } + NAME = "name" + BLOB_NAME1 = "blob-name1" + BLOB_NAME2 = "blob-name2" + GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True client = _Client(connection) @@ -855,34 +812,33 @@ def test_delete_too_many(self): def test_delete_blob_miss(self): from google.cloud.exceptions import NotFound - NAME = 'name' - NONESUCH = 'nonesuch' + NAME = "name" + NONESUCH = "nonesuch" connection = _Connection() client = _Client(connection) bucket = self._make_one(client=client, name=NAME) self.assertRaises(NotFound, bucket.delete_blob, NONESUCH) kw, = connection._requested - self.assertEqual(kw['method'], 'DELETE') - self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH)) - self.assertEqual(kw['query_params'], {}) + self.assertEqual(kw["method"], "DELETE") + self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) + self.assertEqual(kw["query_params"], {}) def test_delete_blob_hit_with_user_project(self): - NAME = 'name' - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' + NAME = "name" + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" connection = _Connection({}) client = _Client(connection) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) result = bucket.delete_blob(BLOB_NAME) self.assertIsNone(result) kw, = connection._requested - self.assertEqual(kw['method'], 'DELETE') - self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw['query_params'], {'userProject': USER_PROJECT}) + self.assertEqual(kw["method"], "DELETE") + self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) + self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT}) def test_delete_blobs_empty(self): - NAME = 'name' + NAME = "name" connection = _Connection() client = _Client(connection) bucket = self._make_one(client=client, name=NAME) @@ -890,41 +846,40 @@ def test_delete_blobs_empty(self): 
self.assertEqual(connection._requested, []) def test_delete_blobs_hit_w_user_project(self): - NAME = 'name' - BLOB_NAME = 'blob-name' - USER_PROJECT = 'user-project-123' + NAME = "name" + BLOB_NAME = "blob-name" + USER_PROJECT = "user-project-123" connection = _Connection({}) client = _Client(connection) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) bucket.delete_blobs([BLOB_NAME]) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'DELETE') - self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) + self.assertEqual(kw[0]["method"], "DELETE") + self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) + self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) def test_delete_blobs_miss_no_on_error(self): from google.cloud.exceptions import NotFound - NAME = 'name' - BLOB_NAME = 'blob-name' - NONESUCH = 'nonesuch' + NAME = "name" + BLOB_NAME = "blob-name" + NONESUCH = "nonesuch" connection = _Connection({}) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH]) kw = connection._requested self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]['method'], 'DELETE') - self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw[1]['method'], 'DELETE') - self.assertEqual(kw[1]['path'], '/b/%s/o/%s' % (NAME, NONESUCH)) + self.assertEqual(kw[0]["method"], "DELETE") + self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) + self.assertEqual(kw[1]["method"], "DELETE") + self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) def test_delete_blobs_miss_w_on_error(self): - NAME = 'name' - BLOB_NAME = 'blob-name' - NONESUCH = 'nonesuch' + NAME = "name" + BLOB_NAME = "blob-name" + NONESUCH = "nonesuch" connection = _Connection({}) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) @@ -933,10 +888,10 @@ def test_delete_blobs_miss_w_on_error(self): self.assertEqual(errors, [NONESUCH]) kw = connection._requested self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]['method'], 'DELETE') - self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) - self.assertEqual(kw[1]['method'], 'DELETE') - self.assertEqual(kw[1]['path'], '/b/%s/o/%s' % (NAME, NONESUCH)) + self.assertEqual(kw[0]["method"], "DELETE") + self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) + self.assertEqual(kw[1]["method"], "DELETE") + self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) @staticmethod def _make_blob(bucket_name, blob_name): @@ -944,13 +899,13 @@ def _make_blob(bucket_name, blob_name): blob = mock.create_autospec(Blob) blob.name = blob_name - blob.path = '/b/{}/o/{}'.format(bucket_name, blob_name) + blob.path = "/b/{}/o/{}".format(bucket_name, blob_name) return blob def test_copy_blobs_wo_name(self): - SOURCE = 'source' - DEST = 'dest' - BLOB_NAME = 'blob-name' + SOURCE = "source" + DEST = "dest" + BLOB_NAME = "blob-name" connection = _Connection({}) client = _Client(connection) source = self._make_one(client=client, name=SOURCE) @@ -963,16 +918,17 @@ def test_copy_blobs_wo_name(self): self.assertEqual(new_blob.name, BLOB_NAME) kw, = connection._requested - COPY_PATH = '/b/{}/o/{}/copyTo/b/{}/o/{}'.format( - SOURCE, BLOB_NAME, DEST, BLOB_NAME) - 
self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], COPY_PATH) - self.assertEqual(kw['query_params'], {}) + COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + SOURCE, BLOB_NAME, DEST, BLOB_NAME + ) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], COPY_PATH) + self.assertEqual(kw["query_params"], {}) def test_copy_blobs_source_generation(self): - SOURCE = 'source' - DEST = 'dest' - BLOB_NAME = 'blob-name' + SOURCE = "source" + DEST = "dest" + BLOB_NAME = "blob-name" GENERATION = 1512565576797178 connection = _Connection({}) @@ -987,19 +943,20 @@ def test_copy_blobs_source_generation(self): self.assertEqual(new_blob.name, BLOB_NAME) kw, = connection._requested - COPY_PATH = '/b/{}/o/{}/copyTo/b/{}/o/{}'.format( - SOURCE, BLOB_NAME, DEST, BLOB_NAME) - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], COPY_PATH) - self.assertEqual(kw['query_params'], {'sourceGeneration': GENERATION}) + COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + SOURCE, BLOB_NAME, DEST, BLOB_NAME + ) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], COPY_PATH) + self.assertEqual(kw["query_params"], {"sourceGeneration": GENERATION}) def test_copy_blobs_preserve_acl(self): from google.cloud.storage.acl import ObjectACL - SOURCE = 'source' - DEST = 'dest' - BLOB_NAME = 'blob-name' - NEW_NAME = 'new_name' + SOURCE = "source" + DEST = "dest" + BLOB_NAME = "blob-name" + NEW_NAME = "new_name" connection = _Connection({}, {}) client = _Client(connection) @@ -1008,35 +965,36 @@ def test_copy_blobs_preserve_acl(self): blob = self._make_blob(SOURCE, BLOB_NAME) new_blob = source.copy_blob( - blob, dest, NEW_NAME, client=client, preserve_acl=False) + blob, dest, NEW_NAME, client=client, preserve_acl=False + ) self.assertIs(new_blob.bucket, dest) self.assertEqual(new_blob.name, NEW_NAME) self.assertIsInstance(new_blob.acl, ObjectACL) kw1, kw2 = connection._requested - COPY_PATH = '/b/{}/o/{}/copyTo/b/{}/o/{}'.format( - SOURCE, BLOB_NAME, DEST, NEW_NAME) - NEW_BLOB_PATH = '/b/{}/o/{}'.format(DEST, NEW_NAME) + COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + SOURCE, BLOB_NAME, DEST, NEW_NAME + ) + NEW_BLOB_PATH = "/b/{}/o/{}".format(DEST, NEW_NAME) - self.assertEqual(kw1['method'], 'POST') - self.assertEqual(kw1['path'], COPY_PATH) - self.assertEqual(kw1['query_params'], {}) + self.assertEqual(kw1["method"], "POST") + self.assertEqual(kw1["path"], COPY_PATH) + self.assertEqual(kw1["query_params"], {}) - self.assertEqual(kw2['method'], 'PATCH') - self.assertEqual(kw2['path'], NEW_BLOB_PATH) - self.assertEqual(kw2['query_params'], {'projection': 'full'}) + self.assertEqual(kw2["method"], "PATCH") + self.assertEqual(kw2["path"], NEW_BLOB_PATH) + self.assertEqual(kw2["query_params"], {"projection": "full"}) def test_copy_blobs_w_name_and_user_project(self): - SOURCE = 'source' - DEST = 'dest' - BLOB_NAME = 'blob-name' - NEW_NAME = 'new_name' - USER_PROJECT = 'user-project-123' + SOURCE = "source" + DEST = "dest" + BLOB_NAME = "blob-name" + NEW_NAME = "new_name" + USER_PROJECT = "user-project-123" connection = _Connection({}) client = _Client(connection) - source = self._make_one( - client=client, name=SOURCE, user_project=USER_PROJECT) + source = self._make_one(client=client, name=SOURCE, user_project=USER_PROJECT) dest = self._make_one(client=client, name=DEST) blob = self._make_blob(SOURCE, BLOB_NAME) @@ -1045,18 +1003,19 @@ def test_copy_blobs_w_name_and_user_project(self): self.assertIs(new_blob.bucket, dest) self.assertEqual(new_blob.name, 
NEW_NAME) - COPY_PATH = '/b/{}/o/{}/copyTo/b/{}/o/{}'.format( - SOURCE, BLOB_NAME, DEST, NEW_NAME) + COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + SOURCE, BLOB_NAME, DEST, NEW_NAME + ) kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], COPY_PATH) - self.assertEqual(kw['query_params'], {'userProject': USER_PROJECT}) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], COPY_PATH) + self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT}) def test_rename_blob(self): - BUCKET_NAME = 'BUCKET_NAME' - BLOB_NAME = 'blob-name' - NEW_BLOB_NAME = 'new-blob-name' - DATA = {'name': NEW_BLOB_NAME} + BUCKET_NAME = "BUCKET_NAME" + BLOB_NAME = "blob-name" + NEW_BLOB_NAME = "new-blob-name" + DATA = {"name": NEW_BLOB_NAME} connection = _Connection(DATA) client = _Client(connection) bucket = self._make_one(client=client, name=BUCKET_NAME) @@ -1067,19 +1026,20 @@ def test_rename_blob(self): self.assertIs(renamed_blob.bucket, bucket) self.assertEqual(renamed_blob.name, NEW_BLOB_NAME) - COPY_PATH = '/b/{}/o/{}/copyTo/b/{}/o/{}'.format( - BUCKET_NAME, BLOB_NAME, BUCKET_NAME, NEW_BLOB_NAME) + COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + BUCKET_NAME, BLOB_NAME, BUCKET_NAME, NEW_BLOB_NAME + ) kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], COPY_PATH) - self.assertEqual(kw['query_params'], {}) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], COPY_PATH) + self.assertEqual(kw["query_params"], {}) blob.delete.assert_called_once_with(client) def test_rename_blob_to_itself(self): - BUCKET_NAME = 'BUCKET_NAME' - BLOB_NAME = 'blob-name' - DATA = {'name': BLOB_NAME} + BUCKET_NAME = "BUCKET_NAME" + BLOB_NAME = "blob-name" + DATA = {"name": BLOB_NAME} connection = _Connection(DATA) client = _Client(connection) bucket = self._make_one(client=client, name=BUCKET_NAME) @@ -1090,60 +1050,53 @@ def test_rename_blob_to_itself(self): self.assertIs(renamed_blob.bucket, bucket) self.assertEqual(renamed_blob.name, BLOB_NAME) - COPY_PATH = '/b/{}/o/{}/copyTo/b/{}/o/{}'.format( - BUCKET_NAME, BLOB_NAME, BUCKET_NAME, BLOB_NAME) + COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format( + BUCKET_NAME, BLOB_NAME, BUCKET_NAME, BLOB_NAME + ) kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], COPY_PATH) - self.assertEqual(kw['query_params'], {}) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], COPY_PATH) + self.assertEqual(kw["query_params"], {}) blob.delete.assert_not_called() def test_etag(self): - ETAG = 'ETAG' - properties = {'etag': ETAG} + ETAG = "ETAG" + properties = {"etag": ETAG} bucket = self._make_one(properties=properties) self.assertEqual(bucket.etag, ETAG) def test_id(self): - ID = 'ID' - properties = {'id': ID} + ID = "ID" + properties = {"id": ID} bucket = self._make_one(properties=properties) self.assertEqual(bucket.id, ID) def test_location_getter(self): - NAME = 'name' - before = {'location': 'AS'} + NAME = "name" + before = {"location": "AS"} bucket = self._make_one(name=NAME, properties=before) - self.assertEqual(bucket.location, 'AS') + self.assertEqual(bucket.location, "AS") - @mock.patch('warnings.warn') + @mock.patch("warnings.warn") def test_location_setter(self, mock_warn): from google.cloud.storage import bucket as bucket_module - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) self.assertIsNone(bucket.location) - bucket.location = 'AS' - self.assertEqual(bucket.location, 
'AS') - self.assertTrue('location' in bucket._changes) + bucket.location = "AS" + self.assertEqual(bucket.location, "AS") + self.assertTrue("location" in bucket._changes) mock_warn.assert_called_once_with( - bucket_module._LOCATION_SETTER_MESSAGE, - DeprecationWarning, - stacklevel=2) + bucket_module._LOCATION_SETTER_MESSAGE, DeprecationWarning, stacklevel=2 + ) def test_lifecycle_rules_getter_unknown_action_type(self): - NAME = 'name' - BOGUS_RULE = { - 'action': { - 'type': 'Bogus', - }, - 'condition': { - 'age': 42, - }, - } + NAME = "name" + BOGUS_RULE = {"action": {"type": "Bogus"}, "condition": {"age": 42}} rules = [BOGUS_RULE] - properties = {'lifecycle': {'rule': rules}} + properties = {"lifecycle": {"rule": rules}} bucket = self._make_one(name=NAME, properties=properties) with self.assertRaises(ValueError): @@ -1151,28 +1104,18 @@ def test_lifecycle_rules_getter_unknown_action_type(self): def test_lifecycle_rules_getter(self): from google.cloud.storage.bucket import ( - LifecycleRuleDelete, LifecycleRuleSetStorageClass) + LifecycleRuleDelete, + LifecycleRuleSetStorageClass, + ) - NAME = 'name' - DELETE_RULE = { - 'action': { - 'type': 'Delete', - }, - 'condition': { - 'age': 42, - }, - } + NAME = "name" + DELETE_RULE = {"action": {"type": "Delete"}, "condition": {"age": 42}} SSC_RULE = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': 'NEARLINE', - }, - 'condition': { - 'isLive': False, - }, + "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "condition": {"isLive": False}, } rules = [DELETE_RULE, SSC_RULE] - properties = {'lifecycle': {'rule': rules}} + properties = {"lifecycle": {"rule": rules}} bucket = self._make_one(name=NAME, properties=properties) found = list(bucket.lifecycle_rules) @@ -1186,23 +1129,11 @@ def test_lifecycle_rules_getter(self): self.assertEqual(dict(ssc_rule), SSC_RULE) def test_lifecycle_rules_setter_w_dicts(self): - NAME = 'name' - DELETE_RULE = { - 'action': { - 'type': 'Delete', - }, - 'condition': { - 'age': 42, - }, - } + NAME = "name" + DELETE_RULE = {"action": {"type": "Delete"}, "condition": {"age": 42}} SSC_RULE = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': 'NEARLINE', - }, - 'condition': { - 'isLive': False, - }, + "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "condition": {"isLive": False}, } rules = [DELETE_RULE, SSC_RULE] bucket = self._make_one(name=NAME) @@ -1210,31 +1141,20 @@ def test_lifecycle_rules_setter_w_dicts(self): bucket.lifecycle_rules = rules - self.assertEqual( - [dict(rule) for rule in bucket.lifecycle_rules], rules) - self.assertTrue('lifecycle' in bucket._changes) + self.assertEqual([dict(rule) for rule in bucket.lifecycle_rules], rules) + self.assertTrue("lifecycle" in bucket._changes) def test_lifecycle_rules_setter_w_helpers(self): from google.cloud.storage.bucket import ( - LifecycleRuleDelete, LifecycleRuleSetStorageClass) + LifecycleRuleDelete, + LifecycleRuleSetStorageClass, + ) - NAME = 'name' - DELETE_RULE = { - 'action': { - 'type': 'Delete', - }, - 'condition': { - 'age': 42, - }, - } + NAME = "name" + DELETE_RULE = {"action": {"type": "Delete"}, "condition": {"age": 42}} SSC_RULE = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': 'NEARLINE', - }, - 'condition': { - 'isLive': False, - }, + "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "condition": {"isLive": False}, } rules = [DELETE_RULE, SSC_RULE] bucket = self._make_one(name=NAME) @@ -1242,92 +1162,65 @@ def test_lifecycle_rules_setter_w_helpers(self): 
bucket.lifecycle_rules = [ LifecycleRuleDelete(age=42), - LifecycleRuleSetStorageClass('NEARLINE', is_live=False), + LifecycleRuleSetStorageClass("NEARLINE", is_live=False), ] - self.assertEqual( - [dict(rule) for rule in bucket.lifecycle_rules], rules) - self.assertTrue('lifecycle' in bucket._changes) + self.assertEqual([dict(rule) for rule in bucket.lifecycle_rules], rules) + self.assertTrue("lifecycle" in bucket._changes) def test_clear_lifecycle_rules(self): - NAME = 'name' - DELETE_RULE = { - 'action': { - 'type': 'Delete', - }, - 'condition': { - 'age': 42, - }, - } + NAME = "name" + DELETE_RULE = {"action": {"type": "Delete"}, "condition": {"age": 42}} SSC_RULE = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': 'NEARLINE', - }, - 'condition': { - 'isLive': False, - }, + "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "condition": {"isLive": False}, } rules = [DELETE_RULE, SSC_RULE] bucket = self._make_one(name=NAME) - bucket._properties['lifecycle'] = {'rule': rules} + bucket._properties["lifecycle"] = {"rule": rules} self.assertEqual(list(bucket.lifecycle_rules), rules) bucket.clear_lifecyle_rules() self.assertEqual(list(bucket.lifecycle_rules), []) - self.assertTrue('lifecycle' in bucket._changes) + self.assertTrue("lifecycle" in bucket._changes) def test_add_lifecycle_delete_rule(self): - NAME = 'name' - DELETE_RULE = { - 'action': { - 'type': 'Delete', - }, - 'condition': { - 'age': 42, - }, - } + NAME = "name" + DELETE_RULE = {"action": {"type": "Delete"}, "condition": {"age": 42}} rules = [DELETE_RULE] bucket = self._make_one(name=NAME) self.assertEqual(list(bucket.lifecycle_rules), []) bucket.add_lifecycle_delete_rule(age=42) - self.assertEqual( - [dict(rule) for rule in bucket.lifecycle_rules], rules) - self.assertTrue('lifecycle' in bucket._changes) + self.assertEqual([dict(rule) for rule in bucket.lifecycle_rules], rules) + self.assertTrue("lifecycle" in bucket._changes) def test_add_lifecycle_set_storage_class_rule(self): - NAME = 'name' + NAME = "name" SSC_RULE = { - 'action': { - 'type': 'SetStorageClass', - 'storageClass': 'NEARLINE', - }, - 'condition': { - 'isLive': False, - }, + "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "condition": {"isLive": False}, } rules = [SSC_RULE] bucket = self._make_one(name=NAME) self.assertEqual(list(bucket.lifecycle_rules), []) - bucket.add_lifecycle_set_storage_class_rule('NEARLINE', is_live=False) + bucket.add_lifecycle_set_storage_class_rule("NEARLINE", is_live=False) - self.assertEqual( - [dict(rule) for rule in bucket.lifecycle_rules], rules) - self.assertTrue('lifecycle' in bucket._changes) + self.assertEqual([dict(rule) for rule in bucket.lifecycle_rules], rules) + self.assertTrue("lifecycle" in bucket._changes) def test_cors_getter(self): - NAME = 'name' + NAME = "name" CORS_ENTRY = { - 'maxAgeSeconds': 1234, - 'method': ['OPTIONS', 'GET'], - 'origin': ['127.0.0.1'], - 'responseHeader': ['Content-Type'], + "maxAgeSeconds": 1234, + "method": ["OPTIONS", "GET"], + "origin": ["127.0.0.1"], + "responseHeader": ["Content-Type"], } - properties = {'cors': [CORS_ENTRY, {}]} + properties = {"cors": [CORS_ENTRY, {}]} bucket = self._make_one(name=NAME, properties=properties) entries = bucket.cors self.assertEqual(len(entries), 2) @@ -1337,57 +1230,52 @@ def test_cors_getter(self): self.assertIsNot(entries[0], CORS_ENTRY) def test_cors_setter(self): - NAME = 'name' + NAME = "name" CORS_ENTRY = { - 'maxAgeSeconds': 1234, - 'method': ['OPTIONS', 'GET'], - 'origin': ['127.0.0.1'], - 
'responseHeader': ['Content-Type'], + "maxAgeSeconds": 1234, + "method": ["OPTIONS", "GET"], + "origin": ["127.0.0.1"], + "responseHeader": ["Content-Type"], } bucket = self._make_one(name=NAME) self.assertEqual(bucket.cors, []) bucket.cors = [CORS_ENTRY] self.assertEqual(bucket.cors, [CORS_ENTRY]) - self.assertTrue('cors' in bucket._changes) + self.assertTrue("cors" in bucket._changes) def test_default_kms_key_name_getter(self): - NAME = 'name' + NAME = "name" KMS_RESOURCE = ( - 'projects/test-project-123/' - 'locations/us/' - 'keyRings/test-ring/' - 'cryptoKeys/test-key' + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" ) - ENCRYPTION_CONFIG = { - 'defaultKmsKeyName': KMS_RESOURCE, - } + ENCRYPTION_CONFIG = {"defaultKmsKeyName": KMS_RESOURCE} bucket = self._make_one(name=NAME) self.assertIsNone(bucket.default_kms_key_name) - bucket._properties['encryption'] = ENCRYPTION_CONFIG + bucket._properties["encryption"] = ENCRYPTION_CONFIG self.assertEqual(bucket.default_kms_key_name, KMS_RESOURCE) def test_default_kms_key_name_setter(self): - NAME = 'name' + NAME = "name" KMS_RESOURCE = ( - 'projects/test-project-123/' - 'locations/us/' - 'keyRings/test-ring/' - 'cryptoKeys/test-key' + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" ) - ENCRYPTION_CONFIG = { - 'defaultKmsKeyName': KMS_RESOURCE, - } + ENCRYPTION_CONFIG = {"defaultKmsKeyName": KMS_RESOURCE} bucket = self._make_one(name=NAME) bucket.default_kms_key_name = KMS_RESOURCE - self.assertEqual( - bucket._properties['encryption'], ENCRYPTION_CONFIG) - self.assertTrue('encryption' in bucket._changes) + self.assertEqual(bucket._properties["encryption"], ENCRYPTION_CONFIG) + self.assertTrue("encryption" in bucket._changes) def test_labels_getter(self): - NAME = 'name' - LABELS = {'color': 'red', 'flavor': 'cherry'} - properties = {'labels': LABELS} + NAME = "name" + LABELS = {"color": "red", "flavor": "cherry"} + properties = {"labels": LABELS} bucket = self._make_one(name=NAME, properties=properties) labels = bucket.labels self.assertEqual(labels, LABELS) @@ -1395,84 +1283,79 @@ def test_labels_getter(self): self.assertIsNot(labels, LABELS) def test_labels_setter(self): - NAME = 'name' - LABELS = {'color': 'red', 'flavor': 'cherry'} + NAME = "name" + LABELS = {"color": "red", "flavor": "cherry"} bucket = self._make_one(name=NAME) self.assertEqual(bucket.labels, {}) bucket.labels = LABELS self.assertEqual(bucket.labels, LABELS) - self.assertIsNot(bucket._properties['labels'], LABELS) - self.assertIn('labels', bucket._changes) + self.assertIsNot(bucket._properties["labels"], LABELS) + self.assertIn("labels", bucket._changes) def test_labels_setter_with_removal(self): # Make sure the bucket labels look correct and follow the expected # public structure. - bucket = self._make_one(name='name') + bucket = self._make_one(name="name") self.assertEqual(bucket.labels, {}) - bucket.labels = {'color': 'red', 'flavor': 'cherry'} - self.assertEqual(bucket.labels, {'color': 'red', 'flavor': 'cherry'}) - bucket.labels = {'color': 'red'} - self.assertEqual(bucket.labels, {'color': 'red'}) + bucket.labels = {"color": "red", "flavor": "cherry"} + self.assertEqual(bucket.labels, {"color": "red", "flavor": "cherry"}) + bucket.labels = {"color": "red"} + self.assertEqual(bucket.labels, {"color": "red"}) # Make sure that a patch call correctly removes the flavor label. 
- client = mock.NonCallableMock(spec=('_connection',)) - client._connection = mock.NonCallableMock(spec=('api_request',)) + client = mock.NonCallableMock(spec=("_connection",)) + client._connection = mock.NonCallableMock(spec=("api_request",)) bucket.patch(client=client) client._connection.api_request.assert_called() _, _, kwargs = client._connection.api_request.mock_calls[0] - self.assertEqual(len(kwargs['data']['labels']), 2) - self.assertEqual(kwargs['data']['labels']['color'], 'red') - self.assertIsNone(kwargs['data']['labels']['flavor']) + self.assertEqual(len(kwargs["data"]["labels"]), 2) + self.assertEqual(kwargs["data"]["labels"]["color"], "red") + self.assertIsNone(kwargs["data"]["labels"]["flavor"]) # A second patch call should be a no-op for labels. client._connection.api_request.reset_mock() bucket.patch(client=client) client._connection.api_request.assert_called() _, _, kwargs = client._connection.api_request.mock_calls[0] - self.assertNotIn('labels', kwargs['data']) + self.assertNotIn("labels", kwargs["data"]) def test_get_logging_w_prefix(self): - NAME = 'name' - LOG_BUCKET = 'logs' - LOG_PREFIX = 'pfx' - before = { - 'logging': { - 'logBucket': LOG_BUCKET, - 'logObjectPrefix': LOG_PREFIX, - }, - } + NAME = "name" + LOG_BUCKET = "logs" + LOG_PREFIX = "pfx" + before = {"logging": {"logBucket": LOG_BUCKET, "logObjectPrefix": LOG_PREFIX}} bucket = self._make_one(name=NAME, properties=before) info = bucket.get_logging() - self.assertEqual(info['logBucket'], LOG_BUCKET) - self.assertEqual(info['logObjectPrefix'], LOG_PREFIX) + self.assertEqual(info["logBucket"], LOG_BUCKET) + self.assertEqual(info["logObjectPrefix"], LOG_PREFIX) def test_enable_logging_defaults(self): - NAME = 'name' - LOG_BUCKET = 'logs' - before = {'logging': None} + NAME = "name" + LOG_BUCKET = "logs" + before = {"logging": None} bucket = self._make_one(name=NAME, properties=before) self.assertIsNone(bucket.get_logging()) bucket.enable_logging(LOG_BUCKET) info = bucket.get_logging() - self.assertEqual(info['logBucket'], LOG_BUCKET) - self.assertEqual(info['logObjectPrefix'], '') + self.assertEqual(info["logBucket"], LOG_BUCKET) + self.assertEqual(info["logObjectPrefix"], "") def test_enable_logging(self): - NAME = 'name' - LOG_BUCKET = 'logs' - LOG_PFX = 'pfx' - before = {'logging': None} + NAME = "name" + LOG_BUCKET = "logs" + LOG_PFX = "pfx" + before = {"logging": None} bucket = self._make_one(name=NAME, properties=before) self.assertIsNone(bucket.get_logging()) bucket.enable_logging(LOG_BUCKET, LOG_PFX) info = bucket.get_logging() - self.assertEqual(info['logBucket'], LOG_BUCKET) - self.assertEqual(info['logObjectPrefix'], LOG_PFX) + self.assertEqual(info["logBucket"], LOG_BUCKET) + self.assertEqual(info["logObjectPrefix"], LOG_PFX) def test_disable_logging(self): - NAME = 'name' - before = {'logging': {'logBucket': 'logs', 'logObjectPrefix': 'pfx'}} + NAME = "name" + before = {"logging": {"logBucket": "logs", "logObjectPrefix": "pfx"}} bucket = self._make_one(name=NAME, properties=before) self.assertIsNotNone(bucket.get_logging()) bucket.disable_logging() @@ -1480,7 +1363,7 @@ def test_disable_logging(self): def test_metageneration(self): METAGENERATION = 42 - properties = {'metageneration': METAGENERATION} + properties = {"metageneration": METAGENERATION} bucket = self._make_one(properties=properties) self.assertEqual(bucket.metageneration, METAGENERATION) @@ -1490,21 +1373,21 @@ def test_metageneration_unset(self): def test_metageneration_string_val(self): METAGENERATION = 42 - properties = 
{'metageneration': str(METAGENERATION)} + properties = {"metageneration": str(METAGENERATION)} bucket = self._make_one(properties=properties) self.assertEqual(bucket.metageneration, METAGENERATION) def test_owner(self): - OWNER = {'entity': 'project-owner-12345', 'entityId': '23456'} - properties = {'owner': OWNER} + OWNER = {"entity": "project-owner-12345", "entityId": "23456"} + properties = {"owner": OWNER} bucket = self._make_one(properties=properties) owner = bucket.owner - self.assertEqual(owner['entity'], 'project-owner-12345') - self.assertEqual(owner['entityId'], '23456') + self.assertEqual(owner["entity"], "project-owner-12345") + self.assertEqual(owner["entityId"], "23456") def test_project_number(self): PROJECT_NUMBER = 12345 - properties = {'projectNumber': PROJECT_NUMBER} + properties = {"projectNumber": PROJECT_NUMBER} bucket = self._make_one(properties=properties) self.assertEqual(bucket.project_number, PROJECT_NUMBER) @@ -1514,7 +1397,7 @@ def test_project_number_unset(self): def test_project_number_string_val(self): PROJECT_NUMBER = 12345 - properties = {'projectNumber': str(PROJECT_NUMBER)} + properties = {"projectNumber": str(PROJECT_NUMBER)} bucket = self._make_one(properties=properties) self.assertEqual(bucket.project_number, PROJECT_NUMBER) @@ -1523,10 +1406,7 @@ def test_retention_policy_effective_time_policy_missing(self): self.assertIsNone(bucket.retention_policy_effective_time) def test_retention_policy_effective_time_et_missing(self): - properties = { - 'retentionPolicy': { - }, - } + properties = {"retentionPolicy": {}} bucket = self._make_one(properties=properties) self.assertIsNone(bucket.retention_policy_effective_time) @@ -1538,34 +1418,23 @@ def test_retention_policy_effective_time(self): effective_time = datetime.datetime.utcnow().replace(tzinfo=UTC) properties = { - 'retentionPolicy': { - 'effectiveTime': _datetime_to_rfc3339(effective_time), - }, + "retentionPolicy": {"effectiveTime": _datetime_to_rfc3339(effective_time)} } bucket = self._make_one(properties=properties) - self.assertEqual( - bucket.retention_policy_effective_time, effective_time) + self.assertEqual(bucket.retention_policy_effective_time, effective_time) def test_retention_policy_locked_missing(self): bucket = self._make_one() self.assertFalse(bucket.retention_policy_locked) def test_retention_policy_locked_false(self): - properties = { - 'retentionPolicy': { - 'isLocked': False, - }, - } + properties = {"retentionPolicy": {"isLocked": False}} bucket = self._make_one(properties=properties) self.assertFalse(bucket.retention_policy_locked) def test_retention_policy_locked_true(self): - properties = { - 'retentionPolicy': { - 'isLocked': True, - }, - } + properties = {"retentionPolicy": {"isLocked": True}} bucket = self._make_one(properties=properties) self.assertTrue(bucket.retention_policy_locked) @@ -1575,21 +1444,14 @@ def test_retention_period_getter_policymissing(self): self.assertIsNone(bucket.retention_period) def test_retention_period_getter_pr_missing(self): - properties = { - 'retentionPolicy': { - }, - } + properties = {"retentionPolicy": {}} bucket = self._make_one(properties=properties) self.assertIsNone(bucket.retention_period) def test_retention_period_getter(self): period = 86400 * 100 # 100 days - properties = { - 'retentionPolicy': { - 'retentionPeriod': str(period), - }, - } + properties = {"retentionPolicy": {"retentionPeriod": str(period)}} bucket = self._make_one(properties=properties) self.assertEqual(bucket.retention_period, period) @@ -1597,14 +1459,11 @@ def 
test_retention_period_getter(self): def test_retention_period_setter_w_none(self): period = 86400 * 100 # 100 days bucket = self._make_one() - bucket._properties['retentionPolicy'] = { - 'retentionPeriod': period, - } + bucket._properties["retentionPolicy"] = {"retentionPeriod": period} bucket.retention_period = None - self.assertIsNone( - bucket._properties['retentionPolicy']) + self.assertIsNone(bucket._properties["retentionPolicy"]) def test_retention_period_setter_w_int(self): period = 86400 * 100 # 100 days @@ -1613,69 +1472,69 @@ def test_retention_period_setter_w_int(self): bucket.retention_period = period self.assertEqual( - bucket._properties['retentionPolicy']['retentionPeriod'], - str(period)) + bucket._properties["retentionPolicy"]["retentionPeriod"], str(period) + ) def test_self_link(self): - SELF_LINK = 'http://example.com/self/' - properties = {'selfLink': SELF_LINK} + SELF_LINK = "http://example.com/self/" + properties = {"selfLink": SELF_LINK} bucket = self._make_one(properties=properties) self.assertEqual(bucket.self_link, SELF_LINK) def test_storage_class_getter(self): - STORAGE_CLASS = 'http://example.com/self/' - properties = {'storageClass': STORAGE_CLASS} + STORAGE_CLASS = "http://example.com/self/" + properties = {"storageClass": STORAGE_CLASS} bucket = self._make_one(properties=properties) self.assertEqual(bucket.storage_class, STORAGE_CLASS) def test_storage_class_setter_invalid(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) with self.assertRaises(ValueError): - bucket.storage_class = 'BOGUS' - self.assertFalse('storageClass' in bucket._changes) + bucket.storage_class = "BOGUS" + self.assertFalse("storageClass" in bucket._changes) def test_storage_class_setter_STANDARD(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = 'STANDARD' - self.assertEqual(bucket.storage_class, 'STANDARD') - self.assertTrue('storageClass' in bucket._changes) + bucket.storage_class = "STANDARD" + self.assertEqual(bucket.storage_class, "STANDARD") + self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_NEARLINE(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = 'NEARLINE' - self.assertEqual(bucket.storage_class, 'NEARLINE') - self.assertTrue('storageClass' in bucket._changes) + bucket.storage_class = "NEARLINE" + self.assertEqual(bucket.storage_class, "NEARLINE") + self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_COLDLINE(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = 'COLDLINE' - self.assertEqual(bucket.storage_class, 'COLDLINE') - self.assertTrue('storageClass' in bucket._changes) + bucket.storage_class = "COLDLINE" + self.assertEqual(bucket.storage_class, "COLDLINE") + self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_MULTI_REGIONAL(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = 'MULTI_REGIONAL' - self.assertEqual(bucket.storage_class, 'MULTI_REGIONAL') - self.assertTrue('storageClass' in bucket._changes) + bucket.storage_class = "MULTI_REGIONAL" + self.assertEqual(bucket.storage_class, "MULTI_REGIONAL") + self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_REGIONAL(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = 'REGIONAL' - self.assertEqual(bucket.storage_class, 'REGIONAL') - 
self.assertTrue('storageClass' in bucket._changes) + bucket.storage_class = "REGIONAL" + self.assertEqual(bucket.storage_class, "REGIONAL") + self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_DURABLE_REDUCED_AVAILABILITY(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = 'DURABLE_REDUCED_AVAILABILITY' - self.assertEqual(bucket.storage_class, 'DURABLE_REDUCED_AVAILABILITY') - self.assertTrue('storageClass' in bucket._changes) + bucket.storage_class = "DURABLE_REDUCED_AVAILABILITY" + self.assertEqual(bucket.storage_class, "DURABLE_REDUCED_AVAILABILITY") + self.assertTrue("storageClass" in bucket._changes) def test_time_created(self): from google.cloud._helpers import _RFC3339_MICROS @@ -1683,7 +1542,7 @@ def test_time_created(self): TIMESTAMP = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC) TIME_CREATED = TIMESTAMP.strftime(_RFC3339_MICROS) - properties = {'timeCreated': TIME_CREATED} + properties = {"timeCreated": TIME_CREATED} bucket = self._make_one(properties=properties) self.assertEqual(bucket.time_created, TIMESTAMP) @@ -1692,61 +1551,60 @@ def test_time_created_unset(self): self.assertIsNone(bucket.time_created) def test_versioning_enabled_getter_missing(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) self.assertEqual(bucket.versioning_enabled, False) def test_versioning_enabled_getter(self): - NAME = 'name' - before = {'versioning': {'enabled': True}} + NAME = "name" + before = {"versioning": {"enabled": True}} bucket = self._make_one(name=NAME, properties=before) self.assertEqual(bucket.versioning_enabled, True) def test_versioning_enabled_setter(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) self.assertFalse(bucket.versioning_enabled) bucket.versioning_enabled = True self.assertTrue(bucket.versioning_enabled) def test_requester_pays_getter_missing(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) self.assertEqual(bucket.requester_pays, False) def test_requester_pays_getter(self): - NAME = 'name' - before = {'billing': {'requesterPays': True}} + NAME = "name" + before = {"billing": {"requesterPays": True}} bucket = self._make_one(name=NAME, properties=before) self.assertEqual(bucket.requester_pays, True) def test_requester_pays_setter(self): - NAME = 'name' + NAME = "name" bucket = self._make_one(name=NAME) self.assertFalse(bucket.requester_pays) bucket.requester_pays = True self.assertTrue(bucket.requester_pays) def test_configure_website_defaults(self): - NAME = 'name' - UNSET = {'website': {'mainPageSuffix': None, - 'notFoundPage': None}} + NAME = "name" + UNSET = {"website": {"mainPageSuffix": None, "notFoundPage": None}} bucket = self._make_one(name=NAME) bucket.configure_website() self.assertEqual(bucket._properties, UNSET) def test_configure_website(self): - NAME = 'name' - WEBSITE_VAL = {'website': {'mainPageSuffix': 'html', - 'notFoundPage': '404.html'}} + NAME = "name" + WEBSITE_VAL = { + "website": {"mainPageSuffix": "html", "notFoundPage": "404.html"} + } bucket = self._make_one(name=NAME) - bucket.configure_website('html', '404.html') + bucket.configure_website("html", "404.html") self.assertEqual(bucket._properties, WEBSITE_VAL) def test_disable_website(self): - NAME = 'name' - UNSET = {'website': {'mainPageSuffix': None, - 'notFoundPage': None}} + NAME = "name" + UNSET = {"website": {"mainPageSuffix": None, "notFoundPage": None}} bucket = self._make_one(name=NAME) bucket.disable_website() 
self.assertEqual(bucket._properties, UNSET) @@ -1757,29 +1615,29 @@ def test_get_iam_policy(self): from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.cloud.iam import Policy - NAME = 'name' - PATH = '/b/%s' % (NAME,) - ETAG = 'DEADBEEF' + NAME = "name" + PATH = "/b/%s" % (NAME,) + ETAG = "DEADBEEF" VERSION = 17 - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' + OWNER1 = "user:phred@example.com" + OWNER2 = "group:cloud-logs@google.com" + EDITOR1 = "domain:google.com" + EDITOR2 = "user:phred@example.com" + VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" + VIEWER2 = "user:phred@example.com" RETURNED = { - 'resourceId': PATH, - 'etag': ETAG, - 'version': VERSION, - 'bindings': [ - {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + "resourceId": PATH, + "etag": ETAG, + "version": VERSION, + "bindings": [ + {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, ], } EXPECTED = { - binding['role']: set(binding['members']) - for binding in RETURNED['bindings']} + binding["role"]: set(binding["members"]) for binding in RETURNED["bindings"] + } connection = _Connection(RETURNED) client = _Client(connection, None) bucket = self._make_one(client=client, name=NAME) @@ -1787,48 +1645,47 @@ def test_get_iam_policy(self): policy = bucket.get_iam_policy() self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED['etag']) - self.assertEqual(policy.version, RETURNED['version']) + self.assertEqual(policy.etag, RETURNED["etag"]) + self.assertEqual(policy.version, RETURNED["version"]) self.assertEqual(dict(policy), EXPECTED) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {}) + self.assertEqual(kw[0]["method"], "GET") + self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) + self.assertEqual(kw[0]["query_params"], {}) def test_get_iam_policy_w_user_project(self): from google.cloud.iam import Policy - NAME = 'name' - USER_PROJECT = 'user-project-123' - PATH = '/b/%s' % (NAME,) - ETAG = 'DEADBEEF' + NAME = "name" + USER_PROJECT = "user-project-123" + PATH = "/b/%s" % (NAME,) + ETAG = "DEADBEEF" VERSION = 17 RETURNED = { - 'resourceId': PATH, - 'etag': ETAG, - 'version': VERSION, - 'bindings': [], + "resourceId": PATH, + "etag": ETAG, + "version": VERSION, + "bindings": [], } EXPECTED = {} connection = _Connection(RETURNED) client = _Client(connection, None) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) policy = bucket.get_iam_policy() self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED['etag']) - self.assertEqual(policy.version, RETURNED['version']) + self.assertEqual(policy.etag, RETURNED["etag"]) + self.assertEqual(policy.version, RETURNED["version"]) self.assertEqual(dict(policy), EXPECTED) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], 
'%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) + self.assertEqual(kw[0]["method"], "GET") + self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) + self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) def test_set_iam_policy(self): import operator @@ -1837,29 +1694,25 @@ def test_set_iam_policy(self): from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.cloud.iam import Policy - NAME = 'name' - PATH = '/b/%s' % (NAME,) - ETAG = 'DEADBEEF' + NAME = "name" + PATH = "/b/%s" % (NAME,) + ETAG = "DEADBEEF" VERSION = 17 - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' + OWNER1 = "user:phred@example.com" + OWNER2 = "group:cloud-logs@google.com" + EDITOR1 = "domain:google.com" + EDITOR2 = "user:phred@example.com" + VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" + VIEWER2 = "user:phred@example.com" BINDINGS = [ - {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, ] - RETURNED = { - 'etag': ETAG, - 'version': VERSION, - 'bindings': BINDINGS, - } + RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} policy = Policy() for binding in BINDINGS: - policy[binding['role']] = binding['members'] + policy[binding["role"]] = binding["members"] connection = _Connection(RETURNED) client = _Client(connection, None) @@ -1873,19 +1726,18 @@ def test_set_iam_policy(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {}) - sent = kw[0]['data'] - self.assertEqual(sent['resourceId'], PATH) - self.assertEqual(len(sent['bindings']), len(BINDINGS)) - key = operator.itemgetter('role') + self.assertEqual(kw[0]["method"], "PUT") + self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) + self.assertEqual(kw[0]["query_params"], {}) + sent = kw[0]["data"] + self.assertEqual(sent["resourceId"], PATH) + self.assertEqual(len(sent["bindings"]), len(BINDINGS)) + key = operator.itemgetter("role") for found, expected in zip( - sorted(sent['bindings'], key=key), - sorted(BINDINGS, key=key)): - self.assertEqual(found['role'], expected['role']) - self.assertEqual( - sorted(found['members']), sorted(expected['members'])) + sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key) + ): + self.assertEqual(found["role"], expected["role"]) + self.assertEqual(sorted(found["members"]), sorted(expected["members"])) def test_set_iam_policy_w_user_project(self): import operator @@ -1894,35 +1746,30 @@ def test_set_iam_policy_w_user_project(self): from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.cloud.iam import Policy - NAME = 'name' - USER_PROJECT = 'user-project-123' - PATH = '/b/%s' % (NAME,) - ETAG = 'DEADBEEF' + NAME = "name" + USER_PROJECT = "user-project-123" + PATH = "/b/%s" % (NAME,) + ETAG = "DEADBEEF" VERSION = 17 - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' 
- VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' + OWNER1 = "user:phred@example.com" + OWNER2 = "group:cloud-logs@google.com" + EDITOR1 = "domain:google.com" + EDITOR2 = "user:phred@example.com" + VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" + VIEWER2 = "user:phred@example.com" BINDINGS = [ - {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, ] - RETURNED = { - 'etag': ETAG, - 'version': VERSION, - 'bindings': BINDINGS, - } + RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} policy = Policy() for binding in BINDINGS: - policy[binding['role']] = binding['members'] + policy[binding["role"]] = binding["members"] connection = _Connection(RETURNED) client = _Client(connection, None) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) returned = bucket.set_iam_policy(policy) @@ -1932,34 +1779,33 @@ def test_set_iam_policy_w_user_project(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PUT') - self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'userProject': USER_PROJECT}) - sent = kw[0]['data'] - self.assertEqual(sent['resourceId'], PATH) - self.assertEqual(len(sent['bindings']), len(BINDINGS)) - key = operator.itemgetter('role') + self.assertEqual(kw[0]["method"], "PUT") + self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) + self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) + sent = kw[0]["data"] + self.assertEqual(sent["resourceId"], PATH) + self.assertEqual(len(sent["bindings"]), len(BINDINGS)) + key = operator.itemgetter("role") for found, expected in zip( - sorted(sent['bindings'], key=key), - sorted(BINDINGS, key=key)): - self.assertEqual(found['role'], expected['role']) - self.assertEqual( - sorted(found['members']), sorted(expected['members'])) + sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key) + ): + self.assertEqual(found["role"], expected["role"]) + self.assertEqual(sorted(found["members"]), sorted(expected["members"])) def test_test_iam_permissions(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - NAME = 'name' - PATH = '/b/%s' % (NAME,) + NAME = "name" + PATH = "/b/%s" % (NAME,) PERMISSIONS = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] ALLOWED = PERMISSIONS[1:] - RETURNED = {'permissions': ALLOWED} + RETURNED = {"permissions": ALLOWED} connection = _Connection(RETURNED) client = _Client(connection, None) bucket = self._make_one(client=client, name=NAME) @@ -1970,29 +1816,28 @@ def test_test_iam_permissions(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) - self.assertEqual(kw[0]['query_params'], {'permissions': PERMISSIONS}) + self.assertEqual(kw[0]["method"], "GET") + self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) + 
self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS}) def test_test_iam_permissions_w_user_project(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - NAME = 'name' - USER_PROJECT = 'user-project-123' - PATH = '/b/%s' % (NAME,) + NAME = "name" + USER_PROJECT = "user-project-123" + PATH = "/b/%s" % (NAME,) PERMISSIONS = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] ALLOWED = PERMISSIONS[1:] - RETURNED = {'permissions': ALLOWED} + RETURNED = {"permissions": ALLOWED} connection = _Connection(RETURNED) client = _Client(connection, None) - bucket = self._make_one( - client=client, name=NAME, user_project=USER_PROJECT) + bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) allowed = bucket.test_iam_permissions(PERMISSIONS) @@ -2000,18 +1845,19 @@ def test_test_iam_permissions_w_user_project(self): kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (PATH,)) + self.assertEqual(kw[0]["method"], "GET") + self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) self.assertEqual( - kw[0]['query_params'], - {'permissions': PERMISSIONS, 'userProject': USER_PROJECT}) + kw[0]["query_params"], + {"permissions": PERMISSIONS, "userProject": USER_PROJECT}, + ) def test_make_public_defaults(self): from google.cloud.storage.acl import _ACLEntity - NAME = 'name' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after = {'acl': permissive, 'defaultObjectAcl': []} + NAME = "name" + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + after = {"acl": permissive, "defaultObjectAcl": []} connection = _Connection(after) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) @@ -2022,18 +1868,18 @@ def test_make_public_defaults(self): self.assertEqual(list(bucket.default_object_acl), []) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': after['acl']}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/%s" % NAME) + self.assertEqual(kw[0]["data"], {"acl": after["acl"]}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) def _make_public_w_future_helper(self, default_object_acl_loaded=True): from google.cloud.storage.acl import _ACLEntity - NAME = 'name' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after1 = {'acl': permissive, 'defaultObjectAcl': []} - after2 = {'acl': permissive, 'defaultObjectAcl': permissive} + NAME = "name" + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + after1 = {"acl": permissive, "defaultObjectAcl": []} + after2 = {"acl": permissive, "defaultObjectAcl": permissive} if default_object_acl_loaded: num_requests = 2 connection = _Connection(after1, after2) @@ -2051,18 +1897,18 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True): self.assertEqual(list(bucket.default_object_acl), permissive) kw = connection._requested self.assertEqual(len(kw), num_requests) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': 
permissive}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/%s" % NAME) + self.assertEqual(kw[0]["data"], {"acl": permissive}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) if not default_object_acl_loaded: - self.assertEqual(kw[1]['method'], 'GET') - self.assertEqual(kw[1]['path'], '/b/%s/defaultObjectAcl' % NAME) + self.assertEqual(kw[1]["method"], "GET") + self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME) # Last could be 1 or 2 depending on `default_object_acl_loaded`. - self.assertEqual(kw[-1]['method'], 'PATCH') - self.assertEqual(kw[-1]['path'], '/b/%s' % NAME) - self.assertEqual(kw[-1]['data'], {'defaultObjectAcl': permissive}) - self.assertEqual(kw[-1]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[-1]["method"], "PATCH") + self.assertEqual(kw[-1]["path"], "/b/%s" % NAME) + self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": permissive}) + self.assertEqual(kw[-1]["query_params"], {"projection": "full"}) def test_make_public_w_future(self): self._make_public_w_future_helper(default_object_acl_loaded=True) @@ -2094,55 +1940,49 @@ def grant_read(self): self._granted = True def save(self, client=None): - _saved.append( - (self._bucket, self._name, self._granted, client)) + _saved.append((self._bucket, self._name, self._granted, client)) def item_to_blob(self, item): - return _Blob(self.bucket, item['name']) + return _Blob(self.bucket, item["name"]) - NAME = 'name' - BLOB_NAME = 'blob-name' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after = {'acl': permissive, 'defaultObjectAcl': []} - connection = _Connection(after, {'items': [{'name': BLOB_NAME}]}) + NAME = "name" + BLOB_NAME = "blob-name" + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + after = {"acl": permissive, "defaultObjectAcl": []} + connection = _Connection(after, {"items": [{"name": BLOB_NAME}]}) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = True - with mock.patch('google.cloud.storage.bucket._item_to_blob', - new=item_to_blob): + with mock.patch("google.cloud.storage.bucket._item_to_blob", new=item_to_blob): bucket.make_public(recursive=True) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None)]) kw = connection._requested self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': permissive}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - self.assertEqual(kw[1]['method'], 'GET') - self.assertEqual(kw[1]['path'], '/b/%s/o' % NAME) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/%s" % NAME) + self.assertEqual(kw[0]["data"], {"acl": permissive}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) + self.assertEqual(kw[1]["method"], "GET") + self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME) max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1 - self.assertEqual(kw[1]['query_params'], - {'maxResults': max_results, 'projection': 'full'}) + self.assertEqual( + kw[1]["query_params"], {"maxResults": max_results, "projection": "full"} + ) def test_make_public_recursive_too_many(self): from google.cloud.storage.acl import _ACLEntity - PERMISSIVE = [{'entity': 
'allUsers', 'role': _ACLEntity.READER_ROLE}] - AFTER = {'acl': PERMISSIVE, 'defaultObjectAcl': []} + PERMISSIVE = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + AFTER = {"acl": PERMISSIVE, "defaultObjectAcl": []} - NAME = 'name' - BLOB_NAME1 = 'blob-name1' - BLOB_NAME2 = 'blob-name2' - GET_BLOBS_RESP = { - 'items': [ - {'name': BLOB_NAME1}, - {'name': BLOB_NAME2}, - ], - } + NAME = "name" + BLOB_NAME1 = "blob-name1" + BLOB_NAME2 = "blob-name2" + GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} connection = _Connection(AFTER, GET_BLOBS_RESP) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) @@ -2154,9 +1994,9 @@ def test_make_public_recursive_too_many(self): self.assertRaises(ValueError, bucket.make_public, recursive=True) def test_make_private_defaults(self): - NAME = 'name' + NAME = "name" no_permissions = [] - after = {'acl': no_permissions, 'defaultObjectAcl': []} + after = {"acl": no_permissions, "defaultObjectAcl": []} connection = _Connection(after) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) @@ -2167,16 +2007,16 @@ def test_make_private_defaults(self): self.assertEqual(list(bucket.default_object_acl), []) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': after['acl']}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/%s" % NAME) + self.assertEqual(kw[0]["data"], {"acl": after["acl"]}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) def _make_private_w_future_helper(self, default_object_acl_loaded=True): - NAME = 'name' + NAME = "name" no_permissions = [] - after1 = {'acl': no_permissions, 'defaultObjectAcl': []} - after2 = {'acl': no_permissions, 'defaultObjectAcl': no_permissions} + after1 = {"acl": no_permissions, "defaultObjectAcl": []} + after2 = {"acl": no_permissions, "defaultObjectAcl": no_permissions} if default_object_acl_loaded: num_requests = 2 connection = _Connection(after1, after2) @@ -2194,18 +2034,18 @@ def _make_private_w_future_helper(self, default_object_acl_loaded=True): self.assertEqual(list(bucket.default_object_acl), no_permissions) kw = connection._requested self.assertEqual(len(kw), num_requests) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': no_permissions}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/%s" % NAME) + self.assertEqual(kw[0]["data"], {"acl": no_permissions}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) if not default_object_acl_loaded: - self.assertEqual(kw[1]['method'], 'GET') - self.assertEqual(kw[1]['path'], '/b/%s/defaultObjectAcl' % NAME) + self.assertEqual(kw[1]["method"], "GET") + self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME) # Last could be 1 or 2 depending on `default_object_acl_loaded`. 
- self.assertEqual(kw[-1]['method'], 'PATCH') - self.assertEqual(kw[-1]['path'], '/b/%s' % NAME) - self.assertEqual(kw[-1]['data'], {'defaultObjectAcl': no_permissions}) - self.assertEqual(kw[-1]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[-1]["method"], "PATCH") + self.assertEqual(kw[-1]["path"], "/b/%s" % NAME) + self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": no_permissions}) + self.assertEqual(kw[-1]["query_params"], {"projection": "full"}) def test_make_private_w_future(self): self._make_private_w_future_helper(default_object_acl_loaded=True) @@ -2235,53 +2075,47 @@ def revoke_read(self): self._granted = False def save(self, client=None): - _saved.append( - (self._bucket, self._name, self._granted, client)) + _saved.append((self._bucket, self._name, self._granted, client)) def item_to_blob(self, item): - return _Blob(self.bucket, item['name']) + return _Blob(self.bucket, item["name"]) - NAME = 'name' - BLOB_NAME = 'blob-name' + NAME = "name" + BLOB_NAME = "blob-name" no_permissions = [] - after = {'acl': no_permissions, 'defaultObjectAcl': []} - connection = _Connection(after, {'items': [{'name': BLOB_NAME}]}) + after = {"acl": no_permissions, "defaultObjectAcl": []} + connection = _Connection(after, {"items": [{"name": BLOB_NAME}]}) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = True - with mock.patch('google.cloud.storage.bucket._item_to_blob', - new=item_to_blob): + with mock.patch("google.cloud.storage.bucket._item_to_blob", new=item_to_blob): bucket.make_private(recursive=True) self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), []) self.assertEqual(_saved, [(bucket, BLOB_NAME, False, None)]) kw = connection._requested self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': no_permissions}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - self.assertEqual(kw[1]['method'], 'GET') - self.assertEqual(kw[1]['path'], '/b/%s/o' % NAME) + self.assertEqual(kw[0]["method"], "PATCH") + self.assertEqual(kw[0]["path"], "/b/%s" % NAME) + self.assertEqual(kw[0]["data"], {"acl": no_permissions}) + self.assertEqual(kw[0]["query_params"], {"projection": "full"}) + self.assertEqual(kw[1]["method"], "GET") + self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME) max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1 - self.assertEqual(kw[1]['query_params'], - {'maxResults': max_results, 'projection': 'full'}) + self.assertEqual( + kw[1]["query_params"], {"maxResults": max_results, "projection": "full"} + ) def test_make_private_recursive_too_many(self): NO_PERMISSIONS = [] - AFTER = {'acl': NO_PERMISSIONS, 'defaultObjectAcl': []} - - NAME = 'name' - BLOB_NAME1 = 'blob-name1' - BLOB_NAME2 = 'blob-name2' - GET_BLOBS_RESP = { - 'items': [ - {'name': BLOB_NAME1}, - {'name': BLOB_NAME2}, - ], - } + AFTER = {"acl": NO_PERMISSIONS, "defaultObjectAcl": []} + + NAME = "name" + BLOB_NAME1 = "blob-name1" + BLOB_NAME2 = "blob-name2" + GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} connection = _Connection(AFTER, GET_BLOBS_RESP) client = _Client(connection) bucket = self._make_one(client=client, name=NAME) @@ -2297,7 +2131,7 @@ def test_page_empty_response(self): connection = _Connection() client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) 
iterator = bucket.list_blobs() page = page_iterator.Page(iterator, (), None) @@ -2310,11 +2144,11 @@ def test_page_non_empty_response(self): import six from google.cloud.storage.blob import Blob - blob_name = 'blob-name' - response = {'items': [{'name': blob_name}], 'prefixes': ['foo']} + blob_name = "blob-name" + response = {"items": [{"name": blob_name}], "prefixes": ["foo"]} connection = _Connection() client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) def dummy_response(): @@ -2324,31 +2158,28 @@ def dummy_response(): iterator._get_next_page_response = dummy_response page = six.next(iterator.pages) - self.assertEqual(page.prefixes, ('foo',)) + self.assertEqual(page.prefixes, ("foo",)) self.assertEqual(page.num_items, 1) blob = six.next(page) self.assertEqual(page.remaining, 0) self.assertIsInstance(blob, Blob) self.assertEqual(blob.name, blob_name) - self.assertEqual(iterator.prefixes, set(['foo'])) + self.assertEqual(iterator.prefixes, set(["foo"])) def test_cumulative_prefixes(self): import six from google.cloud.storage.blob import Blob - BLOB_NAME = 'blob-name1' + BLOB_NAME = "blob-name1" response1 = { - 'items': [{'name': BLOB_NAME}], - 'prefixes': ['foo'], - 'nextPageToken': 's39rmf9', - } - response2 = { - 'items': [], - 'prefixes': ['bar'], + "items": [{"name": BLOB_NAME}], + "prefixes": ["foo"], + "nextPageToken": "s39rmf9", } + response2 = {"items": [], "prefixes": ["bar"]} connection = _Connection() client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) responses = [response1, response2] @@ -2361,18 +2192,18 @@ def dummy_response(): # Parse first response. pages_iter = iterator.pages page1 = six.next(pages_iter) - self.assertEqual(page1.prefixes, ('foo',)) + self.assertEqual(page1.prefixes, ("foo",)) self.assertEqual(page1.num_items, 1) blob = six.next(page1) self.assertEqual(page1.remaining, 0) self.assertIsInstance(blob, Blob) self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(iterator.prefixes, set(['foo'])) + self.assertEqual(iterator.prefixes, set(["foo"])) # Parse second response. 
page2 = six.next(pages_iter) - self.assertEqual(page2.prefixes, ('bar',)) + self.assertEqual(page2.prefixes, ("bar",)) self.assertEqual(page2.num_items, 0) - self.assertEqual(iterator.prefixes, set(['foo', 'bar'])) + self.assertEqual(iterator.prefixes, set(["foo", "bar"])) def _test_generate_upload_policy_helper(self, **kwargs): import base64 @@ -2380,71 +2211,68 @@ def _test_generate_upload_policy_helper(self, **kwargs): credentials = _create_signing_credentials() credentials.signer_email = mock.sentinel.signer_email - credentials.sign_bytes.return_value = b'DEADBEEF' + credentials.sign_bytes.return_value = b"DEADBEEF" connection = _Connection() connection.credentials = credentials client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) - conditions = [ - ['starts-with', '$key', '']] + conditions = [["starts-with", "$key", ""]] policy_fields = bucket.generate_upload_policy(conditions, **kwargs) - self.assertEqual(policy_fields['bucket'], bucket.name) + self.assertEqual(policy_fields["bucket"], bucket.name) + self.assertEqual(policy_fields["GoogleAccessId"], mock.sentinel.signer_email) self.assertEqual( - policy_fields['GoogleAccessId'], mock.sentinel.signer_email) - self.assertEqual( - policy_fields['signature'], - base64.b64encode(b'DEADBEEF').decode('utf-8')) + policy_fields["signature"], base64.b64encode(b"DEADBEEF").decode("utf-8") + ) - policy = json.loads( - base64.b64decode(policy_fields['policy']).decode('utf-8')) + policy = json.loads(base64.b64decode(policy_fields["policy"]).decode("utf-8")) - policy_conditions = policy['conditions'] - expected_conditions = [{'bucket': bucket.name}] + conditions + policy_conditions = policy["conditions"] + expected_conditions = [{"bucket": bucket.name}] + conditions for expected_condition in expected_conditions: for condition in policy_conditions: if condition == expected_condition: break else: # pragma: NO COVER - self.fail('Condition {} not found in {}'.format( - expected_condition, policy_conditions)) + self.fail( + "Condition {} not found in {}".format( + expected_condition, policy_conditions + ) + ) return policy_fields, policy @mock.patch( - 'google.cloud.storage.bucket._NOW', - return_value=datetime.datetime(1990, 1, 1)) + "google.cloud.storage.bucket._NOW", return_value=datetime.datetime(1990, 1, 1) + ) def test_generate_upload_policy(self, now): from google.cloud._helpers import _datetime_to_rfc3339 _, policy = self._test_generate_upload_policy_helper() self.assertEqual( - policy['expiration'], - _datetime_to_rfc3339( - now() + datetime.timedelta(hours=1))) + policy["expiration"], + _datetime_to_rfc3339(now() + datetime.timedelta(hours=1)), + ) def test_generate_upload_policy_args(self): from google.cloud._helpers import _datetime_to_rfc3339 expiration = datetime.datetime(1990, 5, 29) - _, policy = self._test_generate_upload_policy_helper( - expiration=expiration) + _, policy = self._test_generate_upload_policy_helper(expiration=expiration) - self.assertEqual( - policy['expiration'], - _datetime_to_rfc3339(expiration)) + self.assertEqual(policy["expiration"], _datetime_to_rfc3339(expiration)) def test_generate_upload_policy_bad_credentials(self): credentials = object() connection = _Connection() connection.credentials = credentials client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) with self.assertRaises(AttributeError): @@ -2455,9 +2283,9 @@ def test_lock_retention_policy_no_policy_set(self): connection = _Connection() 
connection.credentials = credentials client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) - bucket._properties['metageneration'] = 1234 + bucket._properties["metageneration"] = 1234 with self.assertRaises(ValueError): bucket.lock_retention_policy() @@ -2467,11 +2295,11 @@ def test_lock_retention_policy_no_metageneration(self): connection = _Connection() connection.credentials = credentials client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) - bucket._properties['retentionPolicy'] = { - 'effectiveTime': '2018-03-01T16:46:27.123456Z', - 'retentionPeriod': 86400 * 100, # 100 days + bucket._properties["retentionPolicy"] = { + "effectiveTime": "2018-03-01T16:46:27.123456Z", + "retentionPeriod": 86400 * 100, # 100 days } with self.assertRaises(ValueError): @@ -2482,27 +2310,27 @@ def test_lock_retention_policy_already_locked(self): connection = _Connection() connection.credentials = credentials client = _Client(connection) - name = 'name' + name = "name" bucket = self._make_one(client=client, name=name) - bucket._properties['metageneration'] = 1234 - bucket._properties['retentionPolicy'] = { - 'effectiveTime': '2018-03-01T16:46:27.123456Z', - 'isLocked': True, - 'retentionPeriod': 86400 * 100, # 100 days + bucket._properties["metageneration"] = 1234 + bucket._properties["retentionPolicy"] = { + "effectiveTime": "2018-03-01T16:46:27.123456Z", + "isLocked": True, + "retentionPeriod": 86400 * 100, # 100 days } with self.assertRaises(ValueError): bucket.lock_retention_policy() def test_lock_retention_policy_ok(self): - name = 'name' + name = "name" response = { - 'name': name, - 'metageneration': 1235, - 'retentionPolicy': { - 'effectiveTime': '2018-03-01T16:46:27.123456Z', - 'isLocked': True, - 'retentionPeriod': 86400 * 100, # 100 days + "name": name, + "metageneration": 1235, + "retentionPolicy": { + "effectiveTime": "2018-03-01T16:46:27.123456Z", + "isLocked": True, + "retentionPeriod": 86400 * 100, # 100 days }, } credentials = object() @@ -2510,53 +2338,51 @@ def test_lock_retention_policy_ok(self): connection.credentials = credentials client = _Client(connection) bucket = self._make_one(client=client, name=name) - bucket._properties['metageneration'] = 1234 - bucket._properties['retentionPolicy'] = { - 'effectiveTime': '2018-03-01T16:46:27.123456Z', - 'retentionPeriod': 86400 * 100, # 100 days + bucket._properties["metageneration"] = 1234 + bucket._properties["retentionPolicy"] = { + "effectiveTime": "2018-03-01T16:46:27.123456Z", + "retentionPeriod": 86400 * 100, # 100 days } bucket.lock_retention_policy() kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], '/b/{}/lockRetentionPolicy'.format(name)) - self.assertEqual(kw['query_params'], {'ifMetagenerationMatch': 1234}) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b/{}/lockRetentionPolicy".format(name)) + self.assertEqual(kw["query_params"], {"ifMetagenerationMatch": 1234}) def test_lock_retention_policy_w_user_project(self): - name = 'name' - user_project = 'user-project-123' + name = "name" + user_project = "user-project-123" response = { - 'name': name, - 'metageneration': 1235, - 'retentionPolicy': { - 'effectiveTime': '2018-03-01T16:46:27.123456Z', - 'isLocked': True, - 'retentionPeriod': 86400 * 100, # 100 days + "name": name, + "metageneration": 1235, + "retentionPolicy": { + "effectiveTime": "2018-03-01T16:46:27.123456Z", + "isLocked": True, + 
"retentionPeriod": 86400 * 100, # 100 days }, } credentials = object() connection = _Connection(response) connection.credentials = credentials client = _Client(connection) - bucket = self._make_one( - client=client, name=name, user_project=user_project) - bucket._properties['metageneration'] = 1234 - bucket._properties['retentionPolicy'] = { - 'effectiveTime': '2018-03-01T16:46:27.123456Z', - 'retentionPeriod': 86400 * 100, # 100 days + bucket = self._make_one(client=client, name=name, user_project=user_project) + bucket._properties["metageneration"] = 1234 + bucket._properties["retentionPolicy"] = { + "effectiveTime": "2018-03-01T16:46:27.123456Z", + "retentionPeriod": 86400 * 100, # 100 days } bucket.lock_retention_policy() kw, = connection._requested - self.assertEqual(kw['method'], 'POST') - self.assertEqual(kw['path'], '/b/{}/lockRetentionPolicy'.format(name)) + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b/{}/lockRetentionPolicy".format(name)) self.assertEqual( - kw['query_params'], { - 'ifMetagenerationMatch': 1234, - 'userProject': user_project, - }) + kw["query_params"], + {"ifMetagenerationMatch": 1234, "userProject": user_project}, + ) class _Connection(object): @@ -2571,32 +2397,31 @@ def __init__(self, *responses): @staticmethod def _is_bucket_path(path): # Now just ensure the path only has /b/ and one more segment. - return path.startswith('/b/') and path.count('/') == 2 + return path.startswith("/b/") and path.count("/") == 2 def api_request(self, **kw): from google.cloud.exceptions import NotFound self._requested.append(kw) - method = kw.get('method') - path = kw.get('path', '') - if method == 'DELETE' and self._is_bucket_path(path): + method = kw.get("method") + path = kw.get("path", "") + if method == "DELETE" and self._is_bucket_path(path): self._deleted_buckets.append(kw) if self._delete_bucket: return else: - raise NotFound('miss') + raise NotFound("miss") try: response, self._responses = self._responses[0], self._responses[1:] except IndexError: - raise NotFound('miss') + raise NotFound("miss") else: return response class _Client(object): - def __init__(self, connection, project=None): self._connection = connection self._base_connection = connection diff --git a/storage/tests/unit/test_client.py b/storage/tests/unit/test_client.py index 16808ec9b4e1..d4afaf0b35bf 100644 --- a/storage/tests/unit/test_client.py +++ b/storage/tests/unit/test_client.py @@ -26,7 +26,7 @@ def _make_credentials(): return mock.Mock(spec=google.auth.credentials.Credentials) -def _make_response(status=http_client.OK, content=b'', headers={}): +def _make_response(status=http_client.OK, content=b"", headers={}): response = requests.Response() response.status_code = status response._content = content @@ -37,11 +37,10 @@ def _make_response(status=http_client.OK, content=b'', headers={}): def _make_json_response(data, status=http_client.OK, headers=None): headers = headers or {} - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" return _make_response( - status=status, - content=json.dumps(data).encode('utf-8'), - headers=headers) + status=status, content=json.dumps(data).encode("utf-8"), headers=headers + ) def _make_requests_session(responses): @@ -51,7 +50,6 @@ def _make_requests_session(responses): class TestClient(unittest.TestCase): - @staticmethod def _get_target_class(): from google.cloud.storage.client import Client @@ -64,7 +62,7 @@ def _make_one(self, *args, **kw): def test_ctor_connection_type(self): from 
google.cloud.storage._http import Connection - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) @@ -78,12 +76,12 @@ def test_ctor_connection_type(self): def test_ctor_wo_project(self): from google.cloud.storage._http import Connection - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() ddp_patch = mock.patch( - 'google.cloud.client._determine_default_project', - return_value=PROJECT) + "google.cloud.client._determine_default_project", return_value=PROJECT + ) with ddp_patch: client = self._make_one(credentials=CREDENTIALS) @@ -116,13 +114,12 @@ def test_create_anonymous_client(self): self.assertIsNone(client.project) self.assertIsInstance(client._connection, Connection) - self.assertIsInstance( - client._connection.credentials, AnonymousCredentials) + self.assertIsInstance(client._connection.credentials, AnonymousCredentials) def test__push_batch_and__pop_batch(self): from google.cloud.storage.batch import Batch - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) @@ -141,7 +138,7 @@ def test__push_batch_and__pop_batch(self): self.assertEqual(list(client._batch_stack), []) def test__connection_setter(self): - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) client._base_connection = None # Unset the value from the constructor @@ -149,13 +146,13 @@ def test__connection_setter(self): self.assertIs(client._base_connection, connection) def test__connection_setter_when_set(self): - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - self.assertRaises(ValueError, setattr, client, '_connection', None) + self.assertRaises(ValueError, setattr, client, "_connection", None) def test__connection_getter_no_batch(self): - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) self.assertIs(client._connection, client._base_connection) @@ -164,7 +161,7 @@ def test__connection_getter_no_batch(self): def test__connection_getter_with_batch(self): from google.cloud.storage.batch import Batch - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) batch = Batch(client) @@ -174,65 +171,62 @@ def test__connection_getter_with_batch(self): self.assertIs(client.current_batch, batch) def test_get_service_account_email_wo_project(self): - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() - EMAIL = 'storage-user-123@example.com' - RESOURCE = { - 'kind': 'storage#serviceAccount', - 'email_address': EMAIL, - } + EMAIL = "storage-user-123@example.com" + RESOURCE = {"kind": "storage#serviceAccount", "email_address": EMAIL} client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - http = _make_requests_session([ - _make_json_response(RESOURCE)]) + http = _make_requests_session([_make_json_response(RESOURCE)]) client._http_internal = http service_account_email = client.get_service_account_email() self.assertEqual(service_account_email, EMAIL) - URI = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'projects/%s/serviceAccount' % (PROJECT,) - ]) + URI = "/".join( + [ + 
client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "projects/%s/serviceAccount" % (PROJECT,), + ] + ) http.request.assert_called_once_with( - method='GET', url=URI, data=None, headers=mock.ANY) + method="GET", url=URI, data=None, headers=mock.ANY + ) def test_get_service_account_email_w_project(self): - PROJECT = 'PROJECT' - OTHER_PROJECT = 'OTHER_PROJECT' + PROJECT = "PROJECT" + OTHER_PROJECT = "OTHER_PROJECT" CREDENTIALS = _make_credentials() - EMAIL = 'storage-user-123@example.com' - RESOURCE = { - 'kind': 'storage#serviceAccount', - 'email_address': EMAIL, - } + EMAIL = "storage-user-123@example.com" + RESOURCE = {"kind": "storage#serviceAccount", "email_address": EMAIL} client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - http = _make_requests_session([ - _make_json_response(RESOURCE)]) + http = _make_requests_session([_make_json_response(RESOURCE)]) client._http_internal = http - service_account_email = client.get_service_account_email( - project=OTHER_PROJECT) + service_account_email = client.get_service_account_email(project=OTHER_PROJECT) self.assertEqual(service_account_email, EMAIL) - URI = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'projects/%s/serviceAccount' % (OTHER_PROJECT,) - ]) + URI = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "projects/%s/serviceAccount" % (OTHER_PROJECT,), + ] + ) http.request.assert_called_once_with( - method='GET', url=URI, data=None, headers=mock.ANY) + method="GET", url=URI, data=None, headers=mock.ANY + ) def test_bucket(self): from google.cloud.storage.bucket import Bucket - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() - BUCKET_NAME = 'BUCKET_NAME' + BUCKET_NAME = "BUCKET_NAME" client = self._make_one(project=PROJECT, credentials=CREDENTIALS) bucket = client.bucket(BUCKET_NAME) @@ -244,10 +238,10 @@ def test_bucket(self): def test_bucket_w_user_project(self): from google.cloud.storage.bucket import Bucket - PROJECT = 'PROJECT' - USER_PROJECT = 'USER_PROJECT' + PROJECT = "PROJECT" + USER_PROJECT = "USER_PROJECT" CREDENTIALS = _make_credentials() - BUCKET_NAME = 'BUCKET_NAME' + BUCKET_NAME = "BUCKET_NAME" client = self._make_one(project=PROJECT, credentials=CREDENTIALS) bucket = client.bucket(BUCKET_NAME, user_project=USER_PROJECT) @@ -259,7 +253,7 @@ def test_bucket_w_user_project(self): def test_batch(self): from google.cloud.storage.batch import Batch - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) @@ -270,45 +264,51 @@ def test_batch(self): def test_get_bucket_miss(self): from google.cloud.exceptions import NotFound - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - NONESUCH = 'nonesuch' - URI = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b', - 'nonesuch?projection=noAcl', - ]) - http = _make_requests_session([ - _make_json_response({}, status=http_client.NOT_FOUND)]) + NONESUCH = "nonesuch" + URI = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b", + "nonesuch?projection=noAcl", + ] + ) + http = _make_requests_session( + [_make_json_response({}, status=http_client.NOT_FOUND)] + ) client._http_internal = http with self.assertRaises(NotFound): client.get_bucket(NONESUCH) 
http.request.assert_called_once_with( - method='GET', url=URI, data=mock.ANY, headers=mock.ANY) + method="GET", url=URI, data=mock.ANY, headers=mock.ANY + ) def test_get_bucket_hit(self): from google.cloud.storage.bucket import Bucket - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' - URI = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b', - '%s?projection=noAcl' % (BUCKET_NAME,), - ]) + BUCKET_NAME = "bucket-name" + URI = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b", + "%s?projection=noAcl" % (BUCKET_NAME,), + ] + ) - data = {'name': BUCKET_NAME} + data = {"name": BUCKET_NAME} http = _make_requests_session([_make_json_response(data)]) client._http_internal = http @@ -317,47 +317,54 @@ def test_get_bucket_hit(self): self.assertIsInstance(bucket, Bucket) self.assertEqual(bucket.name, BUCKET_NAME) http.request.assert_called_once_with( - method='GET', url=URI, data=mock.ANY, headers=mock.ANY) + method="GET", url=URI, data=mock.ANY, headers=mock.ANY + ) def test_lookup_bucket_miss(self): - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - NONESUCH = 'nonesuch' - URI = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b', - 'nonesuch?projection=noAcl', - ]) - http = _make_requests_session([ - _make_json_response({}, status=http_client.NOT_FOUND)]) + NONESUCH = "nonesuch" + URI = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b", + "nonesuch?projection=noAcl", + ] + ) + http = _make_requests_session( + [_make_json_response({}, status=http_client.NOT_FOUND)] + ) client._http_internal = http bucket = client.lookup_bucket(NONESUCH) self.assertIsNone(bucket) http.request.assert_called_once_with( - method='GET', url=URI, data=mock.ANY, headers=mock.ANY) + method="GET", url=URI, data=mock.ANY, headers=mock.ANY + ) def test_lookup_bucket_hit(self): from google.cloud.storage.bucket import Bucket - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' - URI = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b', - '%s?projection=noAcl' % (BUCKET_NAME,), - ]) - data = {'name': BUCKET_NAME} + BUCKET_NAME = "bucket-name" + URI = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b", + "%s?projection=noAcl" % (BUCKET_NAME,), + ] + ) + data = {"name": BUCKET_NAME} http = _make_requests_session([_make_json_response(data)]) client._http_internal = http @@ -366,52 +373,59 @@ def test_lookup_bucket_hit(self): self.assertIsInstance(bucket, Bucket) self.assertEqual(bucket.name, BUCKET_NAME) http.request.assert_called_once_with( - method='GET', url=URI, data=mock.ANY, headers=mock.ANY) + method="GET", url=URI, data=mock.ANY, headers=mock.ANY + ) def test_create_bucket_conflict(self): from google.cloud.exceptions import Conflict - PROJECT = 'PROJECT' - OTHER_PROJECT = 'OTHER_PROJECT' + PROJECT = "PROJECT" + OTHER_PROJECT = "OTHER_PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' - URI = '/'.join([ - 
client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b?project=%s' % (OTHER_PROJECT,), - ]) - data = {'error': {'message': 'Conflict'}} - sent = {'name': BUCKET_NAME} - http = _make_requests_session([ - _make_json_response(data, status=http_client.CONFLICT)]) + BUCKET_NAME = "bucket-name" + URI = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b?project=%s" % (OTHER_PROJECT,), + ] + ) + data = {"error": {"message": "Conflict"}} + sent = {"name": BUCKET_NAME} + http = _make_requests_session( + [_make_json_response(data, status=http_client.CONFLICT)] + ) client._http_internal = http with self.assertRaises(Conflict): client.create_bucket(BUCKET_NAME, project=OTHER_PROJECT) http.request.assert_called_once_with( - method='POST', url=URI, data=mock.ANY, headers=mock.ANY) - json_sent = http.request.call_args_list[0][1]['data'] + method="POST", url=URI, data=mock.ANY, headers=mock.ANY + ) + json_sent = http.request.call_args_list[0][1]["data"] self.assertEqual(sent, json.loads(json_sent)) def test_create_bucket_success(self): from google.cloud.storage.bucket import Bucket - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' - URI = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b?project=%s' % (PROJECT,), - ]) - sent = {'name': BUCKET_NAME, 'billing': {'requesterPays': True}} + BUCKET_NAME = "bucket-name" + URI = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b?project=%s" % (PROJECT,), + ] + ) + sent = {"name": BUCKET_NAME, "billing": {"requesterPays": True}} data = sent http = _make_requests_session([_make_json_response(data)]) client._http_internal = http @@ -422,8 +436,9 @@ def test_create_bucket_success(self): self.assertEqual(bucket.name, BUCKET_NAME) self.assertTrue(bucket.requester_pays) http.request.assert_called_once_with( - method='POST', url=URI, data=mock.ANY, headers=mock.ANY) - json_sent = http.request.call_args_list[0][1]['data'] + method="POST", url=URI, data=mock.ANY, headers=mock.ANY + ) + json_sent = http.request.call_args_list[0][1]["data"] self.assertEqual(sent, json.loads(json_sent)) def test_list_buckets_wo_project(self): @@ -437,7 +452,7 @@ def test_list_buckets_empty(self): from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlparse - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) @@ -449,21 +464,21 @@ def test_list_buckets_empty(self): self.assertEqual(len(buckets), 0) http.request.assert_called_once_with( - method='GET', url=mock.ANY, data=mock.ANY, headers=mock.ANY) - - requested_url = http.request.mock_calls[0][2]['url'] - expected_base_url = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b', - ]) + method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY + ) + + requested_url = http.request.mock_calls[0][2]["url"] + expected_base_url = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b", + ] + ) self.assertTrue(requested_url.startswith(expected_base_url)) - expected_query = { - 'project': [PROJECT], - 'projection': ['noAcl'], - } + expected_query = {"project": [PROJECT], "projection": ["noAcl"]} uri_parts = urlparse(requested_url) 
self.assertEqual(parse_qs(uri_parts.query), expected_query) @@ -471,8 +486,8 @@ def test_list_buckets_explicit_project(self): from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlparse - PROJECT = 'PROJECT' - OTHER_PROJECT = 'OTHER_PROJECT' + PROJECT = "PROJECT" + OTHER_PROJECT = "OTHER_PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) @@ -484,32 +499,32 @@ def test_list_buckets_explicit_project(self): self.assertEqual(len(buckets), 0) http.request.assert_called_once_with( - method='GET', url=mock.ANY, data=mock.ANY, headers=mock.ANY) - - requested_url = http.request.mock_calls[0][2]['url'] - expected_base_url = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b', - ]) + method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY + ) + + requested_url = http.request.mock_calls[0][2]["url"] + expected_base_url = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b", + ] + ) self.assertTrue(requested_url.startswith(expected_base_url)) - expected_query = { - 'project': [OTHER_PROJECT], - 'projection': ['noAcl'], - } + expected_query = {"project": [OTHER_PROJECT], "projection": ["noAcl"]} uri_parts = urlparse(requested_url) self.assertEqual(parse_qs(uri_parts.query), expected_query) def test_list_buckets_non_empty(self): - PROJECT = 'PROJECT' + PROJECT = "PROJECT" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = 'bucket-name' + BUCKET_NAME = "bucket-name" - data = {'items': [{'name': BUCKET_NAME}]} + data = {"items": [{"name": BUCKET_NAME}]} http = _make_requests_session([_make_json_response(data)]) client._http_internal = http @@ -519,23 +534,24 @@ def test_list_buckets_non_empty(self): self.assertEqual(buckets[0].name, BUCKET_NAME) http.request.assert_called_once_with( - method='GET', url=mock.ANY, data=mock.ANY, headers=mock.ANY) + method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY + ) def test_list_buckets_all_arguments(self): from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlparse - PROJECT = 'foo-bar' + PROJECT = "foo-bar" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) MAX_RESULTS = 10 - PAGE_TOKEN = 'ABCD' - PREFIX = 'subfolder' - PROJECTION = 'full' - FIELDS = 'items/id,nextPageToken' + PAGE_TOKEN = "ABCD" + PREFIX = "subfolder" + PROJECTION = "full" + FIELDS = "items/id,nextPageToken" - data = {'items': []} + data = {"items": []} http = _make_requests_session([_make_json_response(data)]) client._http_internal = http iterator = client.list_buckets( @@ -548,24 +564,27 @@ def test_list_buckets_all_arguments(self): buckets = list(iterator) self.assertEqual(buckets, []) http.request.assert_called_once_with( - method='GET', url=mock.ANY, data=mock.ANY, headers=mock.ANY) - - requested_url = http.request.mock_calls[0][2]['url'] - expected_base_url = '/'.join([ - client._connection.API_BASE_URL, - 'storage', - client._connection.API_VERSION, - 'b', - ]) + method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY + ) + + requested_url = http.request.mock_calls[0][2]["url"] + expected_base_url = "/".join( + [ + client._connection.API_BASE_URL, + "storage", + client._connection.API_VERSION, + "b", + ] + ) self.assertTrue(requested_url.startswith(expected_base_url)) expected_query = { - 'project': [PROJECT], - 'maxResults': [str(MAX_RESULTS)], - 'pageToken': 
[PAGE_TOKEN], - 'prefix': [PREFIX], - 'projection': [PROJECTION], - 'fields': [FIELDS], + "project": [PROJECT], + "maxResults": [str(MAX_RESULTS)], + "pageToken": [PAGE_TOKEN], + "prefix": [PREFIX], + "projection": [PROJECTION], + "fields": [FIELDS], } uri_parts = urlparse(requested_url) self.assertEqual(parse_qs(uri_parts.query), expected_query) @@ -573,7 +592,7 @@ def test_list_buckets_all_arguments(self): def test_page_empty_response(self): from google.api_core import page_iterator - project = 'PROJECT' + project = "PROJECT" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) iterator = client.list_buckets() @@ -585,12 +604,12 @@ def test_page_non_empty_response(self): import six from google.cloud.storage.bucket import Bucket - project = 'PROJECT' + project = "PROJECT" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - blob_name = 'bucket-name' - response = {'items': [{'name': blob_name}]} + blob_name = "bucket-name" + response = {"items": [{"name": blob_name}]} def dummy_response(): return response diff --git a/storage/tests/unit/test_notification.py b/storage/tests/unit/test_notification.py index 9004decc94dc..29b376b57496 100644 --- a/storage/tests/unit/test_notification.py +++ b/storage/tests/unit/test_notification.py @@ -19,37 +19,35 @@ class TestBucketNotification(unittest.TestCase): - BUCKET_NAME = 'test-bucket' - BUCKET_PROJECT = 'bucket-project-123' - TOPIC_NAME = 'test-topic' - TOPIC_ALT_PROJECT = 'topic-project-456' - TOPIC_REF_FMT = '//pubsub.googleapis.com/projects/{}/topics/{}' + BUCKET_NAME = "test-bucket" + BUCKET_PROJECT = "bucket-project-123" + TOPIC_NAME = "test-topic" + TOPIC_ALT_PROJECT = "topic-project-456" + TOPIC_REF_FMT = "//pubsub.googleapis.com/projects/{}/topics/{}" TOPIC_REF = TOPIC_REF_FMT.format(BUCKET_PROJECT, TOPIC_NAME) TOPIC_ALT_REF = TOPIC_REF_FMT.format(TOPIC_ALT_PROJECT, TOPIC_NAME) - CUSTOM_ATTRIBUTES = { - 'attr1': 'value1', - 'attr2': 'value2', - } - BLOB_NAME_PREFIX = 'blob-name-prefix/' - NOTIFICATION_ID = '123' - SELF_LINK = 'https://example.com/notification/123' - ETAG = 'DEADBEEF' - CREATE_PATH = '/b/{}/notificationConfigs'.format(BUCKET_NAME) - NOTIFICATION_PATH = '/b/{}/notificationConfigs/{}'.format( - BUCKET_NAME, NOTIFICATION_ID) + CUSTOM_ATTRIBUTES = {"attr1": "value1", "attr2": "value2"} + BLOB_NAME_PREFIX = "blob-name-prefix/" + NOTIFICATION_ID = "123" + SELF_LINK = "https://example.com/notification/123" + ETAG = "DEADBEEF" + CREATE_PATH = "/b/{}/notificationConfigs".format(BUCKET_NAME) + NOTIFICATION_PATH = "/b/{}/notificationConfigs/{}".format( + BUCKET_NAME, NOTIFICATION_ID + ) @staticmethod def event_types(): from google.cloud.storage.notification import ( OBJECT_FINALIZE_EVENT_TYPE, - OBJECT_DELETE_EVENT_TYPE) + OBJECT_DELETE_EVENT_TYPE, + ) return [OBJECT_FINALIZE_EVENT_TYPE, OBJECT_DELETE_EVENT_TYPE] @staticmethod def payload_format(): - from google.cloud.storage.notification import ( - JSON_API_V1_PAYLOAD_FORMAT) + from google.cloud.storage.notification import JSON_API_V1_PAYLOAD_FORMAT return JSON_API_V1_PAYLOAD_FORMAT @@ -68,7 +66,7 @@ def _make_client(self, project=BUCKET_PROJECT): return mock.Mock(project=project, spec=Client) def _make_bucket(self, client, name=BUCKET_NAME, user_project=None): - bucket = mock.Mock(spec=['client', 'name', 'user_project']) + bucket = mock.Mock(spec=["client", "name", "user_project"]) bucket.client = client bucket.name = name bucket.user_project = user_project @@ -87,8 +85,7 @@ def 
test_ctor_defaults(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) self.assertIs(notification.bucket, bucket) self.assertEqual(notification.topic_name, self.TOPIC_NAME) @@ -103,7 +100,8 @@ def test_ctor_explicit(self): bucket = self._make_bucket(client) notification = self._make_one( - bucket, self.TOPIC_NAME, + bucket, + self.TOPIC_NAME, topic_project=self.TOPIC_ALT_PROJECT, custom_attributes=self.CUSTOM_ATTRIBUTES, event_types=self.event_types(), @@ -114,12 +112,10 @@ def test_ctor_explicit(self): self.assertIs(notification.bucket, bucket) self.assertEqual(notification.topic_name, self.TOPIC_NAME) self.assertEqual(notification.topic_project, self.TOPIC_ALT_PROJECT) - self.assertEqual( - notification.custom_attributes, self.CUSTOM_ATTRIBUTES) + self.assertEqual(notification.custom_attributes, self.CUSTOM_ATTRIBUTES) self.assertEqual(notification.event_types, self.event_types()) self.assertEqual(notification.blob_name_prefix, self.BLOB_NAME_PREFIX) - self.assertEqual( - notification.payload_format, self.payload_format()) + self.assertEqual(notification.payload_format, self.payload_format()) def test_from_api_repr_no_topic(self): klass = self._get_target_class() @@ -134,9 +130,7 @@ def test_from_api_repr_invalid_topic(self): klass = self._get_target_class() client = self._make_client() bucket = self._make_bucket(client) - resource = { - 'topic': '@#$%', - } + resource = {"topic": "@#$%"} with self.assertRaises(ValueError): klass.from_api_repr(resource, bucket=bucket) @@ -148,11 +142,11 @@ def test_from_api_repr_minimal(self): client = self._make_client() bucket = self._make_bucket(client) resource = { - 'topic': self.TOPIC_REF, - 'id': self.NOTIFICATION_ID, - 'etag': self.ETAG, - 'selfLink': self.SELF_LINK, - 'payload_format': NONE_PAYLOAD_FORMAT, + "topic": self.TOPIC_REF, + "id": self.NOTIFICATION_ID, + "etag": self.ETAG, + "selfLink": self.SELF_LINK, + "payload_format": NONE_PAYLOAD_FORMAT, } notification = klass.from_api_repr(resource, bucket=bucket) @@ -172,14 +166,14 @@ def test_from_api_repr_explicit(self): client = self._make_client() bucket = self._make_bucket(client) resource = { - 'topic': self.TOPIC_ALT_REF, - 'custom_attributes': self.CUSTOM_ATTRIBUTES, - 'event_types': self.event_types(), - 'object_name_prefix': self.BLOB_NAME_PREFIX, - 'payload_format': self.payload_format(), - 'id': self.NOTIFICATION_ID, - 'etag': self.ETAG, - 'selfLink': self.SELF_LINK, + "topic": self.TOPIC_ALT_REF, + "custom_attributes": self.CUSTOM_ATTRIBUTES, + "event_types": self.event_types(), + "object_name_prefix": self.BLOB_NAME_PREFIX, + "payload_format": self.payload_format(), + "id": self.NOTIFICATION_ID, + "etag": self.ETAG, + "selfLink": self.SELF_LINK, } notification = klass.from_api_repr(resource, bucket=bucket) @@ -187,12 +181,10 @@ def test_from_api_repr_explicit(self): self.assertIs(notification.bucket, bucket) self.assertEqual(notification.topic_name, self.TOPIC_NAME) self.assertEqual(notification.topic_project, self.TOPIC_ALT_PROJECT) - self.assertEqual( - notification.custom_attributes, self.CUSTOM_ATTRIBUTES) + self.assertEqual(notification.custom_attributes, self.CUSTOM_ATTRIBUTES) self.assertEqual(notification.event_types, self.event_types()) self.assertEqual(notification.blob_name_prefix, self.BLOB_NAME_PREFIX) - self.assertEqual( - notification.payload_format, self.payload_format()) + self.assertEqual(notification.payload_format, 
self.payload_format()) self.assertEqual(notification.notification_id, self.NOTIFICATION_ID) self.assertEqual(notification.etag, self.ETAG) self.assertEqual(notification.self_link, self.SELF_LINK) @@ -201,44 +193,40 @@ def test_notification_id(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) self.assertIsNone(notification.notification_id) - notification._properties['id'] = self.NOTIFICATION_ID + notification._properties["id"] = self.NOTIFICATION_ID self.assertEqual(notification.notification_id, self.NOTIFICATION_ID) def test_etag(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) self.assertIsNone(notification.etag) - notification._properties['etag'] = self.ETAG + notification._properties["etag"] = self.ETAG self.assertEqual(notification.etag, self.ETAG) def test_self_link(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) self.assertIsNone(notification.self_link) - notification._properties['selfLink'] = self.SELF_LINK + notification._properties["selfLink"] = self.SELF_LINK self.assertEqual(notification.self_link, self.SELF_LINK) def test_create_w_existing_notification_id(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) - notification._properties['id'] = self.NOTIFICATION_ID + notification = self._make_one(bucket, self.TOPIC_NAME) + notification._properties["id"] = self.NOTIFICATION_ID with self.assertRaises(ValueError): notification.create() @@ -248,15 +236,14 @@ def test_create_w_defaults(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) api_request = client._connection.api_request api_request.return_value = { - 'topic': self.TOPIC_REF, - 'id': self.NOTIFICATION_ID, - 'etag': self.ETAG, - 'selfLink': self.SELF_LINK, - 'payload_format': NONE_PAYLOAD_FORMAT, + "topic": self.TOPIC_REF, + "id": self.NOTIFICATION_ID, + "etag": self.ETAG, + "selfLink": self.SELF_LINK, + "payload_format": NONE_PAYLOAD_FORMAT, } notification.create() @@ -269,24 +256,19 @@ def test_create_w_defaults(self): self.assertIsNone(notification.blob_name_prefix) self.assertEqual(notification.payload_format, NONE_PAYLOAD_FORMAT) - data = { - 'topic': self.TOPIC_REF, - 'payload_format': NONE_PAYLOAD_FORMAT, - } + data = {"topic": self.TOPIC_REF, "payload_format": NONE_PAYLOAD_FORMAT} api_request.assert_called_once_with( - method='POST', - path=self.CREATE_PATH, - query_params={}, - data=data, + method="POST", path=self.CREATE_PATH, query_params={}, data=data ) def test_create_w_explicit_client(self): - USER_PROJECT = 'user-project-123' + USER_PROJECT = "user-project-123" client = self._make_client() alt_client = self._make_client() bucket = self._make_bucket(client, user_project=USER_PROJECT) notification = self._make_one( - bucket, self.TOPIC_NAME, + bucket, + self.TOPIC_NAME, topic_project=self.TOPIC_ALT_PROJECT, custom_attributes=self.CUSTOM_ATTRIBUTES, event_types=self.event_types(), @@ -295,47 +277,44 @@ def test_create_w_explicit_client(self): ) api_request = alt_client._connection.api_request 
api_request.return_value = { - 'topic': self.TOPIC_ALT_REF, - 'custom_attributes': self.CUSTOM_ATTRIBUTES, - 'event_types': self.event_types(), - 'object_name_prefix': self.BLOB_NAME_PREFIX, - 'payload_format': self.payload_format(), - 'id': self.NOTIFICATION_ID, - 'etag': self.ETAG, - 'selfLink': self.SELF_LINK, + "topic": self.TOPIC_ALT_REF, + "custom_attributes": self.CUSTOM_ATTRIBUTES, + "event_types": self.event_types(), + "object_name_prefix": self.BLOB_NAME_PREFIX, + "payload_format": self.payload_format(), + "id": self.NOTIFICATION_ID, + "etag": self.ETAG, + "selfLink": self.SELF_LINK, } notification.create(client=alt_client) - self.assertEqual( - notification.custom_attributes, self.CUSTOM_ATTRIBUTES) + self.assertEqual(notification.custom_attributes, self.CUSTOM_ATTRIBUTES) self.assertEqual(notification.event_types, self.event_types()) self.assertEqual(notification.blob_name_prefix, self.BLOB_NAME_PREFIX) - self.assertEqual( - notification.payload_format, self.payload_format()) + self.assertEqual(notification.payload_format, self.payload_format()) self.assertEqual(notification.notification_id, self.NOTIFICATION_ID) self.assertEqual(notification.etag, self.ETAG) self.assertEqual(notification.self_link, self.SELF_LINK) data = { - 'topic': self.TOPIC_ALT_REF, - 'custom_attributes': self.CUSTOM_ATTRIBUTES, - 'event_types': self.event_types(), - 'object_name_prefix': self.BLOB_NAME_PREFIX, - 'payload_format': self.payload_format(), + "topic": self.TOPIC_ALT_REF, + "custom_attributes": self.CUSTOM_ATTRIBUTES, + "event_types": self.event_types(), + "object_name_prefix": self.BLOB_NAME_PREFIX, + "payload_format": self.payload_format(), } api_request.assert_called_once_with( - method='POST', + method="POST", path=self.CREATE_PATH, - query_params={'userProject': USER_PROJECT}, + query_params={"userProject": USER_PROJECT}, data=data, ) def test_exists_wo_notification_id(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.exists() @@ -346,45 +325,42 @@ def test_exists_miss(self): client = self._make_client() bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties['id'] = self.NOTIFICATION_ID + notification._properties["id"] = self.NOTIFICATION_ID api_request = client._connection.api_request - api_request.side_effect = NotFound('testing') + api_request.side_effect = NotFound("testing") self.assertFalse(notification.exists()) api_request.assert_called_once_with( - method='GET', - path=self.NOTIFICATION_PATH, - query_params={}, + method="GET", path=self.NOTIFICATION_PATH, query_params={} ) def test_exists_hit(self): - USER_PROJECT = 'user-project-123' + USER_PROJECT = "user-project-123" client = self._make_client() bucket = self._make_bucket(client, user_project=USER_PROJECT) notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties['id'] = self.NOTIFICATION_ID + notification._properties["id"] = self.NOTIFICATION_ID api_request = client._connection.api_request api_request.return_value = { - 'topic': self.TOPIC_REF, - 'id': self.NOTIFICATION_ID, - 'etag': self.ETAG, - 'selfLink': self.SELF_LINK, + "topic": self.TOPIC_REF, + "id": self.NOTIFICATION_ID, + "etag": self.ETAG, + "selfLink": self.SELF_LINK, } self.assertTrue(notification.exists(client=client)) api_request.assert_called_once_with( - method='GET', + method="GET", 
path=self.NOTIFICATION_PATH, - query_params={'userProject': USER_PROJECT}, + query_params={"userProject": USER_PROJECT}, ) def test_reload_wo_notification_id(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.reload() @@ -395,34 +371,32 @@ def test_reload_miss(self): client = self._make_client() bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties['id'] = self.NOTIFICATION_ID + notification._properties["id"] = self.NOTIFICATION_ID api_request = client._connection.api_request - api_request.side_effect = NotFound('testing') + api_request.side_effect = NotFound("testing") with self.assertRaises(NotFound): notification.reload() api_request.assert_called_once_with( - method='GET', - path=self.NOTIFICATION_PATH, - query_params={}, + method="GET", path=self.NOTIFICATION_PATH, query_params={} ) def test_reload_hit(self): from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT - USER_PROJECT = 'user-project-123' + USER_PROJECT = "user-project-123" client = self._make_client() bucket = self._make_bucket(client, user_project=USER_PROJECT) notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties['id'] = self.NOTIFICATION_ID + notification._properties["id"] = self.NOTIFICATION_ID api_request = client._connection.api_request api_request.return_value = { - 'topic': self.TOPIC_REF, - 'id': self.NOTIFICATION_ID, - 'etag': self.ETAG, - 'selfLink': self.SELF_LINK, - 'payload_format': NONE_PAYLOAD_FORMAT, + "topic": self.TOPIC_REF, + "id": self.NOTIFICATION_ID, + "etag": self.ETAG, + "selfLink": self.SELF_LINK, + "payload_format": NONE_PAYLOAD_FORMAT, } notification.reload(client=client) @@ -435,16 +409,15 @@ def test_reload_hit(self): self.assertEqual(notification.payload_format, NONE_PAYLOAD_FORMAT) api_request.assert_called_once_with( - method='GET', + method="GET", path=self.NOTIFICATION_PATH, - query_params={'userProject': USER_PROJECT}, + query_params={"userProject": USER_PROJECT}, ) def test_delete_wo_notification_id(self): client = self._make_client() bucket = self._make_bucket(client) - notification = self._make_one( - bucket, self.TOPIC_NAME) + notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.delete() @@ -455,39 +428,36 @@ def test_delete_miss(self): client = self._make_client() bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties['id'] = self.NOTIFICATION_ID + notification._properties["id"] = self.NOTIFICATION_ID api_request = client._connection.api_request - api_request.side_effect = NotFound('testing') + api_request.side_effect = NotFound("testing") with self.assertRaises(NotFound): notification.delete() api_request.assert_called_once_with( - method='DELETE', - path=self.NOTIFICATION_PATH, - query_params={}, + method="DELETE", path=self.NOTIFICATION_PATH, query_params={} ) def test_delete_hit(self): - USER_PROJECT = 'user-project-123' + USER_PROJECT = "user-project-123" client = self._make_client() bucket = self._make_bucket(client, user_project=USER_PROJECT) notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties['id'] = self.NOTIFICATION_ID + notification._properties["id"] = self.NOTIFICATION_ID api_request = client._connection.api_request api_request.return_value = None 
notification.delete(client=client) api_request.assert_called_once_with( - method='DELETE', + method="DELETE", path=self.NOTIFICATION_PATH, - query_params={'userProject': USER_PROJECT}, + query_params={"userProject": USER_PROJECT}, ) class Test__parse_topic_path(unittest.TestCase): - @staticmethod def _call_fut(*args, **kwargs): from google.cloud.storage import notification @@ -501,48 +471,48 @@ def _make_topic_path(project, topic_name): return notification._TOPIC_REF_FMT.format(project, topic_name) def test_project_name_too_long(self): - project = 'a' * 31 - topic_path = self._make_topic_path(project, 'topic-name') + project = "a" * 31 + topic_path = self._make_topic_path(project, "topic-name") with self.assertRaises(ValueError): self._call_fut(topic_path) def test_project_name_uppercase(self): - project = 'aaaAaa' - topic_path = self._make_topic_path(project, 'topic-name') + project = "aaaAaa" + topic_path = self._make_topic_path(project, "topic-name") with self.assertRaises(ValueError): self._call_fut(topic_path) def test_leading_digit(self): - project = '1aaaaa' - topic_path = self._make_topic_path(project, 'topic-name') + project = "1aaaaa" + topic_path = self._make_topic_path(project, "topic-name") with self.assertRaises(ValueError): self._call_fut(topic_path) def test_leading_hyphen(self): - project = '-aaaaa' - topic_path = self._make_topic_path(project, 'topic-name') + project = "-aaaaa" + topic_path = self._make_topic_path(project, "topic-name") with self.assertRaises(ValueError): self._call_fut(topic_path) def test_trailing_hyphen(self): - project = 'aaaaa-' - topic_path = self._make_topic_path(project, 'topic-name') + project = "aaaaa-" + topic_path = self._make_topic_path(project, "topic-name") with self.assertRaises(ValueError): self._call_fut(topic_path) def test_invalid_format(self): - topic_path = '@#$%' + topic_path = "@#$%" with self.assertRaises(ValueError): self._call_fut(topic_path) def test_success(self): - topic_name = 'tah-pic-nehm' + topic_name = "tah-pic-nehm" project_choices = ( - 'a' * 30, # Max length. - 'a-b--c---d', # Valid hyphen usage. - 'abcdefghijklmnopqrstuvwxyz', # Valid letters. - 'z0123456789', # Valid digits (non-leading). - 'a-bcdefghijklmn12opqrstuv0wxyz', + "a" * 30, # Max length. + "a-b--c---d", # Valid hyphen usage. + "abcdefghijklmnopqrstuvwxyz", # Valid letters. + "z0123456789", # Valid digits (non-leading). + "a-bcdefghijklmn12opqrstuv0wxyz", ) for project in project_choices: topic_path = self._make_topic_path(project, topic_name)