diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index db1a4b0f2138..50db75f94560 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -211,8 +211,12 @@ def create_table(self, table): """ path = '/projects/%s/datasets/%s/tables' % ( table.project, table.dataset_id) + resource = table._build_resource(Table.all_fields) + doomed = [field for field in resource if resource[field] is None] + for field in doomed: + del resource[field] api_response = self._connection.api_request( - method='POST', path=path, data=table._build_resource()) + method='POST', path=path, data=resource) return Table.from_api_repr(api_response, self) def get_dataset(self, dataset_ref): @@ -285,6 +289,28 @@ def update_dataset(self, dataset, fields): method='PATCH', path=path, data=partial, headers=headers) return Dataset.from_api_repr(api_response) + def update_table(self, table, properties): + """API call: update table properties via a PATCH request + + See + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/patch + + :type table: + :class:`google.cloud.bigquery.table.Table` + :param table: the table to update. + + :rtype: :class:`google.cloud.bigquery.table.Table` + :returns: a ``Table`` instance + """ + partial = table._build_resource(properties) + if table.etag is not None: + headers = {'If-Match': table.etag} + else: + headers = None + api_response = self._connection.api_request( + method='PATCH', path=table.path, data=partial, headers=headers) + return Table.from_api_repr(api_response, client=self) + def list_dataset_tables(self, dataset, max_results=None, page_token=None): """List tables in the dataset. 
diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index dfc31be29745..e4814ae16c8e 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -175,6 +175,11 @@ class Table(object): _schema = None + all_fields = [ + 'description', 'friendly_name', 'expires', 'location', + 'partitioning_type', 'view_use_legacy_sql', 'view_query', 'schema' + ] + def __init__(self, table_ref, schema=(), client=None): self._project = table_ref.project self._table_id = table_ref.table_id @@ -240,9 +245,12 @@ def schema(self, value): :raises: TypeError if 'value' is not a sequence, or ValueError if any item in the sequence is not a SchemaField """ - if not all(isinstance(field, SchemaField) for field in value): + if value is None: + self._schema = () + elif not all(isinstance(field, SchemaField) for field in value): raise ValueError('Schema items must be fields') - self._schema = tuple(value) + else: + self._schema = tuple(value) @property def created(self): @@ -613,41 +621,59 @@ def _set_properties(self, api_response): cleaned['expirationTime'] = float(cleaned['expirationTime']) self._properties.update(cleaned) - def _build_resource(self): - """Generate a resource for ``create`` or ``update``.""" - resource = { - 'tableReference': { - 'projectId': self._project, - 'datasetId': self._dataset_id, - 'tableId': self.table_id}, - } - if self.description is not None: - resource['description'] = self.description - - if self.expires is not None: - value = _millis_from_datetime(self.expires) - resource['expirationTime'] = value - - if self.friendly_name is not None: - resource['friendlyName'] = self.friendly_name + def _populate_expires_resource(self, resource): + resource['expirationTime'] = _millis_from_datetime(self.expires) - if self.location is not None: - resource['location'] = self.location + def _populate_partitioning_type_resource(self, resource): + resource['timePartitioning'] = 
self._properties.get('timePartitioning') - if self.partitioning_type is not None: - resource['timePartitioning'] = self._properties['timePartitioning'] + def _populate_view_use_legacy_sql_resource(self, resource): + if 'view' not in resource: + resource['view'] = {} + resource['view']['useLegacySql'] = self.view_use_legacy_sql - if self.view_query is not None: - view = resource['view'] = {} - view['query'] = self.view_query - if self.view_use_legacy_sql is not None: - view['useLegacySql'] = self.view_use_legacy_sql + def _populate_view_query_resource(self, resource): + if self.view_query is None: + resource['view'] = None + return + if 'view' not in resource: + resource['view'] = {} + resource['view']['query'] = self.view_query - if self._schema: + def _populate_schema_resource(self, resource): + if not self._schema: + resource['schema'] = None + else: resource['schema'] = { - 'fields': _build_schema_resource(self._schema) + 'fields': _build_schema_resource(self._schema), } + custom_resource_fields = { + 'expires': _populate_expires_resource, + 'partitioning_type': _populate_partitioning_type_resource, + 'view_query': _populate_view_query_resource, + 'view_use_legacy_sql': _populate_view_use_legacy_sql_resource, + 'schema': _populate_schema_resource + } + + def _build_resource(self, filter_fields): + """Generate a resource for ``create`` or ``update``.""" + resource = { + 'tableReference': { + 'projectId': self._project, + 'datasetId': self._dataset_id, + 'tableId': self.table_id}, + } + for f in filter_fields: + if f in self.custom_resource_fields: + self.custom_resource_fields[f](self, resource) + else: + # TODO(alixh) refactor to use in both Table and Dataset + # snake case to camel case + words = f.split('_') + api_field = words[0] + ''.join( + map(str.capitalize, words[1:])) + resource[api_field] = getattr(self, f) return resource def exists(self, client=None): @@ -674,97 +700,6 @@ def exists(self, client=None): else: return True - def patch(self, - 
client=None, - friendly_name=_MARKER, - description=_MARKER, - location=_MARKER, - expires=_MARKER, - view_query=_MARKER, - schema=_MARKER): - """API call: update individual table properties via a PATCH request - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/patch - - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. - - :type friendly_name: str - :param friendly_name: (Optional) a descriptive name for this table. - - :type description: str - :param description: (Optional) a description of this table. - - :type location: str - :param location: - (Optional) the geographic location where the table resides. - - :type expires: :class:`datetime.datetime` - :param expires: (Optional) point in time at which the table expires. - - :type view_query: str - :param view_query: SQL query defining the table as a view - - :type schema: list of :class:`SchemaField` - :param schema: fields describing the schema - - :raises: ValueError for invalid value types. 
- """ - client = self._require_client(client) - - partial = {} - - if expires is not _MARKER: - if (not isinstance(expires, datetime.datetime) and - expires is not None): - raise ValueError("Pass a datetime, or None") - partial['expirationTime'] = _millis_from_datetime(expires) - - if description is not _MARKER: - partial['description'] = description - - if friendly_name is not _MARKER: - partial['friendlyName'] = friendly_name - - if location is not _MARKER: - partial['location'] = location - - if view_query is not _MARKER: - if view_query is None: - partial['view'] = None - else: - partial['view'] = {'query': view_query} - - if schema is not _MARKER: - if schema is None: - partial['schema'] = None - else: - partial['schema'] = { - 'fields': _build_schema_resource(schema)} - - api_response = client._connection.api_request( - method='PATCH', path=self.path, data=partial) - self._set_properties(api_response) - - def update(self, client=None): - """API call: update table properties via a PUT request - - See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/update - - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. - """ - client = self._require_client(client) - api_response = client._connection.api_request( - method='PUT', path=self.path, data=self._build_resource()) - self._set_properties(api_response) - def row_from_mapping(self, mapping): """Convert a mapping to a row tuple using the schema. 
diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 1936fc435e57..83d08c7598a4 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -250,14 +250,15 @@ def test_list_dataset_tables(self): table.dataset_id == DATASET_ID)] self.assertEqual(len(created), len(tables_to_create)) - def test_patch_table(self): - dataset = self.temp_dataset(_make_dataset_id('patch_table')) + def test_update_table(self): + dataset = self.temp_dataset(_make_dataset_id('update_table')) TABLE_NAME = 'test_table' - full_name = bigquery.SchemaField('full_name', 'STRING', - mode='REQUIRED') - age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED') - table_arg = Table(dataset.table(TABLE_NAME), schema=[full_name, age], + schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED') + ] + table_arg = Table(dataset.table(TABLE_NAME), schema=schema, client=Config.CLIENT) self.assertFalse(table_arg.exists()) table = retry_403(Config.CLIENT.create_table)(table_arg) @@ -265,18 +266,34 @@ def test_patch_table(self): self.assertTrue(table.exists()) self.assertIsNone(table.friendly_name) self.assertIsNone(table.description) - table.patch(friendly_name='Friendly', description='Description') - self.assertEqual(table.friendly_name, 'Friendly') - self.assertEqual(table.description, 'Description') + table.friendly_name = 'Friendly' + table.description = 'Description' - def test_update_table(self): + table2 = Config.CLIENT.update_table( + table, ['friendly_name', 'description']) + + self.assertEqual(table2.friendly_name, 'Friendly') + self.assertEqual(table2.description, 'Description') + + table2.description = None + table3 = Config.CLIENT.update_table(table2, ['description']) + self.assertIsNone(table3.description) + + # If we try to update using table2 again, it will fail because the + # previous update changed the ETag. 
+ table2.description = 'no good' + with self.assertRaises(PreconditionFailed): + Config.CLIENT.update_table(table2, ['description']) + + def test_update_table_schema(self): dataset = self.temp_dataset(_make_dataset_id('update_table')) TABLE_NAME = 'test_table' - full_name = bigquery.SchemaField('full_name', 'STRING', - mode='REQUIRED') - age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED') - table_arg = Table(dataset.table(TABLE_NAME), schema=[full_name, age], + schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED') + ] + table_arg = Table(dataset.table(TABLE_NAME), schema=schema, client=Config.CLIENT) self.assertFalse(table_arg.exists()) table = retry_403(Config.CLIENT.create_table)(table_arg) @@ -286,9 +303,11 @@ def test_update_table(self): schema = table.schema schema.append(voter) table.schema = schema - table.update() - self.assertEqual(len(table.schema), len(schema)) - for found, expected in zip(table.schema, schema): + + updated_table = Config.CLIENT.update_table(table, ['schema']) + + self.assertEqual(len(updated_table.schema), len(schema)) + for found, expected in zip(updated_table.schema, schema): self.assertEqual(found.name, expected.name) self.assertEqual(found.field_type, expected.field_type) self.assertEqual(found.mode, expected.mode) diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index cd0c3f6d71b0..aad64d980df1 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -527,7 +527,8 @@ def test_create_table_w_schema_and_query(self): {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}] }, - 'view': {'query': query}, + # TODO(alixh) default to Standard SQL + 'view': {'query': query, 'useLegacySql': None}, } self.assertEqual(req['data'], sent) self.assertEqual(got.table_id, table_id) @@ -629,6 +630,292 @@ def 
test_update_dataset(self): req = conn._requested[1] self.assertEqual(req['headers']['If-Match'], 'etag') + def test_update_table(self): + from google.cloud.bigquery.table import Table, SchemaField + + project = 'PROJECT' + dataset_id = 'DATASET_ID' + table_id = 'table_id' + path = 'projects/%s/datasets/%s/tables/%s' % ( + project, dataset_id, table_id) + description = 'description' + title = 'title' + resource = { + 'id': '%s:%s:%s' % (project, dataset_id, table_id), + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}] + }, + 'etag': 'etag', + 'description': description, + 'friendlyName': title, + } + schema = [ + SchemaField('full_name', 'STRING', mode='REQUIRED'), + SchemaField('age', 'INTEGER', mode='REQUIRED') + ] + creds = _make_credentials() + client = self._make_one(project=project, credentials=creds) + conn = client._connection = _Connection(resource, resource) + table_ref = client.dataset(dataset_id).table(table_id) + table = Table(table_ref, schema=schema, client=client) + table.description = description + table.friendly_name = title + + updated_table = client.update_table( + table, ['schema', 'description', 'friendly_name']) + + sent = { + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]}, + 'description': description, + 'friendlyName': title, + } + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PATCH') + self.assertEqual(req['data'], sent) + self.assertEqual(req['path'], '/' + path) + self.assertIsNone(req['headers']) + self.assertEqual(updated_table.description, table.description) + 
self.assertEqual(updated_table.friendly_name, table.friendly_name) + self.assertEqual(updated_table.schema, table.schema) + + # ETag becomes If-Match header. + table._properties['etag'] = 'etag' + client.update_table(table, []) + req = conn._requested[1] + self.assertEqual(req['headers']['If-Match'], 'etag') + + def test_update_table_only_use_legacy_sql(self): + from google.cloud.bigquery.table import Table + + project = 'PROJECT' + dataset_id = 'DATASET_ID' + table_id = 'table_id' + path = 'projects/%s/datasets/%s/tables/%s' % ( + project, dataset_id, table_id) + resource = { + 'id': '%s:%s:%s' % (project, dataset_id, table_id), + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'view': {'useLegacySql': True} + } + creds = _make_credentials() + client = self._make_one(project=project, credentials=creds) + conn = client._connection = _Connection(resource) + table_ref = client.dataset(dataset_id).table(table_id) + table = Table(table_ref, client=client) + table.view_use_legacy_sql = True + + updated_table = client.update_table(table, ['view_use_legacy_sql']) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PATCH') + self.assertEqual(req['path'], '/%s' % path) + sent = { + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'view': {'useLegacySql': True} + } + self.assertEqual(req['data'], sent) + self.assertEqual( + updated_table.view_use_legacy_sql, table.view_use_legacy_sql) + + def test_update_table_w_query(self): + import datetime + from google.cloud._helpers import UTC + from google.cloud._helpers import _millis + from google.cloud.bigquery.table import Table, SchemaField + + project = 'PROJECT' + dataset_id = 'DATASET_ID' + table_id = 'table_id' + path = 'projects/%s/datasets/%s/tables/%s' % ( + project, dataset_id, table_id) + query = 'select fullname, age from person_ages' + location = 'EU' + exp_time = 
datetime.datetime(2015, 8, 1, 23, 59, 59, tzinfo=UTC) + schema_resource = {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]} + schema = [ + SchemaField('full_name', 'STRING', mode='REQUIRED'), + SchemaField('age', 'INTEGER', mode='REQUIRED') + ] + resource = { + 'id': '%s:%s:%s' % (project, dataset_id, table_id), + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'schema': schema_resource, + 'view': {'query': query, 'useLegacySql': True}, + 'location': location, + 'expirationTime': _millis(exp_time) + } + creds = _make_credentials() + client = self._make_one(project=project, credentials=creds) + conn = client._connection = _Connection(resource) + table_ref = client.dataset(dataset_id).table(table_id) + table = Table(table_ref, schema=schema, client=client) + table.location = location + table.expires = exp_time + table.view_query = query + table.view_use_legacy_sql = True + updated_properties = ['schema', 'view_query', 'location', + 'expires', 'view_use_legacy_sql'] + + updated_table = client.update_table(table, updated_properties) + + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PATCH') + self.assertEqual(req['path'], '/%s' % path) + sent = { + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'view': {'query': query, 'useLegacySql': True}, + 'location': location, + 'expirationTime': _millis(exp_time), + 'schema': schema_resource, + } + self.assertEqual(req['data'], sent) + self.assertEqual(updated_table.schema, table.schema) + self.assertEqual(updated_table.view_query, table.view_query) + self.assertEqual(updated_table.location, table.location) + self.assertEqual(updated_table.expires, table.expires) + self.assertEqual( + updated_table.view_use_legacy_sql, table.view_use_legacy_sql) + + def 
test_update_table_w_schema_None(self): + # Simulate deleting schema: not sure if back-end will actually + # allow this operation, but the spec says it is optional. + project = 'PROJECT' + dataset_id = 'DATASET_ID' + table_id = 'table_id' + path = 'projects/%s/datasets/%s/tables/%s' % ( + project, dataset_id, table_id) + resource1 = { + 'id': '%s:%s:%s' % (project, dataset_id, table_id), + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id}, + 'schema': {'fields': [ + {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, + {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]} + } + resource2 = { + 'id': '%s:%s:%s' % (project, dataset_id, table_id), + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id}, + 'schema': {'fields': []}, + } + creds = _make_credentials() + client = self._make_one(project=project, credentials=creds) + conn = client._connection = _Connection(resource1, resource2) + table_ref = client.dataset(dataset_id).table(table_id) + table = client.get_table(table_ref) + table.schema = None + + updated_table = client.update_table(table, ['schema']) + + self.assertEqual(len(conn._requested), 2) + req = conn._requested[1] + self.assertEqual(req['method'], 'PATCH') + sent = { + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'schema': None + } + self.assertEqual(req['data'], sent) + self.assertEqual(req['path'], '/%s' % path) + self.assertEqual(updated_table.schema, table.schema) + + def test_update_table_delete_property(self): + from google.cloud.bigquery.table import Table + + project = 'PROJECT' + dataset_id = 'DATASET_ID' + table_id = 'table_id' + description = 'description' + title = 'title' + path = 'projects/%s/datasets/%s/tables/%s' % ( + project, dataset_id, table_id) + resource1 = { + 'id': '%s:%s:%s' % (project, dataset_id, table_id), + 'tableReference': { + 'projectId': project, + 'datasetId': 
dataset_id, + 'tableId': table_id + }, + 'description': description, + 'friendlyName': title, + } + resource2 = { + 'id': '%s:%s:%s' % (project, dataset_id, table_id), + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'description': None, + } + creds = _make_credentials() + client = self._make_one(project=project, credentials=creds) + conn = client._connection = _Connection(resource1, resource2) + table_ref = client.dataset(dataset_id).table(table_id) + table = Table(table_ref, client=client) + table.description = description + table.friendly_name = title + table2 = client.update_table(table, ['description', 'friendly_name']) + self.assertEqual(table2.description, table.description) + table2.description = None + + table3 = client.update_table(table2, ['description']) + self.assertEqual(len(conn._requested), 2) + req = conn._requested[1] + self.assertEqual(req['method'], 'PATCH') + self.assertEqual(req['path'], '/%s' % path) + sent = { + 'tableReference': { + 'projectId': project, + 'datasetId': dataset_id, + 'tableId': table_id + }, + 'description': None, + } + self.assertEqual(req['data'], sent) + self.assertIsNone(table3.description) + def test_list_dataset_tables_empty(self): PROJECT = 'PROJECT' DS_ID = 'DATASET_ID' diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index fc0ff3370974..6e00bd73c9c6 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -550,8 +550,17 @@ def test_from_api_repr_bare(self): self._verifyResourceProperties(table, RESOURCE) def test_from_api_repr_w_properties(self): + import datetime + from google.cloud._helpers import UTC + from google.cloud._helpers import _millis + client = _Client(self.PROJECT) RESOURCE = self._makeResource() + RESOURCE['view'] = {'query': 'select fullname, age from person_ages'} + RESOURCE['type'] = 'VIEW' + RESOURCE['location'] = 'EU' + self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, 
tzinfo=UTC) + RESOURCE['expirationTime'] = _millis(self.EXP_TIME) klass = self._get_target_class() table = klass.from_api_repr(RESOURCE, client) self.assertIs(table._client, client) @@ -741,210 +750,6 @@ def test_exists_hit_w_alternate_client(self): self.assertEqual(req['path'], '/%s' % PATH) self.assertEqual(req['query_params'], {'fields': 'id'}) - def test_patch_w_invalid_expiration(self): - RESOURCE = self._makeResource() - conn = _Connection(RESOURCE) - client = _Client(project=self.PROJECT, connection=conn) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref, client=client) - - with self.assertRaises(ValueError): - table.patch(expires='BOGUS') - - def test_patch_w_bound_client(self): - PATH = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) - DESCRIPTION = 'DESCRIPTION' - TITLE = 'TITLE' - RESOURCE = self._makeResource() - RESOURCE['description'] = DESCRIPTION - RESOURCE['friendlyName'] = TITLE - conn = _Connection(RESOURCE) - client = _Client(project=self.PROJECT, connection=conn) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref, client=client) - - table.patch(description=DESCRIPTION, - friendly_name=TITLE, - view_query=None) - - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'PATCH') - SENT = { - 'description': DESCRIPTION, - 'friendlyName': TITLE, - 'view': None, - } - self.assertEqual(req['data'], SENT) - self.assertEqual(req['path'], '/%s' % PATH) - self._verifyResourceProperties(table, RESOURCE) - - def test_patch_w_alternate_client(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _millis - from google.cloud.bigquery.table import SchemaField - - PATH = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) - QUERY = 
'select fullname, age from person_ages' - LOCATION = 'EU' - RESOURCE = self._makeResource() - RESOURCE['view'] = {'query': QUERY} - RESOURCE['type'] = 'VIEW' - RESOURCE['location'] = LOCATION - self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, - tzinfo=UTC) - RESOURCE['expirationTime'] = _millis(self.EXP_TIME) - conn1 = _Connection() - client1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref, client=client1) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='NULLABLE') - - table.patch(client=client2, view_query=QUERY, location=LOCATION, - expires=self.EXP_TIME, schema=[full_name, age]) - - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'PATCH') - self.assertEqual(req['path'], '/%s' % PATH) - SENT = { - 'view': {'query': QUERY}, - 'location': LOCATION, - 'expirationTime': _millis(self.EXP_TIME), - 'schema': {'fields': [ - {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, - {'name': 'age', 'type': 'INTEGER', 'mode': 'NULLABLE'}]}, - } - self.assertEqual(req['data'], SENT) - self._verifyResourceProperties(table, RESOURCE) - - def test_patch_w_schema_None(self): - # Simulate deleting schema: not sure if back-end will actually - # allow this operation, but the spec says it is optional. 
- PATH = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) - DESCRIPTION = 'DESCRIPTION' - TITLE = 'TITLE' - RESOURCE = self._makeResource() - RESOURCE['description'] = DESCRIPTION - RESOURCE['friendlyName'] = TITLE - conn = _Connection(RESOURCE) - client = _Client(project=self.PROJECT, connection=conn) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref, client=client) - - table.patch(schema=None) - - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'PATCH') - SENT = {'schema': None} - self.assertEqual(req['data'], SENT) - self.assertEqual(req['path'], '/%s' % PATH) - self._verifyResourceProperties(table, RESOURCE) - - def test_update_w_bound_client(self): - from google.cloud.bigquery.table import SchemaField - - PATH = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) - DESCRIPTION = 'DESCRIPTION' - TITLE = 'TITLE' - RESOURCE = self._makeResource() - RESOURCE['description'] = DESCRIPTION - RESOURCE['friendlyName'] = TITLE - conn = _Connection(RESOURCE) - client = _Client(project=self.PROJECT, connection=conn) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') - table = self._make_one(table_ref, schema=[full_name, age], - client=client) - table.description = DESCRIPTION - table.friendly_name = TITLE - - table.update() - - self.assertEqual(len(conn._requested), 1) - req = conn._requested[0] - self.assertEqual(req['method'], 'PUT') - SENT = { - 'tableReference': - {'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_NAME}, - 'schema': {'fields': [ - {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, - {'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'}]}, - 
'description': DESCRIPTION, - 'friendlyName': TITLE, - } - self.assertEqual(req['data'], SENT) - self.assertEqual(req['path'], '/%s' % PATH) - self._verifyResourceProperties(table, RESOURCE) - - def test_update_w_alternate_client(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _millis - - PATH = 'projects/%s/datasets/%s/tables/%s' % ( - self.PROJECT, self.DS_ID, self.TABLE_NAME) - DEF_TABLE_EXP = 12345 - LOCATION = 'EU' - QUERY = 'select fullname, age from person_ages' - RESOURCE = self._makeResource() - RESOURCE['defaultTableExpirationMs'] = 12345 - RESOURCE['location'] = LOCATION - self.EXP_TIME = datetime.datetime(2015, 8, 1, 23, 59, 59, - tzinfo=UTC) - RESOURCE['expirationTime'] = _millis(self.EXP_TIME) - RESOURCE['view'] = {'query': QUERY, 'useLegacySql': True} - RESOURCE['type'] = 'VIEW' - conn1 = _Connection() - client1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection(RESOURCE) - client2 = _Client(project=self.PROJECT, connection=conn2) - dataset = DatasetReference(self.PROJECT, self.DS_ID) - table_ref = dataset.table(self.TABLE_NAME) - table = self._make_one(table_ref, client=client1) - table.default_table_expiration_ms = DEF_TABLE_EXP - table.location = LOCATION - table.expires = self.EXP_TIME - table.view_query = QUERY - table.view_use_legacy_sql = True - - table.update(client=client2) - - self.assertEqual(len(conn1._requested), 0) - self.assertEqual(len(conn2._requested), 1) - req = conn2._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % PATH) - SENT = { - 'tableReference': - {'projectId': self.PROJECT, - 'datasetId': self.DS_ID, - 'tableId': self.TABLE_NAME}, - 'expirationTime': _millis(self.EXP_TIME), - 'location': 'EU', - 'view': {'query': QUERY, 'useLegacySql': True}, - } - self.assertEqual(req['data'], SENT) - self._verifyResourceProperties(table, RESOURCE) - def test_row_from_mapping_wo_schema(self): from 
google.cloud.bigquery.table import _TABLE_HAS_NO_SCHEMA MAPPING = {'full_name': 'Phred Phlyntstone', 'age': 32} @@ -1224,6 +1029,21 @@ def _row_data(row): self.assertEqual(req['path'], '/%s' % PATH) self.assertEqual(req['data'], SENT) + def test__populate_view_use_legacy_sql_resource_w_existing_view(self): + query = 'select * from foo' + resource = {'view': {'query': query}} + client = mock.Mock(spec=[u'_credentials', '_http']) + client._http = mock.sentinel.http + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table = self._make_one(dataset.table(self.TABLE_NAME), client=client) + table.view_use_legacy_sql = True + + table._populate_view_use_legacy_sql_resource(resource) + + self.assertEqual( + resource['view']['useLegacySql'], table.view_use_legacy_sql) + self.assertEqual(resource['view']['query'], query) + def test__get_transport(self): client = mock.Mock(spec=[u'_credentials', '_http']) client._http = mock.sentinel.http