updates dataset.table() to return a TableReference instead of a Table
alixhami authored and tswast committed Oct 16, 2017
1 parent 6aab47a commit 15b3d73
Showing 5 changed files with 60 additions and 70 deletions.
21 changes: 10 additions & 11 deletions bigquery/google/cloud/bigquery/dataset.py
@@ -141,6 +141,9 @@ def path(self):
     def table(self, table_id):
         """Constructs a TableReference.
 
+        :type table_id: str
+        :param table_id: the ID of the table.
+
         :rtype: :class:`google.cloud.bigquery.table.TableReference`
         :returns: a TableReference for a table in this dataset.
         """
@@ -505,20 +508,16 @@ def list_tables(self, max_results=None, page_token=None):
         result.dataset = self
         return result
 
-    def table(self, name, schema=()):
-        """Construct a table bound to this dataset.
+    def table(self, table_id):
+        """Constructs a TableReference.
 
-        :type name: str
-        :param name: Name of the table.
+        :type table_id: str
+        :param table_id: the ID of the table.
 
-        :type schema: list of :class:`google.cloud.bigquery.table.SchemaField`
-        :param schema: The table's schema
-
-        :rtype: :class:`google.cloud.bigquery.table.Table`
-        :returns: a new ``Table`` instance
+        :rtype: :class:`google.cloud.bigquery.table.TableReference`
+        :returns: a TableReference for a table in this dataset.
         """
-        table_ref = TableReference(self, name)
-        return Table(table_ref, schema=schema, client=self._client)
+        return TableReference(self, table_id)
 
 
 def _item_to_table(iterator, resource):
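
In practice this moves Table construction to the caller. A minimal before/after
sketch, assuming the pre-1.0 google-cloud-bigquery API used throughout this
commit (the dataset ID, table ID, and schema fields are illustrative):

    from google.cloud import bigquery
    from google.cloud.bigquery.table import Table

    client = bigquery.Client()  # assumes default credentials and project
    dataset = client.dataset('my_dataset')  # illustrative dataset ID
    full_name = bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED')
    age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')

    # Before this commit, dataset.table() returned a client-bound Table:
    #     table = dataset.table('people', schema=[full_name, age])
    # After it, dataset.table() returns a lightweight TableReference, and the
    # caller builds the Table when schema or client binding is needed:
    table_ref = dataset.table('people')
    table = Table(table_ref, schema=[full_name, age], client=client)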
2 changes: 1 addition & 1 deletion bigquery/google/cloud/bigquery/job.py
@@ -1248,7 +1248,7 @@ def __init__(self, job_id, query, client,
     https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.defaultDataset
     """
 
-    destination = _TypedProperty('destination', Table)
+    destination = _TypedProperty('destination', TableReference)
     """See
     https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationTable
     """
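
With the property typed as TableReference, a bare reference is now the right
value for a query's destination. A hedged sketch (constructing QueryJob
directly, per the __init__ signature visible above; the job ID, query, and
table names are made up):

    from google.cloud import bigquery
    from google.cloud.bigquery.job import QueryJob

    client = bigquery.Client()
    job = QueryJob('example-job-id', 'SELECT 42 AS answer', client)

    # destination now expects a TableReference; assigning a full Table would
    # fail the _TypedProperty type check after this change.
    job.destination = client.dataset('my_dataset').table('query_results')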
65 changes: 40 additions & 25 deletions bigquery/tests/system.py
@@ -26,6 +26,7 @@
 
 from google.cloud import bigquery
 from google.cloud.bigquery.dataset import Dataset, DatasetReference
+from google.cloud.bigquery.table import Table
 from google.cloud._helpers import UTC
 from google.cloud.bigquery import dbapi
 from google.cloud.exceptions import Forbidden, NotFound
@@ -178,7 +179,8 @@ def test_create_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table(TABLE_NAME, schema=[full_name, age])
+        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                      client=Config.CLIENT)
         self.assertFalse(table.exists())
         table.create()
         self.to_delete.insert(0, table)
@@ -221,7 +223,9 @@ def test_list_tables(self):
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
         for table_name in tables_to_create:
-            created_table = dataset.table(table_name, schema=[full_name, age])
+            created_table = Table(dataset.table(table_name),
+                                  schema=[full_name, age],
+                                  client=Config.CLIENT)
             created_table.create()
             self.to_delete.insert(0, created_table)
 
@@ -243,7 +247,8 @@ def test_patch_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table(TABLE_NAME, schema=[full_name, age])
+        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                      client=Config.CLIENT)
         self.assertFalse(table.exists())
         table.create()
         self.to_delete.insert(0, table)
@@ -263,7 +268,8 @@ def test_update_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table(TABLE_NAME, schema=[full_name, age])
+        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                      client=Config.CLIENT)
         self.assertFalse(table.exists())
         table.create()
         self.to_delete.insert(0, table)
@@ -306,7 +312,8 @@ def test_insert_data_then_dump_table(self):
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
         now = bigquery.SchemaField('now', 'TIMESTAMP')
-        table = dataset.table(TABLE_NAME, schema=[full_name, age, now])
+        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age, now],
+                      client=Config.CLIENT)
         self.assertFalse(table.exists())
         table.create()
         self.to_delete.insert(0, table)
@@ -345,7 +352,8 @@ def test_load_table_from_local_file_then_dump_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table(TABLE_NAME, schema=[full_name, age])
+        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                      client=Config.CLIENT)
         table.create()
         self.to_delete.insert(0, table)
 
@@ -389,7 +397,7 @@ def test_load_table_from_local_avro_file_then_dump_table(self):
             Dataset(_make_dataset_id('load_local_then_dump')))
         self.to_delete.append(dataset)
 
-        table = dataset.table(TABLE_NAME)
+        table = Table(dataset.table(TABLE_NAME), client=Config.CLIENT)
         self.to_delete.insert(0, table)
 
         with open(os.path.join(WHERE, 'data', 'colors.avro'), 'rb') as avrof:
@@ -453,7 +461,8 @@ def test_load_table_from_storage_then_dump_table(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table(TABLE_NAME, schema=[full_name, age])
+        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                      client=Config.CLIENT)
         table.create()
         self.to_delete.insert(0, table)
 
@@ -518,11 +527,10 @@ def test_load_table_from_storage_w_autodetect_schema(self):
             Dataset(_make_dataset_id('load_gcs_then_dump')))
         self.to_delete.append(dataset)
 
-        table = dataset.table(table_name)
-        self.to_delete.insert(0, table)
+        table_ref = dataset.table(table_name)
 
         job = Config.CLIENT.load_table_from_storage(
-            'bq_load_storage_test_' + local_id, table, gs_url)
+            'bq_load_storage_test_' + local_id, table_ref, gs_url)
         job.autodetect = True
 
         job.begin()
@@ -533,7 +541,8 @@ def test_load_table_from_storage_w_autodetect_schema(self):
         retry = RetryInstanceState(_job_done, max_tries=8)
         retry(job.reload)()
 
-        table = Config.CLIENT.get_table(table)
+        table = Config.CLIENT.get_table(table_ref)
+        self.to_delete.insert(0, table)
         field_name = SchemaField(
             u'Full_Name', u'string', u'NULLABLE', None, ())
         field_age = SchemaField(u'Age', u'integer', u'NULLABLE', None, ())
@@ -570,10 +579,9 @@ def _load_table_for_extract_table(
         dataset = retry_403(Config.CLIENT.create_dataset)(
             Dataset(table.dataset_id))
         self.to_delete.append(dataset)
-        table = dataset.table(table.table_id)
-        self.to_delete.insert(0, table)
+        table_ref = dataset.table(table.table_id)
         job = Config.CLIENT.load_table_from_storage(
-            'bq_extract_storage_test_' + local_id, table, gs_url)
+            'bq_extract_storage_test_' + local_id, table_ref, gs_url)
         job.autodetect = True
         job.begin()
         # Allow for 90 seconds of "warm up" before rows visible.  See
@@ -591,21 +599,23 @@ def test_extract_table(self):
         blob_name = 'person_ages.csv'
         dataset_id = _make_dataset_id('load_gcs_then_extract')
         table_id = 'test_table'
-        table = Config.CLIENT.dataset(dataset_id).table(table_id)
+        table_ref = Config.CLIENT.dataset(dataset_id).table(table_id)
+        table = Table(table_ref, client=Config.CLIENT)
+        self.to_delete.insert(0, table)
         rows = [
             ('Phred Phlyntstone', 32),
             ('Bharney Rhubble', 33),
             ('Wylma Phlyntstone', 29),
             ('Bhettye Rhubble', 27),
         ]
         self._load_table_for_extract_table(
-            storage_client, rows, bucket_name, blob_name, table)
+            storage_client, rows, bucket_name, blob_name, table_ref)
         bucket = storage_client.bucket(bucket_name)
         destination_blob_name = 'person_ages_out.csv'
         destination = bucket.blob(destination_blob_name)
         destination_uri = 'gs://{}/person_ages_out.csv'.format(bucket_name)
 
-        job = Config.CLIENT.extract_table(table, destination_uri)
+        job = Config.CLIENT.extract_table(table_ref, destination_uri)
         job.result()
 
         self.to_delete.insert(0, destination)
@@ -621,15 +631,17 @@ def test_extract_table_w_job_config(self):
         blob_name = 'person_ages.csv'
         dataset_id = _make_dataset_id('load_gcs_then_extract')
         table_id = 'test_table'
-        table = Config.CLIENT.dataset(dataset_id).table(table_id)
+        table_ref = Config.CLIENT.dataset(dataset_id).table(table_id)
+        table = Table(table_ref, client=Config.CLIENT)
+        self.to_delete.insert(0, table)
         rows = [
             ('Phred Phlyntstone', 32),
             ('Bharney Rhubble', 33),
             ('Wylma Phlyntstone', 29),
             ('Bhettye Rhubble', 27),
         ]
         self._load_table_for_extract_table(
-            storage_client, rows, bucket_name, blob_name, table)
+            storage_client, rows, bucket_name, blob_name, table_ref)
         bucket = storage_client.bucket(bucket_name)
         destination_blob_name = 'person_ages_out.csv'
         destination = bucket.blob(destination_blob_name)
@@ -657,7 +669,8 @@ def test_job_cancel(self):
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
         age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table(TABLE_NAME, schema=[full_name, age])
+        table = Table(dataset.table(TABLE_NAME), schema=[full_name, age],
+                      client=Config.CLIENT)
         table.create()
         self.to_delete.insert(0, table)
 
@@ -845,7 +858,8 @@ def _load_table_for_dml(self, rows, dataset_id, table_id):
 
         greeting = bigquery.SchemaField(
             'greeting', 'STRING', mode='NULLABLE')
-        table = dataset.table(table_id, schema=[greeting])
+        table = Table(dataset.table(table_id), schema=[greeting],
+                      client=Config.CLIENT)
         table.create()
         self.to_delete.insert(0, table)
 
@@ -1237,7 +1251,8 @@ def test_insert_nested_nested(self):
             Dataset(_make_dataset_id('issue_2951')))
         self.to_delete.append(dataset)
 
-        table = dataset.table(table_name, schema=schema)
+        table = Table(dataset.table(table_name), schema=schema,
+                      client=Config.CLIENT)
         table.create()
         self.to_delete.insert(0, table)
 
@@ -1249,14 +1264,14 @@ def test_insert_nested_nested(self):
         self.assertEqual(rows, to_insert)
 
     def test_create_table_insert_fetch_nested_schema(self):
-
         table_name = 'test_table'
         dataset = retry_403(Config.CLIENT.create_dataset)(
             Dataset(_make_dataset_id('create_table_nested_schema')))
         self.to_delete.append(dataset)
 
         schema = _load_json_schema()
-        table = dataset.table(table_name, schema=schema)
+        table = Table(dataset.table(table_name), schema=schema,
+                      client=Config.CLIENT)
         table.create()
         self.to_delete.insert(0, table)
         self.assertTrue(table.exists())
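
The pattern running through these test changes: job APIs such as
load_table_from_storage and extract_table accept a bare TableReference, while
schema inspection requires fetching the full Table via get_table. A condensed
sketch under those assumptions (the job ID, dataset, table, and bucket names
are illustrative):

    from google.cloud import bigquery

    client = bigquery.Client()
    table_ref = client.dataset('my_dataset').table('my_table')

    # Load jobs take the reference directly.
    job = client.load_table_from_storage(
        'my-load-job-id', table_ref, 'gs://my-bucket/people.csv')
    job.autodetect = True
    job.begin()
    job.result()  # block until the load completes

    # Metadata such as the schema lives on the fetched Table.
    table = client.get_table(table_ref)
    print([field.name for field in table.schema])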
23 changes: 3 additions & 20 deletions bigquery/tests/unit/test_dataset.py
@@ -506,34 +506,17 @@ def test_list_tables_explicit(self):
         self.assertEqual(req['query_params'],
                          {'maxResults': 3, 'pageToken': TOKEN})
 
-    def test_table_wo_schema(self):
-        from google.cloud.bigquery.table import Table
+    def test_table(self):
+        from google.cloud.bigquery.table import TableReference
 
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
         dataset = self._make_one(self.DS_ID, client=client)
         table = dataset.table('table_id')
-        self.assertIsInstance(table, Table)
+        self.assertIsInstance(table, TableReference)
         self.assertEqual(table.table_id, 'table_id')
         self.assertEqual(table.dataset_id, self.DS_ID)
         self.assertEqual(table.project, self.PROJECT)
-        self.assertEqual(table.schema, [])
-
-    def test_table_w_schema(self):
-        from google.cloud.bigquery.schema import SchemaField
-        from google.cloud.bigquery.table import Table
-
-        conn = _Connection({})
-        client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_ID, client=client)
-        full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
-        age = SchemaField('age', 'INTEGER', mode='REQUIRED')
-        table = dataset.table('table_id', schema=[full_name, age])
-        self.assertIsInstance(table, Table)
-        self.assertEqual(table.table_id, 'table_id')
-        self.assertEqual(table.dataset_id, self.DS_ID)
-        self.assertEqual(table.project, self.PROJECT)
-        self.assertEqual(table.schema, [full_name, age])
 
 
 class _Client(object):
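
The consolidated test reduces to a check that runs standalone: a reference
built from a dataset carries identity fields only, and no schema. A minimal
sketch (the project and dataset IDs are placeholders):

    from google.cloud.bigquery.dataset import DatasetReference
    from google.cloud.bigquery.table import TableReference

    table = DatasetReference('my-project', 'my_dataset').table('table_id')
    assert isinstance(table, TableReference)
    assert table.table_id == 'table_id'
    assert table.dataset_id == 'my_dataset'
    assert table.project == 'my-project'
    # No table.schema here: a reference only names a table; metadata
    # lives on the full Table resource.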
19 changes: 6 additions & 13 deletions bigquery/tests/unit/test_job.py
@@ -1908,7 +1908,7 @@ def test_statement_type(self):
         self.assertEqual(job.statement_type, statement_type)
 
     def test_referenced_tables(self):
-        from google.cloud.bigquery.table import Table
+        from google.cloud.bigquery.table import TableReference
 
         ref_tables_resource = [{
             'projectId': self.PROJECT,
@@ -1939,23 +1939,20 @@ def test_referenced_tables(self):
 
         local1, local2, remote = job.referenced_tables
 
-        self.assertIsInstance(local1, Table)
+        self.assertIsInstance(local1, TableReference)
         self.assertEqual(local1.table_id, 'local1')
         self.assertEqual(local1.dataset_id, 'dataset')
         self.assertEqual(local1.project, self.PROJECT)
-        self.assertIs(local1._client, client)
 
-        self.assertIsInstance(local2, Table)
+        self.assertIsInstance(local2, TableReference)
         self.assertEqual(local2.table_id, 'local2')
         self.assertEqual(local2.dataset_id, 'dataset')
         self.assertEqual(local2.project, self.PROJECT)
-        self.assertIs(local2._client, client)
 
-        self.assertIsInstance(remote, Table)
+        self.assertIsInstance(remote, TableReference)
         self.assertEqual(remote.table_id, 'other-table')
         self.assertEqual(remote.dataset_id, 'other-dataset')
         self.assertEqual(remote.project, 'other-project-123')
-        self.assertIs(remote._client, client)
 
     def test_undeclared_query_paramters(self):
         from google.cloud.bigquery._helpers import ArrayQueryParameter
@@ -2173,7 +2170,6 @@ def test_begin_w_bound_client(self):
     def test_begin_w_alternate_client(self):
         from google.cloud.bigquery.dataset import Dataset
         from google.cloud.bigquery.dataset import DatasetReference
-        from google.cloud.bigquery.dataset import Table
 
         PATH = '/projects/%s/jobs' % (self.PROJECT,)
         TABLE = 'TABLE'
@@ -2210,12 +2206,11 @@ def test_begin_w_alternate_client(self):
         dataset_ref = DatasetReference(self.PROJECT, DS_ID)
         dataset = Dataset(DS_ID, client1)
         table_ref = dataset_ref.table(TABLE)
-        table = Table(table_ref, client=client1)
 
         job.allow_large_results = True
         job.create_disposition = 'CREATE_NEVER'
         job.default_dataset = dataset
-        job.destination = table
+        job.destination = table_ref
         job.flatten_results = True
         job.priority = 'INTERACTIVE'
         job.use_query_cache = True
@@ -2467,7 +2462,6 @@ def test_exists_hit_w_alternate_client(self):
 
     def test_reload_w_bound_client(self):
         from google.cloud.bigquery.dataset import DatasetReference
-        from google.cloud.bigquery.table import Table
 
         PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)
         DS_ID = 'DATASET'
@@ -2479,8 +2473,7 @@ def test_reload_w_bound_client(self):
 
         dataset_ref = DatasetReference(self.PROJECT, DS_ID)
         table_ref = dataset_ref.table(DEST_TABLE)
-        table = Table(table_ref, client=client)
-        job.destination = table
+        job.destination = table_ref
 
         job.reload()
 
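
Because referenced_tables now yields client-free TableReference objects (the
assertIs(..., client) checks above are gone), callers that need table metadata
must resolve each reference themselves. A small hypothetical helper:

    from google.cloud import bigquery

    def fetch_referenced_tables(client, query_job):
        """Resolve a finished query job's TableReferences into Tables."""
        # After this commit each element is a plain TableReference.
        return [client.get_table(ref) for ref in query_job.referenced_tables]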
