BQ: rename XJob.name to XJob.job_id. #3962

Merged · 2 commits · Sep 15, 2017
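
This PR renames the `name` attribute of the asynchronous job classes (`_AsyncJob` and its subclasses `LoadJob`, `CopyJob`, `ExtractJob`, `QueryJob`) to `job_id`, matching the `jobId` field of the BigQuery REST API. A minimal sketch of the renamed surface, assuming a configured client from this era of `google-cloud-bigquery` (`run_async_query` was the async-query factory at the time; the job ID and query are illustrative):

```python
from google.cloud import bigquery

client = bigquery.Client()  # assumes default credentials and project

# The first argument is the job's identifier, previously exposed as
# ``job.name`` and after this PR as ``job.job_id``.
job = client.run_async_query('my-unique-job-id', 'SELECT 17')
job.begin()

print(job.job_id)  # 'my-unique-job-id'
print(job.path)    # '/projects/<project>/jobs/my-unique-job-id'
```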
78 changes: 39 additions & 39 deletions bigquery/google/cloud/bigquery/job.py
@@ -164,16 +164,15 @@ class WriteDisposition(_EnumProperty):
class _AsyncJob(google.api.core.future.polling.PollingFuture):
"""Base class for asynchronous jobs.

:type name: str
:param name: the name of the job
:type job_id: str
:param job_id: the job's ID in the project associated with the client.

:type client: :class:`google.cloud.bigquery.client.Client`
:param client: A client which holds credentials and project configuration
for the dataset (which requires a project).
:param client: A client which holds credentials and project configuration.
"""
def __init__(self, name, client):
def __init__(self, job_id, client):
super(_AsyncJob, self).__init__()
self.name = name
self.job_id = job_id
self._client = client
self._properties = {}
self._result_set = False
@@ -217,9 +216,9 @@ def path(self):
"""URL path for the job's APIs.

:rtype: str
:returns: the path based on project and job name.
:returns: the path based on project and job ID.
"""
return '/projects/%s/jobs/%s' % (self.project, self.name)
return '/projects/%s/jobs/%s' % (self.project, self.job_id)

@property
def etag(self):
@@ -367,21 +366,21 @@ def _get_resource_config(cls, resource):

:rtype: tuple
:returns: tuple (string, dict), where the first element is the
job name and the second contains job-specific configuration.
job ID and the second contains job-specific configuration.
:raises: :class:`KeyError` if the resource has no identifier, or
is missing the appropriate configuration.
"""
if ('jobReference' not in resource or
'jobId' not in resource['jobReference']):
raise KeyError('Resource lacks required identity information: '
'["jobReference"]["jobId"]')
name = resource['jobReference']['jobId']
job_id = resource['jobReference']['jobId']
if ('configuration' not in resource or
cls._JOB_TYPE not in resource['configuration']):
raise KeyError('Resource lacks required configuration: '
'["configuration"]["%s"]' % cls._JOB_TYPE)
config = resource['configuration'][cls._JOB_TYPE]
return name, config
return job_id, config

def begin(self, client=None):
"""API call: begin the job via a POST request
@@ -560,8 +559,9 @@ class _LoadConfiguration(object):
class LoadJob(_AsyncJob):
"""Asynchronous job for loading data into a table from remote URI.

:type name: str
:param name: the name of the job
:type job_id: str
:param job_id:
The job's ID, belonging to the project associated with the client.

:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be loaded.
@@ -766,7 +766,7 @@ def _build_resource(self):
resource = {
'jobReference': {
'projectId': self.project,
'jobId': self.name,
'jobId': self.job_id,
},
'configuration': {
self._JOB_TYPE: {
@@ -834,12 +834,12 @@ def from_api_repr(cls, resource, client):
:rtype: :class:`google.cloud.bigquery.job.LoadJob`
:returns: Job parsed from ``resource``.
"""
name, config = cls._get_resource_config(resource)
job_id, config = cls._get_resource_config(resource)
dest_config = config['destinationTable']
dataset = Dataset(dest_config['datasetId'], client)
destination = Table(dest_config['tableId'], dataset)
source_urls = config.get('sourceUris', ())
job = cls(name, destination, source_urls, client=client)
job = cls(job_id, destination, source_urls, client=client)
job._set_properties(resource)
return job

@@ -856,8 +856,8 @@ class _CopyConfiguration(object):
class CopyJob(_AsyncJob):
"""Asynchronous job: copy data into a table from other tables.

:type name: str
:param name: the name of the job
:type job_id: str
:param job_id: the job's ID, within the project belonging to ``client``.

:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be loaded.
@@ -872,8 +872,8 @@ class CopyJob(_AsyncJob):

_JOB_TYPE = 'copy'

def __init__(self, name, destination, sources, client):
super(CopyJob, self).__init__(name, client)
def __init__(self, job_id, destination, sources, client):
super(CopyJob, self).__init__(job_id, client)
self.destination = destination
self.sources = sources
self._configuration = _CopyConfiguration()
@@ -907,7 +907,7 @@ def _build_resource(self):
resource = {
'jobReference': {
'projectId': self.project,
'jobId': self.name,
'jobId': self.job_id,
},
'configuration': {
self._JOB_TYPE: {
@@ -949,7 +949,7 @@ def from_api_repr(cls, resource, client):
:rtype: :class:`google.cloud.bigquery.job.CopyJob`
:returns: Job parsed from ``resource``.
"""
name, config = cls._get_resource_config(resource)
job_id, config = cls._get_resource_config(resource)
dest_config = config['destinationTable']
dataset = Dataset(dest_config['datasetId'], client)
destination = Table(dest_config['tableId'], dataset)
@@ -964,7 +964,7 @@ def from_api_repr(cls, resource, client):
for source_config in source_configs:
dataset = Dataset(source_config['datasetId'], client)
sources.append(Table(source_config['tableId'], dataset))
job = cls(name, destination, sources, client=client)
job = cls(job_id, destination, sources, client=client)
job._set_properties(resource)
return job

@@ -983,8 +983,8 @@ class _ExtractConfiguration(object):
class ExtractJob(_AsyncJob):
"""Asynchronous job: extract data from a table into Cloud Storage.

:type name: str
:param name: the name of the job
:type job_id: str
:param job_id: the job's ID, within the project belonging to ``client``.

:type source: :class:`google.cloud.bigquery.table.Table`
:param source: Table from which data is to be extracted.
@@ -1000,8 +1000,8 @@ class ExtractJob(_AsyncJob):
"""
_JOB_TYPE = 'extract'

def __init__(self, name, source, destination_uris, client):
super(ExtractJob, self).__init__(name, client)
def __init__(self, job_id, source, destination_uris, client):
super(ExtractJob, self).__init__(job_id, client)
self.source = source
self.destination_uris = destination_uris
self._configuration = _ExtractConfiguration()
@@ -1065,7 +1065,7 @@ def _build_resource(self):
resource = {
'jobReference': {
'projectId': self.project,
'jobId': self.name,
'jobId': self.job_id,
},
'configuration': {
self._JOB_TYPE: {
@@ -1106,12 +1106,12 @@ def from_api_repr(cls, resource, client):
:rtype: :class:`google.cloud.bigquery.job.ExtractJob`
:returns: Job parsed from ``resource``.
"""
name, config = cls._get_resource_config(resource)
job_id, config = cls._get_resource_config(resource)
source_config = config['sourceTable']
dataset = Dataset(source_config['datasetId'], client)
source = Table(source_config['tableId'], dataset)
destination_uris = config['destinationUris']
job = cls(name, source, destination_uris, client=client)
job = cls(job_id, source, destination_uris, client=client)
job._set_properties(resource)
return job

@@ -1138,8 +1138,8 @@ class _AsyncQueryConfiguration(object):
class QueryJob(_AsyncJob):
"""Asynchronous job: query tables.

:type name: str
:param name: the name of the job
:type job_id: str
:param job_id: the job's ID, within the project belonging to ``client``.

:type query: str
:param query: SQL query string
@@ -1163,9 +1163,9 @@ class QueryJob(_AsyncJob):
_UDF_KEY = 'userDefinedFunctionResources'
_QUERY_PARAMETERS_KEY = 'queryParameters'

def __init__(self, name, query, client,
def __init__(self, job_id, query, client,
udf_resources=(), query_parameters=()):
super(QueryJob, self).__init__(name, client)
super(QueryJob, self).__init__(job_id, client)
self.query = query
self.udf_resources = udf_resources
self.query_parameters = query_parameters
@@ -1306,7 +1306,7 @@ def _build_resource(self):
resource = {
'jobReference': {
'projectId': self.project,
'jobId': self.name,
'jobId': self.job_id,
},
'configuration': {
self._JOB_TYPE: {
@@ -1399,9 +1399,9 @@ def from_api_repr(cls, resource, client):
:rtype: :class:`google.cloud.bigquery.job.QueryJob`
:returns: Job parsed from ``resource``.
"""
name, config = cls._get_resource_config(resource)
job_id, config = cls._get_resource_config(resource)
query = config['query']
job = cls(name, query, client=client)
job = cls(job_id, query, client=client)
job._set_properties(resource)
return job

@@ -1573,7 +1573,7 @@ def query_results(self):
:returns: results instance
"""
if not self._query_results:
self._query_results = self._client._get_query_results(self.name)
self._query_results = self._client._get_query_results(self.job_id)
return self._query_results

def done(self):
@@ -1585,7 +1585,7 @@ def done(self):
# Do not refresh if the state is already done, as the job will not
# change once complete.
if self.state != _DONE_STATE:
self._query_results = self._client._get_query_results(self.name)
self._query_results = self._client._get_query_results(self.job_id)

# Only reload the job once we know the query is complete.
# This will ensure that fields such as the destination table are
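All four `_build_resource` implementations in this file thread `self.job_id` into the same `jobReference` block. A sketch of the resource shape they emit, with hypothetical project and job IDs:

```python
# ``jobReference`` block shared by load/copy/extract/query jobs: the
# project comes from the client, the jobId now from ``job.job_id``.
resource = {
    'jobReference': {
        'projectId': 'my-project',
        'jobId': 'my-unique-job-id',
    },
    'configuration': {
        # per-type payload under 'load', 'copy', 'extract', or 'query'
    },
}
```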
2 changes: 1 addition & 1 deletion bigquery/google/cloud/bigquery/query.py
@@ -95,7 +95,7 @@ def from_query_job(cls, job):
instance = cls(job.query, job._client, job.udf_resources)
instance._job = job
job_ref = instance._properties.setdefault('jobReference', {})
job_ref['jobId'] = job.name
job_ref['jobId'] = job.job_id
if job.default_dataset is not None:
instance.default_dataset = job.default_dataset
if job.use_query_cache is not None:
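`from_query_job` performs the inverse mapping, seeding the results object's `jobReference` from the job that produced it. A hedged sketch, assuming the class defined in `query.py` is `QueryResults` and `job` is an existing `QueryJob`:

```python
# Hypothetical round trip: the results object records its originating job.
results = QueryResults.from_query_job(job)
assert results._properties['jobReference']['jobId'] == job.job_id
```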
6 changes: 3 additions & 3 deletions bigquery/tests/system.py
@@ -205,7 +205,7 @@ def test_create_table(self):
table.create()
self.to_delete.insert(0, table)
self.assertTrue(table.exists())
self.assertEqual(table.name, TABLE_NAME)
self.assertEqual(table.table_id, TABLE_NAME)

def test_list_tables(self):
DATASET_ID = _make_dataset_id('list_tables')
@@ -240,7 +240,7 @@ def test_list_tables(self):
all_tables = list(iterator)
self.assertIsNone(iterator.next_page_token)
created = [table for table in all_tables
if (table.name in tables_to_create and
if (table.table_id in tables_to_create and
table.dataset_id == DATASET_ID)]
self.assertEqual(len(created), len(tables_to_create))

@@ -1167,7 +1167,7 @@ def test_create_table_insert_fetch_nested_schema(self):
table.create()
self.to_delete.insert(0, table)
self.assertTrue(table.exists())
self.assertEqual(table.name, table_name)
self.assertEqual(table.table_id, table_name)

to_insert = []
# Data is in "JSON Lines" format, see http://jsonlines.org/
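The system-test changes track a parallel rename on the table side: `Table.name` became `Table.table_id`. A short sketch of the new assertion pattern, where `dataset` is a hypothetical `Dataset` handle from the same client:

```python
from google.cloud.bigquery.table import Table

# ``table_id`` replaces ``name`` as the table's identifier attribute.
table = Table('my_table', dataset)
assert table.table_id == 'my_table'
```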
18 changes: 9 additions & 9 deletions bigquery/tests/unit/test_client.py
@@ -333,7 +333,7 @@ def test_get_job_hit(self):
job = client.get_job(JOB_ID)

self.assertIsInstance(job, QueryJob)
self.assertEqual(job.name, JOB_ID)
self.assertEqual(job.job_id, JOB_ID)
self.assertEqual(job.create_disposition, 'CREATE_IF_NEEDED')
self.assertEqual(job.write_disposition, 'WRITE_TRUNCATE')

@@ -466,7 +466,7 @@ def test_list_jobs_defaults(self):
for found, expected in zip(jobs, DATA['jobs']):
name = expected['jobReference']['jobId']
self.assertIsInstance(found, JOB_TYPES[name])
self.assertEqual(found.name, name)
self.assertEqual(found.job_id, name)
self.assertEqual(token, TOKEN)

self.assertEqual(len(conn._requested), 1)
@@ -523,7 +523,7 @@ def test_list_jobs_load_job_wo_sourceUris(self):
for found, expected in zip(jobs, DATA['jobs']):
name = expected['jobReference']['jobId']
self.assertIsInstance(found, JOB_TYPES[name])
self.assertEqual(found.name, name)
self.assertEqual(found.job_id, name)
self.assertEqual(token, TOKEN)

self.assertEqual(len(conn._requested), 1)
@@ -579,7 +579,7 @@ def test_load_table_from_storage(self):
job = client.load_table_from_storage(JOB, destination, SOURCE_URI)
self.assertIsInstance(job, LoadJob)
self.assertIs(job._client, client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.job_id, JOB)
self.assertEqual(list(job.source_uris), [SOURCE_URI])
self.assertIs(job.destination, destination)

@@ -600,7 +600,7 @@ def test_copy_table(self):
job = client.copy_table(JOB, destination, source)
self.assertIsInstance(job, CopyJob)
self.assertIs(job._client, client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.job_id, JOB)
self.assertEqual(list(job.sources), [source])
self.assertIs(job.destination, destination)

@@ -620,7 +620,7 @@ def test_extract_table_to_storage(self):
job = client.extract_table_to_storage(JOB, source, DESTINATION)
self.assertIsInstance(job, ExtractJob)
self.assertIs(job._client, client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.job_id, JOB)
self.assertEqual(job.source, source)
self.assertEqual(list(job.destination_uris), [DESTINATION])

@@ -636,7 +636,7 @@ def test_run_async_query_defaults(self):
job = client.run_async_query(JOB, QUERY)
self.assertIsInstance(job, QueryJob)
self.assertIs(job._client, client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.job_id, JOB)
self.assertEqual(job.query, QUERY)
self.assertEqual(job.udf_resources, [])
self.assertEqual(job.query_parameters, [])
@@ -656,7 +656,7 @@ def test_run_async_w_udf_resources(self):
job = client.run_async_query(JOB, QUERY, udf_resources=udf_resources)
self.assertIsInstance(job, QueryJob)
self.assertIs(job._client, client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.job_id, JOB)
self.assertEqual(job.query, QUERY)
self.assertEqual(job.udf_resources, udf_resources)
self.assertEqual(job.query_parameters, [])
@@ -676,7 +676,7 @@ def test_run_async_w_query_parameters(self):
query_parameters=query_parameters)
self.assertIsInstance(job, QueryJob)
self.assertIs(job._client, client)
self.assertEqual(job.name, JOB)
self.assertEqual(job.job_id, JOB)
self.assertEqual(job.query, QUERY)
self.assertEqual(job.udf_resources, [])
self.assertEqual(job.query_parameters, query_parameters)
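Each factory-method test above pins the same contract: the identifier passed to the factory comes back as `job.job_id`, whatever the concrete job class. Condensed, with hypothetical identifiers and handles:

```python
job = client.load_table_from_storage('load-id', destination, SOURCE_URI)
assert job.job_id == 'load-id'

job = client.copy_table('copy-id', destination, source)
assert job.job_id == 'copy-id'

# ``list_jobs`` yields concrete job subclasses; each carries its job_id.
for job in client.list_jobs():
    print(type(job).__name__, job.job_id)
```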
2 changes: 1 addition & 1 deletion bigquery/tests/unit/test_query.py
@@ -262,7 +262,7 @@ def test_job_w_jobid(self):
self.assertIsInstance(job, QueryJob)
self.assertEqual(job.query, self.QUERY)
self.assertIs(job._client, client)
self.assertEqual(job.name, SERVER_GENERATED)
self.assertEqual(job.job_id, SERVER_GENERATED)
fetched_later = query.job
self.assertIs(fetched_later, job)

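The final test covers the server-assigned case: when the caller supplies no ID, `job_id` surfaces whatever identifier the backend generated. A hedged sketch using the synchronous-query API of this era (`run_sync_query`; the printed value is illustrative):

```python
query = client.run_sync_query('SELECT 17')
query.run()          # the service creates the job and assigns its ID
job = query.job      # QueryJob materialized from the server response
print(job.job_id)    # e.g. 'job_abc123', chosen by BigQuery
```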