diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index da6962daec1b..76a7d476cf6b 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -164,16 +164,15 @@ class WriteDisposition(_EnumProperty):
 class _AsyncJob(google.api.core.future.polling.PollingFuture):
     """Base class for asynchronous jobs.
 
-    :type name: str
-    :param name: the name of the job
+    :type job_id: str
+    :param job_id: the job's ID in the project associated with the client.
 
     :type client: :class:`google.cloud.bigquery.client.Client`
-    :param client: A client which holds credentials and project configuration
-                   for the dataset (which requires a project).
+    :param client: A client which holds credentials and project configuration.
     """
-    def __init__(self, name, client):
+    def __init__(self, job_id, client):
         super(_AsyncJob, self).__init__()
-        self.name = name
+        self.job_id = job_id
         self._client = client
         self._properties = {}
         self._result_set = False
@@ -217,9 +216,9 @@ def path(self):
         """URL path for the job's APIs.
 
         :rtype: str
-        :returns: the path based on project and job name.
+        :returns: the path based on project and job ID.
         """
-        return '/projects/%s/jobs/%s' % (self.project, self.name)
+        return '/projects/%s/jobs/%s' % (self.project, self.job_id)
 
     @property
     def etag(self):
@@ -367,7 +366,7 @@ def _get_resource_config(cls, resource):
 
         :rtype: dict
         :returns: tuple (string, dict), where the first element is the
-                  job name and the second contains job-specific configuration.
+                  job ID and the second contains job-specific configuration.
         :raises: :class:`KeyError` if the resource has no identifier, or is
                  missing the appropriate configuration.
         """
@@ -375,13 +374,13 @@ def _get_resource_config(cls, resource):
                 'jobId' not in resource['jobReference']):
             raise KeyError('Resource lacks required identity information: '
                            '["jobReference"]["jobId"]')
-        name = resource['jobReference']['jobId']
+        job_id = resource['jobReference']['jobId']
         if ('configuration' not in resource or
                 cls._JOB_TYPE not in resource['configuration']):
             raise KeyError('Resource lacks required configuration: '
                            '["configuration"]["%s"]' % cls._JOB_TYPE)
         config = resource['configuration'][cls._JOB_TYPE]
-        return name, config
+        return job_id, config
 
     def begin(self, client=None):
         """API call: begin the job via a POST request
@@ -560,8 +559,9 @@ class _LoadConfiguration(object):
 class LoadJob(_AsyncJob):
     """Asynchronous job for loading data into a table from remote URI.
 
-    :type name: str
-    :param name: the name of the job
+    :type job_id: str
+    :param job_id:
+        The job's ID, belonging to the project associated with the client.
 
     :type destination: :class:`google.cloud.bigquery.table.Table`
     :param destination: Table into which data is to be loaded.
@@ -766,7 +766,7 @@ def _build_resource(self):
         resource = {
             'jobReference': {
                 'projectId': self.project,
-                'jobId': self.name,
+                'jobId': self.job_id,
             },
             'configuration': {
                 self._JOB_TYPE: {
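For callers, the rename is mechanical: code that read `job.name` now reads `job.job_id`. A minimal sketch of the `LoadJob` surface this patch touches, assuming the 0.27-era client API exercised by the tests further down; the project, dataset, table, and bucket names are placeholders, and credentials are resolved from the environment:

```python
from google.cloud import bigquery

client = bigquery.Client(project='my-project')            # placeholder project
destination = client.dataset('my_dataset').table('person_ages')

# The first positional argument is the job ID (formerly the job "name").
job = client.load_table_from_storage(
    'load-job-1234', destination, 'gs://my-bucket/people.csv')
job.begin()

assert job.job_id == 'load-job-1234'   # was: job.name
```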
""" - name, config = cls._get_resource_config(resource) + job_id, config = cls._get_resource_config(resource) dest_config = config['destinationTable'] dataset = Dataset(dest_config['datasetId'], client) destination = Table(dest_config['tableId'], dataset) source_urls = config.get('sourceUris', ()) - job = cls(name, destination, source_urls, client=client) + job = cls(job_id, destination, source_urls, client=client) job._set_properties(resource) return job @@ -856,8 +856,8 @@ class _CopyConfiguration(object): class CopyJob(_AsyncJob): """Asynchronous job: copy data into a table from other tables. - :type name: str - :param name: the name of the job + :type job_id: str + :param job_id: the job's ID, within the project belonging to ``client``. :type destination: :class:`google.cloud.bigquery.table.Table` :param destination: Table into which data is to be loaded. @@ -872,8 +872,8 @@ class CopyJob(_AsyncJob): _JOB_TYPE = 'copy' - def __init__(self, name, destination, sources, client): - super(CopyJob, self).__init__(name, client) + def __init__(self, job_id, destination, sources, client): + super(CopyJob, self).__init__(job_id, client) self.destination = destination self.sources = sources self._configuration = _CopyConfiguration() @@ -907,7 +907,7 @@ def _build_resource(self): resource = { 'jobReference': { 'projectId': self.project, - 'jobId': self.name, + 'jobId': self.job_id, }, 'configuration': { self._JOB_TYPE: { @@ -949,7 +949,7 @@ def from_api_repr(cls, resource, client): :rtype: :class:`google.cloud.bigquery.job.CopyJob` :returns: Job parsed from ``resource``. """ - name, config = cls._get_resource_config(resource) + job_id, config = cls._get_resource_config(resource) dest_config = config['destinationTable'] dataset = Dataset(dest_config['datasetId'], client) destination = Table(dest_config['tableId'], dataset) @@ -964,7 +964,7 @@ def from_api_repr(cls, resource, client): for source_config in source_configs: dataset = Dataset(source_config['datasetId'], client) sources.append(Table(source_config['tableId'], dataset)) - job = cls(name, destination, sources, client=client) + job = cls(job_id, destination, sources, client=client) job._set_properties(resource) return job @@ -983,8 +983,8 @@ class _ExtractConfiguration(object): class ExtractJob(_AsyncJob): """Asynchronous job: extract data from a table into Cloud Storage. - :type name: str - :param name: the name of the job + :type job_id: str + :param job_id: the job's ID, within the project belonging to ``client``. :type source: :class:`google.cloud.bigquery.table.Table` :param source: Table into which data is to be loaded. @@ -1000,8 +1000,8 @@ class ExtractJob(_AsyncJob): """ _JOB_TYPE = 'extract' - def __init__(self, name, source, destination_uris, client): - super(ExtractJob, self).__init__(name, client) + def __init__(self, job_id, source, destination_uris, client): + super(ExtractJob, self).__init__(job_id, client) self.source = source self.destination_uris = destination_uris self._configuration = _ExtractConfiguration() @@ -1065,7 +1065,7 @@ def _build_resource(self): resource = { 'jobReference': { 'projectId': self.project, - 'jobId': self.name, + 'jobId': self.job_id, }, 'configuration': { self._JOB_TYPE: { @@ -1106,12 +1106,12 @@ def from_api_repr(cls, resource, client): :rtype: :class:`google.cloud.bigquery.job.ExtractJob` :returns: Job parsed from ``resource``. 
""" - name, config = cls._get_resource_config(resource) + job_id, config = cls._get_resource_config(resource) source_config = config['sourceTable'] dataset = Dataset(source_config['datasetId'], client) source = Table(source_config['tableId'], dataset) destination_uris = config['destinationUris'] - job = cls(name, source, destination_uris, client=client) + job = cls(job_id, source, destination_uris, client=client) job._set_properties(resource) return job @@ -1138,8 +1138,8 @@ class _AsyncQueryConfiguration(object): class QueryJob(_AsyncJob): """Asynchronous job: query tables. - :type name: str - :param name: the name of the job + :type job_id: str + :param job_id: the job's ID, within the project belonging to ``client``. :type query: str :param query: SQL query string @@ -1163,9 +1163,9 @@ class QueryJob(_AsyncJob): _UDF_KEY = 'userDefinedFunctionResources' _QUERY_PARAMETERS_KEY = 'queryParameters' - def __init__(self, name, query, client, + def __init__(self, job_id, query, client, udf_resources=(), query_parameters=()): - super(QueryJob, self).__init__(name, client) + super(QueryJob, self).__init__(job_id, client) self.query = query self.udf_resources = udf_resources self.query_parameters = query_parameters @@ -1306,7 +1306,7 @@ def _build_resource(self): resource = { 'jobReference': { 'projectId': self.project, - 'jobId': self.name, + 'jobId': self.job_id, }, 'configuration': { self._JOB_TYPE: { @@ -1399,9 +1399,9 @@ def from_api_repr(cls, resource, client): :rtype: :class:`google.cloud.bigquery.job.RunAsyncQueryJob` :returns: Job parsed from ``resource``. """ - name, config = cls._get_resource_config(resource) + job_id, config = cls._get_resource_config(resource) query = config['query'] - job = cls(name, query, client=client) + job = cls(job_id, query, client=client) job._set_properties(resource) return job @@ -1573,7 +1573,7 @@ def query_results(self): :returns: results instance """ if not self._query_results: - self._query_results = self._client._get_query_results(self.name) + self._query_results = self._client._get_query_results(self.job_id) return self._query_results def done(self): @@ -1585,7 +1585,7 @@ def done(self): # Do not refresh is the state is already done, as the job will not # change once complete. if self.state != _DONE_STATE: - self._query_results = self._client._get_query_results(self.name) + self._query_results = self._client._get_query_results(self.job_id) # Only reload the job once we know the query is complete. 
            # This will ensure that fields such as the destination table are
diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py
index fa03d373674d..7abbbec76b9b 100644
--- a/bigquery/google/cloud/bigquery/query.py
+++ b/bigquery/google/cloud/bigquery/query.py
@@ -95,7 +95,7 @@ def from_query_job(cls, job):
         instance = cls(job.query, job._client, job.udf_resources)
         instance._job = job
         job_ref = instance._properties.setdefault('jobReference', {})
-        job_ref['jobId'] = job.name
+        job_ref['jobId'] = job.job_id
         if job.default_dataset is not None:
             instance.default_dataset = job.default_dataset
         if job.use_query_cache is not None:
diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py
index e530f14c8dc0..980001a992ed 100644
--- a/bigquery/tests/system.py
+++ b/bigquery/tests/system.py
@@ -205,7 +205,7 @@ def test_create_table(self):
         table.create()
         self.to_delete.insert(0, table)
         self.assertTrue(table.exists())
-        self.assertEqual(table.name, TABLE_NAME)
+        self.assertEqual(table.table_id, TABLE_NAME)
 
     def test_list_tables(self):
         DATASET_ID = _make_dataset_id('list_tables')
@@ -240,7 +240,7 @@ def test_list_tables(self):
         all_tables = list(iterator)
         self.assertIsNone(iterator.next_page_token)
         created = [table for table in all_tables
-                   if (table.name in tables_to_create and
+                   if (table.table_id in tables_to_create and
                        table.dataset_id == DATASET_ID)]
         self.assertEqual(len(created), len(tables_to_create))
 
@@ -1167,7 +1167,7 @@ def test_create_table_insert_fetch_nested_schema(self):
         table.create()
         self.to_delete.insert(0, table)
         self.assertTrue(table.exists())
-        self.assertEqual(table.name, table_name)
+        self.assertEqual(table.table_id, table_name)
 
         to_insert = []
         # Data is in "JSON Lines" format, see http://jsonlines.org/
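The system tests also pick up the parallel `Table.name` -> `Table.table_id` rename from the same release, so their assertions compare `table.table_id` against the expected identifier. For illustration, with placeholder names and the schema omitted:

```python
from google.cloud import bigquery

client = bigquery.Client(project='my-project')            # placeholder project
table = client.dataset('my_dataset').table('person_ages')

assert table.table_id == 'person_ages'   # was: table.name
```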
diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py
index fffffb9b2b25..cb2e476e3f99 100644
--- a/bigquery/tests/unit/test_client.py
+++ b/bigquery/tests/unit/test_client.py
@@ -333,7 +333,7 @@ def test_get_job_hit(self):
         job = client.get_job(JOB_ID)
 
         self.assertIsInstance(job, QueryJob)
-        self.assertEqual(job.name, JOB_ID)
+        self.assertEqual(job.job_id, JOB_ID)
         self.assertEqual(job.create_disposition, 'CREATE_IF_NEEDED')
         self.assertEqual(job.write_disposition, 'WRITE_TRUNCATE')
 
@@ -466,7 +466,7 @@ def test_list_jobs_defaults(self):
         for found, expected in zip(jobs, DATA['jobs']):
             name = expected['jobReference']['jobId']
             self.assertIsInstance(found, JOB_TYPES[name])
-            self.assertEqual(found.name, name)
+            self.assertEqual(found.job_id, name)
         self.assertEqual(token, TOKEN)
 
         self.assertEqual(len(conn._requested), 1)
@@ -523,7 +523,7 @@ def test_list_jobs_load_job_wo_sourceUris(self):
         for found, expected in zip(jobs, DATA['jobs']):
             name = expected['jobReference']['jobId']
             self.assertIsInstance(found, JOB_TYPES[name])
-            self.assertEqual(found.name, name)
+            self.assertEqual(found.job_id, name)
         self.assertEqual(token, TOKEN)
 
         self.assertEqual(len(conn._requested), 1)
@@ -579,7 +579,7 @@ def test_load_table_from_storage(self):
         job = client.load_table_from_storage(JOB, destination, SOURCE_URI)
         self.assertIsInstance(job, LoadJob)
         self.assertIs(job._client, client)
-        self.assertEqual(job.name, JOB)
+        self.assertEqual(job.job_id, JOB)
         self.assertEqual(list(job.source_uris), [SOURCE_URI])
         self.assertIs(job.destination, destination)
 
@@ -600,7 +600,7 @@ def test_copy_table(self):
         job = client.copy_table(JOB, destination, source)
         self.assertIsInstance(job, CopyJob)
         self.assertIs(job._client, client)
-        self.assertEqual(job.name, JOB)
+        self.assertEqual(job.job_id, JOB)
         self.assertEqual(list(job.sources), [source])
         self.assertIs(job.destination, destination)
 
@@ -620,7 +620,7 @@ def test_extract_table_to_storage(self):
         job = client.extract_table_to_storage(JOB, source, DESTINATION)
         self.assertIsInstance(job, ExtractJob)
         self.assertIs(job._client, client)
-        self.assertEqual(job.name, JOB)
+        self.assertEqual(job.job_id, JOB)
         self.assertEqual(job.source, source)
         self.assertEqual(list(job.destination_uris), [DESTINATION])
 
@@ -636,7 +636,7 @@ def test_run_async_query_defaults(self):
         job = client.run_async_query(JOB, QUERY)
         self.assertIsInstance(job, QueryJob)
         self.assertIs(job._client, client)
-        self.assertEqual(job.name, JOB)
+        self.assertEqual(job.job_id, JOB)
         self.assertEqual(job.query, QUERY)
         self.assertEqual(job.udf_resources, [])
         self.assertEqual(job.query_parameters, [])
@@ -656,7 +656,7 @@ def test_run_async_w_udf_resources(self):
         job = client.run_async_query(JOB, QUERY, udf_resources=udf_resources)
         self.assertIsInstance(job, QueryJob)
         self.assertIs(job._client, client)
-        self.assertEqual(job.name, JOB)
+        self.assertEqual(job.job_id, JOB)
         self.assertEqual(job.query, QUERY)
         self.assertEqual(job.udf_resources, udf_resources)
         self.assertEqual(job.query_parameters, [])
@@ -676,7 +676,7 @@ def test_run_async_w_query_parameters(self):
                                      query_parameters=query_parameters)
         self.assertIsInstance(job, QueryJob)
         self.assertIs(job._client, client)
-        self.assertEqual(job.name, JOB)
+        self.assertEqual(job.job_id, JOB)
         self.assertEqual(job.query, QUERY)
         self.assertEqual(job.udf_resources, [])
         self.assertEqual(job.query_parameters, query_parameters)
diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py
index 76d5057f6450..0bf0c17c3102 100644
--- a/bigquery/tests/unit/test_query.py
+++ b/bigquery/tests/unit/test_query.py
@@ -262,7 +262,7 @@ def test_job_w_jobid(self):
         self.assertIsInstance(job, QueryJob)
         self.assertEqual(job.query, self.QUERY)
         self.assertIs(job._client, client)
-        self.assertEqual(job.name, SERVER_GENERATED)
+        self.assertEqual(job.job_id, SERVER_GENERATED)
         fetched_later = query.job
         self.assertIs(fetched_later, job)
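Because `_get_resource_config` now returns the job ID as the first element of its tuple, rehydrating a job from a stored API resource works as before; only the attribute on the result is new. A sketch with a hand-built resource (all values are placeholders; a real resource would carry additional fields such as `statistics`):

```python
from google.cloud import bigquery
from google.cloud.bigquery.job import QueryJob

client = bigquery.Client(project='my-project')            # placeholder project
resource = {
    'jobReference': {'projectId': 'my-project', 'jobId': 'query-job-1234'},
    'configuration': {'query': {'query': 'SELECT 1'}},
}

job = QueryJob.from_api_repr(resource, client)
assert job.job_id == 'query-job-1234'   # was: job.name
```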