Rename job classes (#3797)
* Rename class: 'jobs.LoadTableFromStorageJob' -> 'jobs.LoadJob'.

* Rename class: 'jobs.ExtractTableToStorageJob' -> 'jobs.ExtractJob'.
tseaver authored and tswast committed Aug 18, 2017
1 parent cd3a05d commit b06ab3f
Showing 5 changed files with 46 additions and 46 deletions.
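
For code written against the old names, this is a pure rename: constructor signatures and behavior are unchanged. A minimal before/after sketch of the import change (illustrative; nothing in this diff adds compatibility aliases, so the old names should raise ImportError after upgrading):

    # Before this commit:
    from google.cloud.bigquery.job import ExtractTableToStorageJob
    from google.cloud.bigquery.job import LoadTableFromStorageJob

    # After this commit:
    from google.cloud.bigquery.job import ExtractJob
    from google.cloud.bigquery.job import LoadJob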
26 changes: 12 additions & 14 deletions bigquery/google/cloud/bigquery/client.py
@@ -19,8 +19,8 @@
from google.cloud.bigquery._http import Connection
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.job import CopyJob
-from google.cloud.bigquery.job import ExtractTableToStorageJob
-from google.cloud.bigquery.job import LoadTableFromStorageJob
+from google.cloud.bigquery.job import ExtractJob
+from google.cloud.bigquery.job import LoadJob
from google.cloud.bigquery.job import QueryJob
from google.cloud.bigquery.query import QueryResults

@@ -169,20 +169,20 @@ def job_from_resource(self, resource):
        :param resource: one job resource from API response

        :rtype: One of:
-                :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`,
+                :class:`google.cloud.bigquery.job.LoadJob`,
                :class:`google.cloud.bigquery.job.CopyJob`,
-                :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`,
+                :class:`google.cloud.bigquery.job.ExtractJob`,
                :class:`google.cloud.bigquery.job.QueryJob`,
                :class:`google.cloud.bigquery.job.RunSyncQueryJob`
        :returns: the job instance, constructed via the resource
        """
        config = resource['configuration']
        if 'load' in config:
-            return LoadTableFromStorageJob.from_api_repr(resource, self)
+            return LoadJob.from_api_repr(resource, self)
        elif 'copy' in config:
            return CopyJob.from_api_repr(resource, self)
        elif 'extract' in config:
-            return ExtractTableToStorageJob.from_api_repr(resource, self)
+            return ExtractJob.from_api_repr(resource, self)
        elif 'query' in config:
            return QueryJob.from_api_repr(resource, self)
        raise ValueError('Cannot parse job resource')
@@ -253,11 +253,10 @@ def load_table_from_storage(self, job_name, destination, *source_uris):
        :param source_uris: URIs of data files to be loaded; in format
                            ``gs://<bucket_name>/<object_name_or_glob>``.

-        :rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
-        :returns: a new ``LoadTableFromStorageJob`` instance
+        :rtype: :class:`google.cloud.bigquery.job.LoadJob`
+        :returns: a new ``LoadJob`` instance
        """
-        return LoadTableFromStorageJob(job_name, destination, source_uris,
-                                       client=self)
+        return LoadJob(job_name, destination, source_uris, client=self)

    def copy_table(self, job_name, destination, *sources):
        """Construct a job for copying one or more tables into another table.
@@ -296,11 +295,10 @@ def extract_table_to_storage(self, job_name, source, *destination_uris):
                                 table data is to be extracted; in format
                                 ``gs://<bucket_name>/<object_name_or_glob>``.

-        :rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
-        :returns: a new ``ExtractTableToStorageJob`` instance
+        :rtype: :class:`google.cloud.bigquery.job.ExtractJob`
+        :returns: a new ``ExtractJob`` instance
        """
-        return ExtractTableToStorageJob(job_name, source, destination_uris,
-                                        client=self)
+        return ExtractJob(job_name, source, destination_uris, client=self)

    def run_async_query(self, job_name, query,
                        udf_resources=(), query_parameters=()):
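
The renamed classes flow through the Client factory methods unchanged; only the returned types' names differ. A minimal usage sketch, assuming default credentials are available (project, dataset, table, and job names below are illustrative, not from this commit):

    from google.cloud.bigquery.client import Client
    from google.cloud.bigquery.job import ExtractJob, LoadJob

    client = Client(project='my-project')    # assumes default credentials
    dataset = client.dataset('my_dataset')

    load_job = client.load_table_from_storage(
        'load-job-1', dataset.table('people'), 'gs://my-bucket/people.csv')
    assert isinstance(load_job, LoadJob)     # was LoadTableFromStorageJob

    extract_job = client.extract_table_to_storage(
        'extract-job-1', dataset.table('people'),
        'gs://my-bucket/export-*.csv')
    assert isinstance(extract_job, ExtractJob)   # was ExtractTableToStorageJob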
20 changes: 11 additions & 9 deletions bigquery/google/cloud/bigquery/job.py
@@ -525,8 +525,8 @@ class _LoadConfiguration(object):
    _write_disposition = None


-class LoadTableFromStorageJob(_AsyncJob):
-    """Asynchronous job for loading data into a table from CloudStorage.
+class LoadJob(_AsyncJob):
+    """Asynchronous job for loading data into a table from remote URI.

    :type name: str
    :param name: the name of the job
@@ -535,8 +535,10 @@ class LoadTableFromStorageJob(_AsyncJob):
    :param destination: Table into which data is to be loaded.

    :type source_uris: sequence of string
-    :param source_uris: URIs of one or more data files to be loaded, in
-                        format ``gs://<bucket_name>/<object_name_or_glob>``.
+    :param source_uris:
+        URIs of one or more data files to be loaded. See
+        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceUris
+        for supported URI formats.

    :type client: :class:`google.cloud.bigquery.client.Client`
    :param client: A client which holds credentials and project configuration
@@ -550,7 +552,7 @@ class LoadTableFromStorageJob(_AsyncJob):
    _JOB_TYPE = 'load'

    def __init__(self, name, destination, source_uris, client, schema=()):
-        super(LoadTableFromStorageJob, self).__init__(name, client)
+        super(LoadJob, self).__init__(name, client)
        self.destination = destination
        self.source_uris = source_uris
        self._configuration = _LoadConfiguration()
@@ -775,7 +777,7 @@ def from_api_repr(cls, resource, client):
        :param client: Client which holds credentials and project
                       configuration for the dataset.

-        :rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
+        :rtype: :class:`google.cloud.bigquery.job.LoadJob`
        :returns: Job parsed from ``resource``.
        """
        name, config = cls._get_resource_config(resource)
@@ -919,7 +921,7 @@ class _ExtractConfiguration(object):
    _print_header = None


-class ExtractTableToStorageJob(_AsyncJob):
+class ExtractJob(_AsyncJob):
    """Asynchronous job: extract data from a table into Cloud Storage.

    :type name: str
@@ -940,7 +942,7 @@ class ExtractTableToStorageJob(_AsyncJob):
    _JOB_TYPE = 'extract'

    def __init__(self, name, source, destination_uris, client):
-        super(ExtractTableToStorageJob, self).__init__(name, client)
+        super(ExtractJob, self).__init__(name, client)
        self.source = source
        self.destination_uris = destination_uris
        self._configuration = _ExtractConfiguration()
@@ -1018,7 +1020,7 @@ def from_api_repr(cls, resource, client):
        :param client: Client which holds credentials and project
                       configuration for the dataset.

-        :rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
+        :rtype: :class:`google.cloud.bigquery.job.ExtractJob`
        :returns: Job parsed from ``resource``.
        """
        name, config = cls._get_resource_config(resource)
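
Direct construction also keeps its shape; only the class names change. A sketch under the same assumptions as above (identifiers illustrative; begin() submits the job to the API and reload() refreshes its server-side state):

    from google.cloud.bigquery.client import Client
    from google.cloud.bigquery.job import ExtractJob, LoadJob

    client = Client(project='my-project')    # assumes default credentials
    table = client.dataset('my_dataset').table('people')

    load_job = LoadJob(
        'load-job-2', table, ['gs://my-bucket/people.csv'], client)
    load_job.begin()     # submit to the BigQuery API
    load_job.reload()    # refresh state, e.g. 'RUNNING' or 'DONE'

    extract_job = ExtractJob(
        'extract-job-2', table, ['gs://my-bucket/export-*.csv'], client)
    extract_job.begin()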
2 changes: 1 addition & 1 deletion bigquery/google/cloud/bigquery/table.py
@@ -1120,7 +1120,7 @@ def upload_from_file(self,
        :type null_marker: str
        :param null_marker: Optional. A custom null marker (example: "\\N")

-        :rtype: :class:`~google.cloud.bigquery.jobs.LoadTableFromStorageJob`
+        :rtype: :class:`~google.cloud.bigquery.jobs.LoadJob`
        :returns: the job instance used to load the data (e.g., for
                  querying status). Note that the job is already started:
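
Because upload_from_file returns a job that is already started, callers go straight to polling. A hedged sketch (client, table, and file names illustrative; assumes the table's schema is already defined so the CSV load can proceed):

    import time

    from google.cloud.bigquery.client import Client

    client = Client(project='my-project')    # assumes default credentials
    table = client.dataset('my_dataset').table('people')

    with open('people.csv', 'rb') as csv_file:
        job = table.upload_from_file(csv_file, source_format='CSV')

    job.reload()                  # the returned LoadJob is already started
    while job.state != 'DONE':    # poll until the server marks it finished
        time.sleep(1)
        job.reload()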
20 changes: 10 additions & 10 deletions bigquery/tests/unit/test_client.py
@@ -210,9 +210,9 @@ def test_job_from_resource_unknown_type(self):

    def test_list_jobs_defaults(self):
        import six
-        from google.cloud.bigquery.job import LoadTableFromStorageJob
+        from google.cloud.bigquery.job import LoadJob
        from google.cloud.bigquery.job import CopyJob
-        from google.cloud.bigquery.job import ExtractTableToStorageJob
+        from google.cloud.bigquery.job import ExtractJob
        from google.cloud.bigquery.job import QueryJob

        PROJECT = 'PROJECT'
@@ -223,9 +223,9 @@ def test_list_jobs_defaults(self):
        SOURCE_URI = 'gs://test_bucket/src_object*'
        DESTINATION_URI = 'gs://test_bucket/dst_object*'
        JOB_TYPES = {
-            'load_job': LoadTableFromStorageJob,
+            'load_job': LoadJob,
            'copy_job': CopyJob,
-            'extract_job': ExtractTableToStorageJob,
+            'extract_job': ExtractJob,
            'query_job': QueryJob,
        }
        PATH = 'projects/%s/jobs' % PROJECT
@@ -342,13 +342,13 @@ def test_list_jobs_defaults(self):

    def test_list_jobs_load_job_wo_sourceUris(self):
        import six
-        from google.cloud.bigquery.job import LoadTableFromStorageJob
+        from google.cloud.bigquery.job import LoadJob

        PROJECT = 'PROJECT'
        DATASET = 'test_dataset'
        SOURCE_TABLE = 'source_table'
        JOB_TYPES = {
-            'load_job': LoadTableFromStorageJob,
+            'load_job': LoadJob,
        }
        PATH = 'projects/%s/jobs' % PROJECT
        TOKEN = 'TOKEN'
@@ -429,7 +429,7 @@ def test_list_jobs_explicit_missing(self):
                              'stateFilter': 'done'})

    def test_load_table_from_storage(self):
-        from google.cloud.bigquery.job import LoadTableFromStorageJob
+        from google.cloud.bigquery.job import LoadJob

        PROJECT = 'PROJECT'
        JOB = 'job_name'
@@ -442,7 +442,7 @@ def test_load_table_from_storage(self):
        dataset = client.dataset(DATASET)
        destination = dataset.table(DESTINATION)
        job = client.load_table_from_storage(JOB, destination, SOURCE_URI)
-        self.assertIsInstance(job, LoadTableFromStorageJob)
+        self.assertIsInstance(job, LoadJob)
        self.assertIs(job._client, client)
        self.assertEqual(job.name, JOB)
        self.assertEqual(list(job.source_uris), [SOURCE_URI])
@@ -470,7 +470,7 @@ def test_copy_table(self):
        self.assertIs(job.destination, destination)

    def test_extract_table_to_storage(self):
-        from google.cloud.bigquery.job import ExtractTableToStorageJob
+        from google.cloud.bigquery.job import ExtractJob

        PROJECT = 'PROJECT'
        JOB = 'job_name'
@@ -483,7 +483,7 @@ def test_extract_table_to_storage(self):
        dataset = client.dataset(DATASET)
        source = dataset.table(SOURCE)
        job = client.extract_table_to_storage(JOB, source, DESTINATION)
-        self.assertIsInstance(job, ExtractTableToStorageJob)
+        self.assertIsInstance(job, ExtractJob)
        self.assertIs(job._client, client)
        self.assertEqual(job.name, JOB)
        self.assertEqual(job.source, source)
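
The JOB_TYPES mappings exercised above mirror the dispatch in Client.job_from_resource, which keys off the sub-object present under 'configuration'. A trimmed, hypothetical resource shows the round trip (only keys the dispatcher and LoadJob.from_api_repr inspect are included; identifiers illustrative):

    from google.cloud.bigquery.client import Client
    from google.cloud.bigquery.job import LoadJob

    client = Client(project='my-project')    # assumes default credentials
    resource = {
        'jobReference': {'projectId': 'my-project', 'jobId': 'load-job-3'},
        'configuration': {
            'load': {
                'sourceUris': ['gs://my-bucket/people.csv'],
                'destinationTable': {
                    'projectId': 'my-project',
                    'datasetId': 'my_dataset',
                    'tableId': 'people',
                },
            },
        },
    }

    job = client.job_from_resource(resource)
    assert isinstance(job, LoadJob)    # a 'load' config dispatches to LoadJob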
24 changes: 12 additions & 12 deletions bigquery/tests/unit/test_job.py
@@ -143,24 +143,24 @@ def _verifyReadonlyResourceProperties(self, job, resource):
        self.assertIsNone(job.user_email)


-class TestLoadTableFromStorageJob(unittest.TestCase, _Base):
+class TestLoadJob(unittest.TestCase, _Base):
    JOB_TYPE = 'load'

    @staticmethod
    def _get_target_class():
-        from google.cloud.bigquery.job import LoadTableFromStorageJob
+        from google.cloud.bigquery.job import LoadJob

-        return LoadTableFromStorageJob
+        return LoadJob

    def _setUpConstants(self):
-        super(TestLoadTableFromStorageJob, self)._setUpConstants()
+        super(TestLoadJob, self)._setUpConstants()
        self.INPUT_FILES = 2
        self.INPUT_BYTES = 12345
        self.OUTPUT_BYTES = 23456
        self.OUTPUT_ROWS = 345

    def _makeResource(self, started=False, ended=False):
-        resource = super(TestLoadTableFromStorageJob, self)._makeResource(
+        resource = super(TestLoadJob, self)._makeResource(
            started, ended)
        config = resource['configuration']['load']
        config['sourceUris'] = [self.SOURCE1]
@@ -1110,19 +1110,19 @@ def test_reload_w_alternate_client(self):
        self._verifyResourceProperties(job, RESOURCE)


-class TestExtractTableToStorageJob(unittest.TestCase, _Base):
+class TestExtractJob(unittest.TestCase, _Base):
    JOB_TYPE = 'extract'
    SOURCE_TABLE = 'source_table'
    DESTINATION_URI = 'gs://bucket_name/object_name'

    @staticmethod
    def _get_target_class():
-        from google.cloud.bigquery.job import ExtractTableToStorageJob
+        from google.cloud.bigquery.job import ExtractJob

-        return ExtractTableToStorageJob
+        return ExtractJob

    def _makeResource(self, started=False, ended=False):
-        resource = super(TestExtractTableToStorageJob, self)._makeResource(
+        resource = super(TestExtractJob, self)._makeResource(
            started, ended)
        config = resource['configuration']['extract']
        config['sourceTable'] = {
@@ -2098,15 +2098,15 @@ def __init__(self, name=None):
    def name(self):
        if self._name is not None:
            return self._name
-        return TestLoadTableFromStorageJob.TABLE_NAME
+        return TestLoadJob.TABLE_NAME

    @property
    def project(self):
-        return TestLoadTableFromStorageJob.PROJECT
+        return TestLoadJob.PROJECT

    @property
    def dataset_name(self):
-        return TestLoadTableFromStorageJob.DS_NAME
+        return TestLoadJob.DS_NAME


class _Connection(object):
