diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py
index 5d3a70416c22..d851d91f5e46 100644
--- a/bigquery/google/cloud/bigquery/client.py
+++ b/bigquery/google/cloud/bigquery/client.py
@@ -42,7 +42,7 @@
 from google.cloud.bigquery.job import ExtractJob
 from google.cloud.bigquery.job import LoadJob
 from google.cloud.bigquery.job import QueryJob, QueryJobConfig
-from google.cloud.bigquery.query import QueryResults
+from google.cloud.bigquery.query import _QueryResults
 from google.cloud.bigquery.table import Table
 from google.cloud.bigquery.table import TableListItem
 from google.cloud.bigquery.table import TableReference
@@ -488,8 +488,8 @@ def _get_query_results(self, job_id, retry, project=None, timeout_ms=None):
             (Optional) number of milliseconds the the API call should wait for
             the query to complete before the request times out.
 
-        :rtype: :class:`google.cloud.bigquery.query.QueryResults`
-        :returns: a new ``QueryResults`` instance
+        :rtype: :class:`google.cloud.bigquery.query._QueryResults`
+        :returns: a new ``_QueryResults`` instance
         """
 
         extra_params = {'maxResults': 0}
@@ -507,7 +507,7 @@ def _get_query_results(self, job_id, retry, project=None, timeout_ms=None):
         # QueryJob.result()). So we don't need to poll here.
         resource = self._call_api(
             retry, method='GET', path=path, query_params=extra_params)
-        return QueryResults.from_api_repr(resource)
+        return _QueryResults.from_api_repr(resource)
 
     def job_from_resource(self, resource):
         """Detect correct job type from resource and instantiate.
diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py
index f4dc32e7e101..5df7117d5e8b 100644
--- a/bigquery/google/cloud/bigquery/dbapi/cursor.py
+++ b/bigquery/google/cloud/bigquery/dbapi/cursor.py
@@ -86,7 +86,7 @@ def _set_rowcount(self, query_results):
         of modified rows.
 
         :type query_results:
-            :class:`~google.cloud.bigquery.query.QueryResults`
+            :class:`~google.cloud.bigquery.query._QueryResults`
         :param query_results: results of a query
         """
         total_rows = 0
@@ -156,7 +156,7 @@ def execute(self, operation, parameters=None, job_id=None):
         except google.cloud.exceptions.GoogleCloudError:
             raise exceptions.DatabaseError(self._query_job.errors)
 
-        query_results = self._query_job.query_results()
+        query_results = self._query_job._query_results
         self._set_rowcount(query_results)
         self._set_description(query_results.schema)
 
@@ -193,7 +193,7 @@ def _try_fetch(self, size=None):
             # TODO(tswast): pass in page size to list_rows based on arraysize
             rows_iter = client.list_rows(
                 self._query_job.destination,
-                selected_fields=self._query_job.query_results().schema)
+                selected_fields=self._query_job._query_results.schema)
             self._query_data = iter(rows_iter)
 
     def fetchone(self):
diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index 583538299766..9090f2b3c189 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -1867,20 +1867,6 @@ def undeclared_query_parameters(self):
 
         return parameters
 
-    def query_results(self, retry=DEFAULT_RETRY):
-        """Construct a QueryResults instance, bound to this job.
-
-        :type retry: :class:`google.api_core.retry.Retry`
-        :param retry: (Optional) How to retry the RPC.
-
-        :rtype: :class:`~google.cloud.bigquery.QueryResults`
-        :returns: results instance
-        """
-        if not self._query_results:
-            self._query_results = self._client._get_query_results(
-                self.job_id, retry, project=self.project)
-        return self._query_results
-
     def done(self, retry=DEFAULT_RETRY):
         """Refresh the job and checks if it is complete.
 
@@ -1945,7 +1931,10 @@ def result(self, timeout=None, retry=DEFAULT_RETRY):
         """
         super(QueryJob, self).result(timeout=timeout)
         # Return an iterator instead of returning the job.
-        schema = self.query_results().schema
+        if not self._query_results:
+            self._query_results = self._client._get_query_results(
+                self.job_id, retry, project=self.project)
+        schema = self._query_results.schema
         dest_table = self.destination
         return self._client.list_rows(dest_table, selected_fields=schema,
                                       retry=retry)
diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py
index 1d3009394c96..e3bd5c196bec 100644
--- a/bigquery/google/cloud/bigquery/query.py
+++ b/bigquery/google/cloud/bigquery/query.py
@@ -461,7 +461,7 @@ def __repr__(self):
         return 'StructQueryParameter{}'.format(self._key())
 
 
-class QueryResults(object):
+class _QueryResults(object):
     """Results of a query.
 
     See:
diff --git a/bigquery/tests/unit/test_dbapi_cursor.py b/bigquery/tests/unit/test_dbapi_cursor.py
index 542b053fae1a..d0ad5401cf85 100644
--- a/bigquery/tests/unit/test_dbapi_cursor.py
+++ b/bigquery/tests/unit/test_dbapi_cursor.py
@@ -51,21 +51,21 @@ def _mock_job(
         mock_job.error_result = None
         mock_job.state = 'DONE'
         mock_job.result.return_value = mock_job
+        mock_job._query_results = self._mock_results(
+            total_rows=total_rows, schema=schema,
+            num_dml_affected_rows=num_dml_affected_rows)
 
         if num_dml_affected_rows is None:
             mock_job.statement_type = None  # API sends back None for SELECT
         else:
             mock_job.statement_type = 'UPDATE'
 
-        mock_job.query_results.return_value = self._mock_results(
-            total_rows=total_rows, schema=schema,
-            num_dml_affected_rows=num_dml_affected_rows)
         return mock_job
 
     def _mock_results(
             self, total_rows=0, schema=None, num_dml_affected_rows=None):
         from google.cloud.bigquery import query
-        mock_results = mock.create_autospec(query.QueryResults)
+        mock_results = mock.create_autospec(query._QueryResults)
         mock_results.schema = schema
         mock_results.num_dml_affected_rows = num_dml_affected_rows
         mock_results.total_rows = total_rows
diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py
index e2065429ff57..40903a4bf059 100644
--- a/bigquery/tests/unit/test_job.py
+++ b/bigquery/tests/unit/test_job.py
@@ -2097,40 +2097,6 @@ def test_undeclared_query_parameters(self):
         self.assertEqual(struct.struct_types, {'count': 'INT64'})
         self.assertEqual(struct.struct_values, {'count': 123})
 
-    def test_query_results(self):
-        from google.cloud.bigquery.query import QueryResults
-
-        query_resource = {
-            'jobComplete': True,
-            'jobReference': {
-                'projectId': self.PROJECT,
-                'jobId': self.JOB_ID,
-            },
-        }
-        connection = _Connection(query_resource)
-        client = _make_client(self.PROJECT, connection=connection)
-        job = self._make_one(self.JOB_ID, self.QUERY, client)
-        results = job.query_results()
-        self.assertIsInstance(results, QueryResults)
-
-    def test_query_results_w_cached_value(self):
-        from google.cloud.bigquery.query import QueryResults
-
-        client = _make_client(project=self.PROJECT)
-        job = self._make_one(self.JOB_ID, self.QUERY, client)
-        resource = {
-            'jobReference': {
-                'projectId': self.PROJECT,
-                'jobId': self.JOB_ID,
-            },
-        }
-        query_results = QueryResults(resource)
-        job._query_results = query_results
-
-        results = job.query_results()
-
-        self.assertIs(results, query_results)
-
     def test_result(self):
         query_resource = {
             'jobComplete': True,
diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py
index 35def936946b..bce6d2cd726a 100644
--- a/bigquery/tests/unit/test_query.py
+++ b/bigquery/tests/unit/test_query.py
@@ -973,16 +973,16 @@ def test___repr__(self):
         self.assertIn("'field1': 'hello'", got)
 
 
-class TestQueryResults(unittest.TestCase):
+class Test_QueryResults(unittest.TestCase):
     PROJECT = 'project'
     JOB_ID = 'test-synchronous-query'
    TOKEN = 'TOKEN'
 
     @staticmethod
     def _get_target_class():
-        from google.cloud.bigquery.query import QueryResults
+        from google.cloud.bigquery.query import _QueryResults
 
-        return QueryResults
+        return _QueryResults
 
     def _make_one(self, *args, **kw):
         return self._get_target_class()(*args, **kw)
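
Usage note: with the public ``QueryJob.query_results()`` accessor removed, callers reach results through ``QueryJob.result()``, which now lazily populates the job's private ``_QueryResults`` cache before listing rows from the destination table. A minimal sketch of that call pattern, assuming the patched library; the client construction and sample query below are illustrative only, not part of this change:

    from google.cloud import bigquery

    client = bigquery.Client()              # assumes application default credentials
    job = client.query('SELECT 1 AS x')     # returns a QueryJob
    for row in job.result():                # result() fetches/caches _QueryResults, then lists rows
        print(row[0])                       # rows are read from the job's destination table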