diff --git a/tests/system/test_client.py b/tests/system/test_client.py
index 1437328a8..f4757e30f 100644
--- a/tests/system/test_client.py
+++ b/tests/system/test_client.py
@@ -1706,8 +1706,8 @@ def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
 
         cursor.execute(
             """
-            SELECT id, `by`, time_ts
-            FROM `bigquery-public-data.hacker_news.comments`
+            SELECT id, `by`, timestamp
+            FROM `bigquery-public-data.hacker_news.full`
             ORDER BY `id` ASC
             LIMIT 100000
             """
@@ -1717,27 +1717,28 @@ def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
 
         field_name = operator.itemgetter(0)
         fetched_data = [sorted(row.items(), key=field_name) for row in result_rows]
-
         # Since DB API is not thread safe, only a single result stream should be
         # requested by the BQ storage client, meaning that results should arrive
         # in the sorted order.
+
         expected_data = [
             [
-                ("by", "sama"),
-                ("id", 15),
-                ("time_ts", datetime.datetime(2006, 10, 9, 19, 51, 1, tzinfo=UTC)),
+                ("by", "pg"),
+                ("id", 1),
+                ("timestamp", datetime.datetime(2006, 10, 9, 18, 21, 51, tzinfo=UTC)),
             ],
             [
-                ("by", "pg"),
-                ("id", 17),
-                ("time_ts", datetime.datetime(2006, 10, 9, 19, 52, 45, tzinfo=UTC)),
+                ("by", "phyllis"),
+                ("id", 2),
+                ("timestamp", datetime.datetime(2006, 10, 9, 18, 30, 28, tzinfo=UTC)),
             ],
             [
-                ("by", "pg"),
-                ("id", 22),
-                ("time_ts", datetime.datetime(2006, 10, 10, 2, 18, 22, tzinfo=UTC)),
+                ("by", "phyllis"),
+                ("id", 3),
+                ("timestamp", datetime.datetime(2006, 10, 9, 18, 40, 33, tzinfo=UTC)),
             ],
         ]
+
         self.assertEqual(fetched_data, expected_data)
 
     def test_dbapi_dry_run_query(self):
@@ -1769,8 +1770,8 @@ def test_dbapi_connection_does_not_leak_sockets(self):
 
         cursor.execute(
             """
-            SELECT id, `by`, time_ts
-            FROM `bigquery-public-data.hacker_news.comments`
+            SELECT id, `by`, timestamp
+            FROM `bigquery-public-data.hacker_news.full`
             ORDER BY `id` ASC
             LIMIT 100000
             """
diff --git a/tests/system/test_pandas.py b/tests/system/test_pandas.py
index ea8cc6d63..726b68f7c 100644
--- a/tests/system/test_pandas.py
+++ b/tests/system/test_pandas.py
@@ -740,8 +740,8 @@ def test_load_table_from_dataframe_w_explicit_schema_source_format_csv_floats(
 
 def test_query_results_to_dataframe(bigquery_client):
     QUERY = """
-        SELECT id, author, time_ts, dead
-        FROM `bigquery-public-data.hacker_news.comments`
+        SELECT id, `by`, timestamp, dead
+        FROM `bigquery-public-data.hacker_news.full`
         LIMIT 10
     """
 
@@ -749,12 +749,12 @@ def test_query_results_to_dataframe(bigquery_client):
 
     assert isinstance(df, pandas.DataFrame)
     assert len(df) == 10  # verify the number of rows
-    column_names = ["id", "author", "time_ts", "dead"]
+    column_names = ["id", "by", "timestamp", "dead"]
     assert list(df) == column_names  # verify the column names
     exp_datatypes = {
         "id": int,
-        "author": str,
-        "time_ts": pandas.Timestamp,
+        "by": str,
+        "timestamp": pandas.Timestamp,
         "dead": bool,
     }
     for _, row in df.iterrows():
@@ -766,8 +766,8 @@ def test_query_results_to_dataframe(bigquery_client):
 
 def test_query_results_to_dataframe_w_bqstorage(bigquery_client):
     query = """
-        SELECT id, author, time_ts, dead
-        FROM `bigquery-public-data.hacker_news.comments`
+        SELECT id, `by`, timestamp, dead
+        FROM `bigquery-public-data.hacker_news.full`
         LIMIT 10
     """
 
@@ -779,12 +779,12 @@ def test_query_results_to_dataframe_w_bqstorage(bigquery_client):
 
     assert isinstance(df, pandas.DataFrame)
     assert len(df) == 10  # verify the number of rows
-    column_names = ["id", "author", "time_ts", "dead"]
+    column_names = ["id", "by", "timestamp", "dead"]
     assert list(df) == column_names
     exp_datatypes = {
         "id": int,
-        "author": str,
-        "time_ts": pandas.Timestamp,
+        "by": str,
+        "timestamp": pandas.Timestamp,
         "dead": bool,
     }
     for index, row in df.iterrows():