chore!: remove google.cloud.bigquery_v2 code (#855)
Closes #814.

This is the first preview. Model types, disabling code generation, and the BigQuery ML classes still need to be addressed...
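For reviewers, a minimal before/after sketch of what the removal means for user code. The new names are taken from the diff below; the constructor keywords are assumed from the replacement `google.cloud.bigquery.standard_sql` module and are not guaranteed by this preview:

```python
# Before: the Standard SQL type wrappers were generated, protobuf-based classes.
# from google.cloud.bigquery_v2 import types as gapic_types
# int_type = gapic_types.StandardSqlDataType(type_kind="INT64")

# After: plain handwritten classes exported from the main package.
from google.cloud import bigquery

int_type = bigquery.StandardSqlDataType(
    type_kind=bigquery.StandardSqlTypeNames.INT64
)
# "user_id" is just an illustrative field name.
field = bigquery.StandardSqlField(name="user_id", type=int_type)
print(field.name, field.type.type_kind)
```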

**PR checklist:**
- [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea
- [x] Ensure the tests and linter pass
- [x] Code coverage does not decrease (if any source code was changed)
- [x] Appropriate docs were updated (if necessary)
plamut authored Sep 27, 2021
1 parent 2cb1c21 commit a7842b6
Showing 39 changed files with 1,327 additions and 2,628 deletions.
@@ -1,7 +1,7 @@
 Types for Google Cloud Bigquery v2 API
 ======================================
 
-.. automodule:: google.cloud.bigquery_v2.types
+.. automodule:: google.cloud.bigquery.standard_sql
     :members:
     :undoc-members:
     :show-inheritance:
docs/conf.py (3 changes: 0 additions & 3 deletions)
@@ -114,7 +114,6 @@
     "samples/AUTHORING_GUIDE.md",
     "samples/CONTRIBUTING.md",
     "samples/snippets/README.rst",
-    "bigquery_v2/services.rst",  # generated by the code generator
 ]
 
 # The reST default role (used for this markup: `text`) to use for all
@@ -364,8 +363,6 @@
     "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
     "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
     "grpc": ("https://grpc.github.io/grpc/python/", None),
-    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
-    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
     "pandas": ("http://pandas.pydata.org/pandas-docs/dev", None),
     "geopandas": ("https://geopandas.org/", None),
 }
docs/reference.rst (4 changes: 2 additions & 2 deletions)
@@ -197,9 +197,9 @@ Encryption Configuration
 Additional Types
 ================
 
-Protocol buffer classes for working with the Models API.
+Helper SQL type classes.
 
 .. toctree::
     :maxdepth: 2
 
-    bigquery_v2/types
+    bigquery/standard_sql
google/cloud/bigquery/__init__.py (15 changes: 13 additions & 2 deletions)
@@ -41,7 +41,7 @@
 from google.cloud.bigquery.enums import DecimalTargetType
 from google.cloud.bigquery.enums import KeyResultStatementKind
 from google.cloud.bigquery.enums import SqlTypeNames
-from google.cloud.bigquery.enums import StandardSqlDataTypes
+from google.cloud.bigquery.enums import StandardSqlTypeNames
 from google.cloud.bigquery.external_config import ExternalConfig
 from google.cloud.bigquery.external_config import BigtableOptions
 from google.cloud.bigquery.external_config import BigtableColumnFamily
@@ -77,6 +77,7 @@
 from google.cloud.bigquery.query import ArrayQueryParameterType
 from google.cloud.bigquery.query import ScalarQueryParameter
 from google.cloud.bigquery.query import ScalarQueryParameterType
+from google.cloud.bigquery.query import SqlParameterScalarTypes
 from google.cloud.bigquery.query import StructQueryParameter
 from google.cloud.bigquery.query import StructQueryParameterType
 from google.cloud.bigquery.query import UDFResource
@@ -87,6 +88,10 @@
 from google.cloud.bigquery.routine import RoutineReference
 from google.cloud.bigquery.routine import RoutineType
 from google.cloud.bigquery.schema import SchemaField
+from google.cloud.bigquery.standard_sql import StandardSqlDataType
+from google.cloud.bigquery.standard_sql import StandardSqlField
+from google.cloud.bigquery.standard_sql import StandardSqlStructType
+from google.cloud.bigquery.standard_sql import StandardSqlTableType
 from google.cloud.bigquery.table import PartitionRange
 from google.cloud.bigquery.table import RangePartitioning
 from google.cloud.bigquery.table import Row
@@ -108,6 +113,7 @@
     "StructQueryParameter",
     "ArrayQueryParameterType",
     "ScalarQueryParameterType",
+    "SqlParameterScalarTypes",
     "StructQueryParameterType",
     # Datasets
     "Dataset",
@@ -151,6 +157,11 @@
     "ScriptOptions",
     "TransactionInfo",
     "DEFAULT_RETRY",
+    # Standard SQL types
+    "StandardSqlDataType",
+    "StandardSqlField",
+    "StandardSqlStructType",
+    "StandardSqlTableType",
     # Enum Constants
     "enums",
     "AutoRowIDs",
@@ -168,7 +179,7 @@
     "SchemaUpdateOption",
     "SourceFormat",
     "SqlTypeNames",
-    "StandardSqlDataTypes",
+    "StandardSqlTypeNames",
     "WriteDisposition",
     # EncryptionConfiguration
     "EncryptionConfiguration",
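The four `standard_sql` classes and `SqlParameterScalarTypes` become part of the package's public surface. A hedged sketch of how the struct-related helpers compose (keyword names assumed from the new `standard_sql` module, not shown in this hunk):

```python
from google.cloud import bigquery

# STRUCT<name STRING, score FLOAT64>, built from the newly exported helpers.
person = bigquery.StandardSqlDataType(
    type_kind=bigquery.StandardSqlTypeNames.STRUCT,
    struct_type=bigquery.StandardSqlStructType(
        fields=[
            bigquery.StandardSqlField(
                name="name",
                type=bigquery.StandardSqlDataType(
                    type_kind=bigquery.StandardSqlTypeNames.STRING
                ),
            ),
            bigquery.StandardSqlField(
                name="score",
                type=bigquery.StandardSqlDataType(
                    type_kind=bigquery.StandardSqlTypeNames.FLOAT64
                ),
            ),
        ]
    ),
)
print(person.struct_type.fields[0].name)  # "name"
```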
google/cloud/bigquery/dbapi/_helpers.py (6 changes: 3 additions & 3 deletions)
@@ -22,7 +22,7 @@
 import typing
 
 from google.cloud import bigquery
-from google.cloud.bigquery import table, enums, query
+from google.cloud.bigquery import table, query
 from google.cloud.bigquery.dbapi import exceptions
 
 
@@ -48,7 +48,7 @@ def _parameter_type(name, value, query_parameter_type=None, value_doc=""):
         query_parameter_type = type_parameters_re.sub("", query_parameter_type)
         try:
             parameter_type = getattr(
-                enums.SqlParameterScalarTypes, query_parameter_type.upper()
+                query.SqlParameterScalarTypes, query_parameter_type.upper()
             )._type
         except AttributeError:
             raise exceptions.ProgrammingError(
@@ -185,7 +185,7 @@ def _parse_type(
     # Strip type parameters
     type_ = type_parameters_re.sub("", type_).strip()
     try:
-        type_ = getattr(enums.SqlParameterScalarTypes, type_.upper())
+        type_ = getattr(query.SqlParameterScalarTypes, type_.upper())
     except AttributeError:
         raise exceptions.ProgrammingError(
             f"The given parameter type, {type_},"
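The DB-API helpers keep resolving parameter type names the same way; only the class they resolve against moves from `enums` to `query`. Roughly, the lookup they perform is the following (`._type` is an internal attribute of `ScalarQueryParameterType`, shown here only to mirror the helper):

```python
from google.cloud.bigquery import query

# A DB-API type string such as "NUMERIC(10, 2)" with its parameters stripped:
type_name = "NUMERIC"

# Same attribute-based lookup the helpers use, now against query.SqlParameterScalarTypes.
scalar_type = getattr(query.SqlParameterScalarTypes, type_name.upper())
print(scalar_type._type)  # "NUMERIC"
```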
google/cloud/bigquery/enums.py (99 changes: 21 additions & 78 deletions)
@@ -12,13 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import re
-
 import enum
-import itertools
-
-from google.cloud.bigquery_v2 import types as gapic_types
-from google.cloud.bigquery.query import ScalarQueryParameterType
 
 
 class AutoRowIDs(enum.Enum):
@@ -180,56 +174,27 @@ class KeyResultStatementKind:
     FIRST_SELECT = "FIRST_SELECT"
 
 
-_SQL_SCALAR_TYPES = frozenset(
-    (
-        "INT64",
-        "BOOL",
-        "FLOAT64",
-        "STRING",
-        "BYTES",
-        "TIMESTAMP",
-        "DATE",
-        "TIME",
-        "DATETIME",
-        "INTERVAL",
-        "GEOGRAPHY",
-        "NUMERIC",
-        "BIGNUMERIC",
-        "JSON",
-    )
-)
-
-_SQL_NONSCALAR_TYPES = frozenset(("TYPE_KIND_UNSPECIFIED", "ARRAY", "STRUCT"))
-
-
-def _make_sql_scalars_enum():
-    """Create an enum based on a gapic enum containing only SQL scalar types."""
-
-    new_enum = enum.Enum(
-        "StandardSqlDataTypes",
-        (
-            (member.name, member.value)
-            for member in gapic_types.StandardSqlDataType.TypeKind
-            if member.name in _SQL_SCALAR_TYPES
-        ),
-    )
-
-    # make sure the docstring for the new enum is also correct
-    orig_doc = gapic_types.StandardSqlDataType.TypeKind.__doc__
-    skip_pattern = re.compile(
-        "|".join(_SQL_NONSCALAR_TYPES)
-        + "|because a JSON object"  # the second description line of STRUCT member
-    )
-
-    new_doc = "\n".join(
-        itertools.filterfalse(skip_pattern.search, orig_doc.splitlines())
-    )
-    new_enum.__doc__ = "An Enum of scalar SQL types.\n" + new_doc
-
-    return new_enum
-
-
-StandardSqlDataTypes = _make_sql_scalars_enum()
+class StandardSqlTypeNames(str, enum.Enum):
+    def _generate_next_value_(name, start, count, last_values):
+        return name
+
+    TYPE_KIND_UNSPECIFIED = enum.auto()
+    INT64 = enum.auto()
+    BOOL = enum.auto()
+    FLOAT64 = enum.auto()
+    STRING = enum.auto()
+    BYTES = enum.auto()
+    TIMESTAMP = enum.auto()
+    DATE = enum.auto()
+    TIME = enum.auto()
+    DATETIME = enum.auto()
+    INTERVAL = enum.auto()
+    GEOGRAPHY = enum.auto()
+    NUMERIC = enum.auto()
+    BIGNUMERIC = enum.auto()
+    JSON = enum.auto()
+    ARRAY = enum.auto()
+    STRUCT = enum.auto()
 
 
 # See also: https://cloud.google.com/bigquery/data-types#legacy_sql_data_types
@@ -256,28 +221,6 @@ class SqlTypeNames(str, enum.Enum):
     DATETIME = "DATETIME"
 
 
-class SqlParameterScalarTypes:
-    """Supported scalar SQL query parameter types as type objects."""
-
-    BOOL = ScalarQueryParameterType("BOOL")
-    BOOLEAN = ScalarQueryParameterType("BOOL")
-    BIGDECIMAL = ScalarQueryParameterType("BIGNUMERIC")
-    BIGNUMERIC = ScalarQueryParameterType("BIGNUMERIC")
-    BYTES = ScalarQueryParameterType("BYTES")
-    DATE = ScalarQueryParameterType("DATE")
-    DATETIME = ScalarQueryParameterType("DATETIME")
-    DECIMAL = ScalarQueryParameterType("NUMERIC")
-    FLOAT = ScalarQueryParameterType("FLOAT64")
-    FLOAT64 = ScalarQueryParameterType("FLOAT64")
-    GEOGRAPHY = ScalarQueryParameterType("GEOGRAPHY")
-    INT64 = ScalarQueryParameterType("INT64")
-    INTEGER = ScalarQueryParameterType("INT64")
-    NUMERIC = ScalarQueryParameterType("NUMERIC")
-    STRING = ScalarQueryParameterType("STRING")
-    TIME = ScalarQueryParameterType("TIME")
-    TIMESTAMP = ScalarQueryParameterType("TIMESTAMP")
-
-
 class WriteDisposition(object):
     """Specifies the action that occurs if destination table already exists.
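A quick sketch of how the replacement enum behaves: because `_generate_next_value_` returns the member name, each member's value is its own name, and the `str` mixin lets members compare equal to plain strings.

```python
from google.cloud.bigquery.enums import StandardSqlTypeNames

kind = StandardSqlTypeNames.BIGNUMERIC
print(kind.value)                 # "BIGNUMERIC", value equals the member name
print(kind == "BIGNUMERIC")       # True, thanks to the str mixin
print(len(StandardSqlTypeNames))  # 17 members, matching the class above
```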