From 2ed5ee5a17af08f39c0395752a72633a0fbc681d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?=
Date: Tue, 9 Jul 2024 15:54:18 -0500
Subject: [PATCH] chore: remove references to conda (#795)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

See also, internal change 650726157.
---
 .kokoro/build.sh                 | 16 +-----
 .kokoro/presubmit/conda_test.cfg |  7 ---
 CHANGELOG.md                     |  3 -
 README.rst                       | 14 +----
 docs/contributing.rst            | 22 -------
 docs/install.rst                 | 10 +---
 docs/reading.rst                 |  5 --
 noxfile.py                       | 99 +-------------------------------
 owlbot.py                        |  6 +-
 release-procedure.md             |  9 +--
 10 files changed, 10 insertions(+), 181 deletions(-)
 delete mode 100644 .kokoro/presubmit/conda_test.cfg

diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index e490fe53..08171cbd 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -23,18 +23,9 @@ cd "${PROJECT_ROOT}"
 
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
 
-export CONDA_EXE=/root/conda/bin/conda
-export CONDA_PREFIX=/root/conda
-export CONDA_PROMPT_MODIFIER=(base)
-export _CE_CONDA=
-export CONDA_SHLVL=1
-export CONDA_PYTHON_EXE=/root/conda/bin/python
-export CONDA_DEFAULT_ENV=base
-export PATH=/root/conda/bin:/root/conda/condabin:${PATH}
-
 # Debug: show build environment
-env
+env | grep KOKORO
 
 # Setup service account credentials.
 export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
@@ -42,9 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
 # Setup project id.
 export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
 
-# Install nox
-python3 -m pip install --upgrade --quiet nox
-
 # If this is a continuous build, send the test log to the FlakyBot.
 # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
 if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
diff --git a/.kokoro/presubmit/conda_test.cfg b/.kokoro/presubmit/conda_test.cfg
deleted file mode 100644
index 6e3943f3..00000000
--- a/.kokoro/presubmit/conda_test.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Only run this nox session.
-env_vars: {
-    key: "NOX_SESSION"
-    value: "conda_test"
-}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 418201bf..ced3da53 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,7 +6,6 @@
 ### Bug Fixes
 
 * Handle None when converting numerics to parquet ([#768](https://github.com/googleapis/python-bigquery-pandas/issues/768)) ([53a4683](https://github.com/googleapis/python-bigquery-pandas/commit/53a46833a320963d5c15427f6eb631e0199fb332))
-* Set minimum allowable version of sqlite when performing a conda install ([#780](https://github.com/googleapis/python-bigquery-pandas/issues/780)) ([8a03d44](https://github.com/googleapis/python-bigquery-pandas/commit/8a03d44fbe125ae1202f43b7c6e54c98eca94d4d))
 
 
 ### Documentation
@@ -369,8 +368,6 @@ df = gbq.read_gbq(
   ([#281](https://github.com/googleapis/python-bigquery-pandas/issues/281))
 - Fix `pytest.raises` usage for latest pytest. Fix warnings in tests.
   ([#282](https://github.com/googleapis/python-bigquery-pandas/issues/282))
-- Update CI to install nightly packages in the conda tests.
-  ([#254](https://github.com/googleapis/python-bigquery-pandas/issues/254))
 
 ## 0.10.0 / 2019-04-05
 
diff --git a/README.rst b/README.rst
index 1b35f26a..122f43ee 100644
--- a/README.rst
+++ b/README.rst
@@ -1,7 +1,7 @@
 pandas-gbq
 ==========
 
-|preview| |pypi| |versions| 
+|preview| |pypi| |versions|
 
 **pandas-gbq** is a package providing an interface
 to the Google BigQuery API from pandas.
@@ -20,14 +20,6 @@ pandas-gbq
 Installation
 ------------
 
-
-Install latest release version via conda
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-.. code-block:: shell
-
-   $ conda install pandas-gbq --channel conda-forge
-
 Install latest release version via pip
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -52,7 +44,7 @@ Perform a query
 .. code:: python
 
     import pandas_gbq
-    
+
     result_dataframe = pandas_gbq.read_gbq("SELECT column FROM dataset.table WHERE value = 'something'")
 
 Upload a dataframe
@@ -61,7 +53,7 @@ Upload a dataframe
 .. code:: python
 
     import pandas_gbq
-    
+
     pandas_gbq.to_gbq(dataframe, "dataset.table")
 
 More samples
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 891a1799..39c2694a 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -145,31 +145,9 @@ Install in Development Mode
 It's helpful to install pandas-gbq in development mode so that you can
 use the library without reinstalling the package after every change.
 
-Conda
-~~~~~
-
-Create a new conda environment and install the necessary dependencies
-
-.. code-block:: shell
-
-   $ conda create -n my-env --channel conda-forge \
-         db-dtypes \
-         pandas \
-         pydata-google-auth \
-         google-cloud-bigquery
-   $ source activate my-env
-
-Install pandas-gbq in development mode
-
-.. code-block:: shell
-
-   $ python setup.py develop
-
 Pip & virtualenv
 ~~~~~~~~~~~~~~~~
 
-*Skip this section if you already followed the conda instructions.*
-
 Create a new
 `virtual environment
 `__.
diff --git a/docs/install.rst b/docs/install.rst
index 9887c799..849b1b60 100644
--- a/docs/install.rst
+++ b/docs/install.rst
@@ -1,16 +1,8 @@
 Installation
 ============
 
-You can install pandas-gbq with ``conda``, ``pip``, or by installing from source.
+You can install pandas-gbq with ``pip`` or by installing from source.
 
-Conda
------
-
-.. code-block:: shell
-
-   $ conda install pandas-gbq --channel conda-forge
-
-This installs pandas-gbq and all common dependencies, including ``pandas``.
 
 Pip
 ---
diff --git a/docs/reading.rst b/docs/reading.rst
index bc7b74e1..5fa369a7 100644
--- a/docs/reading.rst
+++ b/docs/reading.rst
@@ -103,11 +103,6 @@ quickly (but at an `increased cost
 
       pip install --upgrade google-cloud-bigquery-storage pyarrow
 
-   With conda:
-
-   .. code-block:: sh
-
-      conda install -c conda-forge google-cloud-bigquery-storage
 #. Set ``use_bqstorage_api`` to ``True`` when calling the
    :func:`~pandas_gbq.read_gbq` function. As of the ``google-cloud-bigquery``
    package, version 1.11.1 or later,the function will fallback to the
diff --git a/noxfile.py b/noxfile.py
index a8c12aa1..774325ae 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -23,7 +23,6 @@
 import pathlib
 import re
 import shutil
-import subprocess
 import time
 import warnings
 
@@ -57,11 +56,6 @@
     "3.9": [],
 }
 
-CONDA_TEST_PYTHON_VERSIONS = [
-    UNIT_TEST_PYTHON_VERSIONS[0],
-    UNIT_TEST_PYTHON_VERSIONS[-1],
-]
-
 SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11", "3.12"]
 SYSTEM_TEST_STANDARD_DEPENDENCIES = [
     "mock",
@@ -316,6 +310,7 @@ def system(session):
 @_calculate_duration
 def prerelease(session):
     session.install(
+        # https://arrow.apache.org/docs/developers/python.html#installing-nightly-packages
         "--extra-index-url",
         "https://pypi.fury.io/arrow-nightlies/",
         "--prefer-binary",
@@ -323,14 +318,6 @@ def prerelease(session):
         "--upgrade",
         "pyarrow",
     )
-    session.install(
-        "--extra-index-url",
-        "https://pypi.anaconda.org/scipy-wheels-nightly/simple",
-        "--prefer-binary",
-        "--pre",
-        "--upgrade",
-        "pandas",
-    )
     session.install(
         "--prefer-binary",
         "--pre",
@@ -342,6 +329,7 @@ def prerelease(session):
         "google-resumable-media",
         # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642
         "grpcio!=1.49.0rc1",
+        "pandas",
     )
     session.install(
         "freezegun",
@@ -504,86 +492,3 @@ def docfx(session):
         os.path.join("docs", ""),
         os.path.join("docs", "_build", "html", ""),
     )
-
-
-def install_conda_unittest_dependencies(session, standard_deps, conda_forge_packages):
-    """Installs packages from conda forge, pypi, and locally."""
-
-    # Install from conda-forge and default conda package repos.
-    session.conda_install(*conda_forge_packages, channel=["defaults", "conda-forge"])
-
-    # Install from pypi for packages not readily available on conda forge.
-    session.install(
-        *standard_deps,
-    )
-
-    # Install via pip from the local repo, avoid doing dependency resolution
-    # via pip, so that we don't override any conda resolved dependencies
-    session.install("-e", ".", "--no-deps")
-
-
-@nox.session(python=CONDA_TEST_PYTHON_VERSIONS, venv_backend="mamba")
-@_calculate_duration
-def conda_test(session):
-    """Run test suite in a conda virtual environment.
-
-    Installs all test dependencies, then installs this package.
-    NOTE: Some of these libraries are not readily available on conda-forge
-    at this time and are thus installed using pip after the base install of
-    libraries from conda-forge.
-
-    We decided that it was more important to prove a base ability to install
-    using conda than to complicate things with adding a whole nother
-    set of constraints just for a conda install, so this install does not
-    attempt to constrain packages (i.e. in a constraints-x.x.txt file)
-    manually.
-    """
-
-    standard_deps = (
-        UNIT_TEST_STANDARD_DEPENDENCIES
-        + UNIT_TEST_DEPENDENCIES
-        + UNIT_TEST_EXTERNAL_DEPENDENCIES
-    )
-
-    conda_forge_packages = [
-        "db-dtypes",
-        "google-api-core",
-        "google-auth",
-        "google-auth-oauthlib",
-        "google-cloud-bigquery",
-        "google-cloud-bigquery-storage",
-        "numpy",
-        "pandas",
-        "pyarrow",
-        "pydata-google-auth",
-        "tqdm",
-        "protobuf",
-        "sqlite>3.31.1",  # v3.31.1 caused test failures
-    ]
-
-    install_conda_unittest_dependencies(session, standard_deps, conda_forge_packages)
-
-    # Provide a list of all installed packages (both from conda forge and pip)
-    # for troubleshooting purposes.
-    session.run("mamba", "list")
-
-    # Using subprocess.run() instead of session.run() because
-    # session.run() does not correctly handle the pip check command.
-    subprocess.run(
-        ["pip", "check"], check=True
-    )  # Raise an exception if pip check fails
-
-    # Tests are limited to unit tests only, at this time.
-    session.run(
-        "py.test",
-        "--quiet",
-        f"--junitxml=unit_{session.python}_sponge_log.xml",
-        "--cov=pandas_gbq",
-        "--cov=tests/unit",
-        "--cov-append",
-        "--cov-config=.coveragerc",
-        "--cov-report=",
-        "--cov-fail-under=0",
-        os.path.join("tests", "unit"),
-        *session.posargs,
-    )
diff --git a/owlbot.py b/owlbot.py
index 3bb53fc2..aeda7356 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -56,13 +56,9 @@
         "docs/multiprocessing.rst",
         "noxfile.py",
         "README.rst",
-
-        # exclude .kokoro/build.sh which is customized due to support for conda
-        ".kokoro/build.sh",
-
         # exclude this file as we have an alternate prerelease.cfg
         ".kokoro/presubmit/prerelease-deps.cfg",
-    ]
+    ],
 )
 
 # ----------------------------------------------------------------------------
diff --git a/release-procedure.md b/release-procedure.md
index 3b33021d..47c8fd74 100644
--- a/release-procedure.md
+++ b/release-procedure.md
@@ -33,13 +33,6 @@
 
   twine upload dist/*
 
-* Create the [release on GitHub](https://github.com/pydata/pandas-gbq/releases/new) using the tag created earlier.
+* Create the [release on GitHub](https://github.com/googleapis/python-bigquery-pandas/releases/new) using the tag created earlier.
 
 * Upload wheel and source zip from `dist/` directory.
-
-* Do a pull-request to the feedstock on `pandas-gbq-feedstock `__
-  (Or review PR from @regro-cf-autotick-bot which updates the feedstock).
-
-  * update the version
-  * update the SHA256 (retrieve from PyPI)
-  * update the dependencies (if they changed)