diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d3b5f389c9937..5e4202a598cff 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,12 +9,12 @@ repos: args: ["--tmpl=LICENSE_SHORT", "--ext=py", "-x=**/models/__init__.py", "-f"] - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.9.1 hooks: - id: black args: ["--config", "pyproject.toml"] - repo: https://github.com/timothycrosley/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort args: @@ -26,7 +26,7 @@ repos: ] additional_dependencies: ["colorama"] - repo: https://github.com/pre-commit/mirrors-prettier - rev: v2.5.0 + rev: v3.0.3 hooks: - id: prettier types_or: [yaml, json] @@ -37,14 +37,14 @@ repos: ).?$ - repo: https://github.com/csachs/pyproject-flake8 - rev: v6.0.0 + rev: v6.0.0.post1 hooks: - id: pyproject-flake8 args: ["--config", "pyproject.toml"] additional_dependencies: ["mccabe"] alias: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.930 + rev: v1.5.1 hooks: - id: mypy args: ["--config-file", "pyproject.toml"] @@ -64,3 +64,13 @@ repos: /connectors/destination-e2e-test| /connectors/source-e2e-test ).*$ + - repo: local + hooks: + - id: base-images-build + name: Run make build on base_images change + language: system + entry: make + args: ["-C", "airbyte-ci/connectors/base_images"] + files: ^airbyte-ci/connectors/base_images/ + pass_filenames: false + fail_fast: true diff --git a/airbyte-ci/connectors/base_images/Makefile b/airbyte-ci/connectors/base_images/Makefile new file mode 100644 index 0000000000000..ae5b17dbcff4f --- /dev/null +++ b/airbyte-ci/connectors/base_images/Makefile @@ -0,0 +1,8 @@ +all: build +lock: + poetry lock +install: lock + poetry install +build: install + poetry run build + git add generated \ No newline at end of file diff --git a/airbyte-ci/connectors/base_images/README.md b/airbyte-ci/connectors/base_images/README.md new file mode 100644 index 0000000000000..853fa580fd220 --- /dev/null +++ 
b/airbyte-ci/connectors/base_images/README.md @@ -0,0 +1,119 @@ +# airbyte-connectors-base-images + +This python package contains the base images used by Airbyte connectors. +It is intended to be used as a python library. +Our connector build pipeline ([`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#L1)) **will** use this library to build the connector images. +Our base images are declared in code, using the [Dagger Python SDK](https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/). + +## Base images changelog +The base image changelog files are automatically generated and updated by the build pipeline. +* [airbyte-python-connector-base changelog]("https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/base_images/generated/docs/base_images_changelog/airbyte-python-connector-base.md") + + +## Where are the Dockerfiles? +Our base images are not declared using Dockerfiles. +They are declared in code using the [Dagger Python SDK](https://dagger-io.readthedocs.io/en/sdk-python-v0.6.4/). +We prefer this approach because it allows us to interact with base image containers as code: we can use python to declare the base images and use the full power of the language to build and test them. +However, we do artificially generate Dockerfiles for debugging and documentation purposes. +Feel free to check the `generated/dockerfiles` directory. + + +## How to get our base images +### If you're not a Dagger user: +You'll be able to get our base images from our [Docker Hub](https://hub.docker.com/u/airbyte) registry. The publish pipeline for these images is not built yet. 
+ +### If you are a Dagger user: +Install this library as a dependency of your project and import `GLOBAL_REGISTRY` from it: +```python +import platform +import sys + +import anyio +import dagger + +# You must have this library installed in your project +from base_images import GLOBAL_REGISTRY + +CURRENT_PLATFORM = dagger.Platform(f"linux/{platform.machine()}") +BaseImageVersion = GLOBAL_REGISTRY.get_version("airbyte-python-connector-base:0.1.0") + +async def main(): + async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as dagger_client: + python_connector_base_container: dagger.Container = BaseImageVersion(dagger_client, CURRENT_PLATFORM).container + # Do something with the container + python_version_output: str = await python_connector_base_container.with_exec(["python", "--version"]).stdout() + print(python_version_output) + +anyio.run(main) +``` + + +## How to add a new base image version + +0. Please install the repo pre-commit hook: from airbyte repo root run `pre-commit install`. It will make sure that the changelog file is up to date and committed on changes. +1. `poetry install` +2. Open the latest version module: e.g `base_images/python/v1.py`. +3. Declare a new class inheriting from `AirbytePythonConnectorBaseImage` or another existing version. **The class name must follow the semver pattern `_MAJOR_MINOR_PATCH` (e.g. `_1_0_0(AirbytePythonConnectorBaseImage)`).** +4. Implement the `container` property which must return a `dagger.Container` object. +5. Declare the `changelog` class attribute to describe the change provided by the new version. +6. *Recommended*: Override the `run_sanity_check` method to add a sanity check to your new base image version. +7. To detect regressions you can set the `run_previous_version_sanity_checks` attribute to `True`. +8. Build the project: run `make build`; it will run sanity checks on the images, generate dockerfiles and update the changelog file. +9. 
If you face any issue, feel free to run `LOG_LEVEL=DEBUG poetry run build` to get access to the full logs. +10. Commit and push your changes. +11. Create a PR and ask for a review from the Connector Operations team. +12. Your new base image version will be available for use in the connector build pipeline once your PR is merged. + +**Example: declaring a new base image version to add a system dependency (`ffmpeg`) on top of the previous version** + +```python +# In base_images/python/v1.py + +from base_images import sanity_checks, python + +# We enforce direct inheritance from AirbytePythonConnectorBaseImage +class _1_0_1(python.AirbytePythonConnectorBaseImage): + + base_base_image: Final[PythonBase] = PythonBase.PYTHON_3_9_18 + + changelog: str = "Add ffmpeg to the base image." + + # This will run the previous version sanity checks on top of the new version. + # This is helpful to detect regressions. + run_previous_version_sanity_checks = True + + @property + def container(self) -> dagger.Container: + # We encourage declarative programming here to facilitate the maintenance of the base images. + # To prevent refactoring side effects we'd love this container property to be idempotent and not call any external code except the base_container and Dagger API. + pip_cache_volume: dagger.CacheVolume = self.dagger_client.cache_volume(AirbytePythonConnectorBaseImage.pip_cache_name) + + return ( + self.base_container.with_mounted_cache("/root/.cache/pip", pip_cache_volume) + # Set the timezone to UTC + .with_exec(["ln", "-snf", "/usr/share/zoneinfo/Etc/UTC", "/etc/localtime"]) + # Upgrade pip to the expected version + .with_exec(["pip", "install", "--upgrade", "pip==23.2.1"]) + # Install ffmpeg + .with_exec(["sh", "-c", "apt-get update && apt-get install -y ffmpeg"]) + ) + + + async def run_sanity_checks(base_image_version: AirbyteConnectorBaseImage): + try: + # Feel free to add additional re-usable sanity checks in the sanity_checks module. 
+ await sanity_checks.check_a_command_is_available_using_version_option( + base_image_version.container, + "ffmpeg" + ) +``` + +## How to update an existing base image version +**Existing base image version must not be updated or deleted! Please reach out to the Connector Operations team if you have a good reason to do that.** + +## Running tests locally +```bash +poetry run pytest +# Static typing checks +poetry run mypy base_images --check-untyped-defs +``` + diff --git a/airbyte-ci/connectors/base_images/base_images/__init__.py b/airbyte-ci/connectors/base_images/base_images/__init__.py new file mode 100644 index 0000000000000..b614c6c53d2b0 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/__init__.py @@ -0,0 +1,26 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +import sys + +from base_images.errors import BaseImageVersionError +from base_images.registries import GlobalRegistry +from rich.console import Console + +console = Console() + +try: + from base_images import python # , java +except BaseImageVersionError as e: + # This error occurs if a base image version class name does not follow semver. + # We handle the error for nice console output. + # It might happen if a developer implement a new version class without following our required class name convention. + console.log(f":cross_mark: {e}", style="bold red") + sys.exit(1) + + +GLOBAL_REGISTRY = GlobalRegistry( + [ + python.VERSION_REGISTRY, + ] +) # , java.VERSION_REGISTRY]) diff --git a/airbyte-ci/connectors/base_images/base_images/build.py b/airbyte-ci/connectors/base_images/base_images/build.py new file mode 100644 index 0000000000000..d2bd9f0d4282a --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/build.py @@ -0,0 +1,162 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import os +import shutil +import sys +from itertools import product +from pathlib import Path +from unittest.mock import MagicMock + +import anyio +import dagger +from base_images import GLOBAL_REGISTRY, common, console, consts, errors, hacks, registries +from rich.status import Status + +DOCKERFILE_HEADER = """ +# This file is generated by base_images/build.py. Please do not edit it manually. +# It is not used by Airbyte internal build process. +# It is meant for documentation and debugging purposes. +""" + +DOCKERFILES_DIRECTORY = Path(consts.PROJECT_DIR / "generated" / "dockerfiles") + + +def generate_dockerfile(dockerfile_directory: Path, base_image_version: common.AirbyteConnectorBaseImage): + """ + Generates the dockerfile for a given base image version. + """ + dockerfile = hacks.get_container_dockerfile(base_image_version.container) + dockerfile_directory = dockerfile_directory / base_image_version.image_name / base_image_version.platform + dockerfile_directory.mkdir(exist_ok=True, parents=True) + dockerfile_path = Path(dockerfile_directory / f"{base_image_version.version}.Dockerfile") + dockerfile = DOCKERFILE_HEADER + "\n" + dockerfile + "\n" + dockerfile_path.write_text(dockerfile) + console.log( + f":whale2: Generated Dockerfile for {base_image_version.name_with_tag} for {base_image_version.platform}: {dockerfile_path}", + highlight=False, + ) + + +async def run_sanity_checks(base_image_version: common.AirbyteConnectorBaseImage, registry: registries.VersionRegistry) -> bool: + """ + Runs sanity checks on a base image. + Sanity checks are declared in the base image version classes by implementing the run_sanity_checks function. + Sanity checks are commands executed on the base image container, we check the output of these commands to make sure the base image is working as expected. 
+ """ + try: + await base_image_version.run_sanity_checks_for_version() + console.log( + f":white_check_mark: Successfully ran sanity checks on {base_image_version.name_with_tag} for {base_image_version.platform}", + highlight=False, + ) + if base_image_version.run_previous_version_sanity_checks: + PreviousVersion = registry.get_previous_version(base_image_version) + if PreviousVersion: + await PreviousVersion.run_sanity_checks(base_image_version) + console.log( + f":white_check_mark: Successfully ran sanity checks on previous version: {PreviousVersion.name_with_tag} for {base_image_version.platform}", + highlight=False, + ) + return True + except errors.SanityCheckError as sanity_check_error: + console.log( + f":cross_mark: Sanity checks failure on {base_image_version.name_with_tag} for {base_image_version.platform}: {sanity_check_error}", + style="bold red", + highlight=False, + ) + return False + + +async def build_registry(dagger_client: dagger.Client, current_status: Status, registry: registries.VersionRegistry) -> bool: + """Generate the dockerfiles, run the sanity checks and write the changelog for a registry. + + Args: + dagger_client (dagger.Client): The dagger client. + current_status (Status): The rich status object to update. + registry (registries.VersionRegistry): The registry to build. + + Returns: + bool: True if all the sanity checks passed, False otherwise. 
+ """ + sanity_check_successes = [] + for platform, BaseImageVersion in product(consts.SUPPORTED_PLATFORMS, registry.versions): + base_image_version = BaseImageVersion(dagger_client, platform) + current_status.update(f":whale2: Generating dockerfile for {base_image_version.name_with_tag} for {base_image_version.platform}") + generate_dockerfile(DOCKERFILES_DIRECTORY, base_image_version) + current_status.update(f":mag_right: Running sanity checks on {base_image_version.name_with_tag} for {base_image_version.platform}") + success = await run_sanity_checks(base_image_version, registry) + sanity_check_successes.append(success) + fully_successful = all(sanity_check_successes) + if fully_successful: + console.log(f":tada: All sanity checks passed for {registry.base_image_name}", style="bold green") + current_status.update(f"Writing the changelog for {registry.base_image_name}") + changelog_path = registry.write_changelog() + console.log( + f":memo: Wrote the updated changelog for {registry.base_image_name} to {changelog_path}.", + ) + else: + console.log(f":bomb: Did not write the changelog: sanity checks failed for {registry.base_image_name}", style="bold red") + return fully_successful + + +async def build(current_status: Status) -> bool: + current_status.update(":dagger: Initializing Dagger") + if consts.DEBUG: + dagger_config = dagger.Config(log_output=sys.stderr) + else: + dagger_logs_path = Path("/tmp/base_images_project_build_dagger_logs.log") + dagger_logs_path.unlink(missing_ok=True) + dagger_logs_path.touch() + dagger_config = dagger.Config(log_output=open(dagger_logs_path, "w")) + console.log(f":information_source: Dagger logs will be written to {dagger_logs_path}") + build_successes = [] + + # Clear the generated dockerfiles directory, we will regenerate them. 
+ shutil.rmtree(DOCKERFILES_DIRECTORY, ignore_errors=True) + + async with dagger.Connection(dagger_config) as dagger_client: + for registry in GLOBAL_REGISTRY.all_registries: + build_successes.append(await build_registry(dagger_client, current_status, registry)) + return all(build_successes) + + +def main(): + """ + This function is called by the build command, currently via poetry run build. + It's currently meant to be run locally by developers to generate the changelog and run sanity checks. + It can eventually be run in CI to generate the changelog and run sanity checks. + + 1. Run sanity checks on all the base images. + 2. Write the changelog for the python base image. + + This function calls Dagger to run the sanity checks. + If you don't have the base base image locally it will be pulled, which can take a while. + Subsequent runs will be faster as the base images layers and sanity checks layers will be cached locally. + """ + try: + default_build_status = console.status("Building the project", spinner="bouncingBall") + disabled_build_status = MagicMock(default_build_status) + build_status = default_build_status if not consts.DEBUG else disabled_build_status + with build_status as current_status: # type: ignore + global_build_success = anyio.run(build, current_status) + if not global_build_success: + console.log( + ":bomb: Build failed. 
Feel free to prepend the command with LOG_LEVEL=DEBUG if you want to investigate Dagger logs.", + style="bold red", + ) + else: + if os.getenv("GIT_HOOK"): + console.log("[bold green] The updated changelog and dockerfile files were committed.[/bold green]") + else: + console.log("[bold green]You can now commit and push the changelog and the generated dockerfiles![/bold green]") + if not global_build_success: + sys.exit(1) + except KeyboardInterrupt: + console.log(":bomb: User aborted the build.", style="bold red") + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/airbyte-ci/connectors/base_images/base_images/common.py b/airbyte-ci/connectors/base_images/base_images/common.py new file mode 100644 index 0000000000000..fb436fee55d7c --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/common.py @@ -0,0 +1,199 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +"""This module declares common (abstract) classes and methods used by all base images. +It's not meant to be regularly modified. +""" +from __future__ import annotations + +import inspect +from abc import ABC, abstractmethod +from dataclasses import dataclass +from enum import Enum +from typing import final + +import dagger +import semver +from base_images import errors, registries, sanity_checks + + +@dataclass +class PlatformAwareDockerImage: + image_name: str + tag: str + sha: str + platform: dagger.Platform + + def get_full_image_name(self) -> str: + return f"{self.image_name}:{self.tag}@sha256:{self.sha}" + + +class BaseBaseImage(Enum): + pass + + +class AirbyteConnectorBaseImage(ABC): + """An abstract class that represents an Airbyte base image. + Please do not declare any Dagger with_exec instruction in this class as in the abstract class context we have no guarantee about the underlying system used in the base image. 
+ """ + + name_with_tag: str + version: semver.VersionInfo + + @final + def __init__(self, dagger_client: dagger.Client, platform: dagger.Platform): + """Initializes the Airbyte base image. + + Args: + dagger_client (dagger.Client): The dagger client used to build the base image. + platform (dagger.Platform): The platform used to build the base image. + """ + self.dagger_client = dagger_client + self.platform = platform + self._validate_platform_availability() + + def __init_subclass__(cls) -> None: + if not inspect.isabstract(cls): + cls.version = registries.get_version_from_class_name(cls) + cls.name_with_tag = f"{cls.image_name}:{cls.version}" + return super().__init_subclass__() + + # INSTANCE PROPERTIES: + + @final + @property + def base_base_image_name(self) -> str: + """Returns the full name of the base's base image used to build the Airbyte base image. + In this context the base's base image name contains the tag. + Returns: + str: The full name of the base's base image used to build the Airbyte base image, with its tag. + """ + return self.base_base_image.value[self.platform].get_full_image_name() + + @property + @final + def base_container(self) -> dagger.Container: + """Returns a container using the base python image. This container is used to build the Airbyte base image. + We set environment variables and labels to ensure we can easily check at post build time: + - the base image that was used to build the Airbyte base image + - the version of the Airbyte base image + + Returns: + dagger.Container: The container using the base python image. 
+ """ + return ( + self.dagger_client.pipeline(self.name_with_tag) + .container(platform=self.platform) + .from_(self.base_base_image_name) + .with_env_variable("AIRBYTE_BASE_BASE_IMAGE", self.base_base_image_name) + .with_env_variable("AIRBYTE_BASE_IMAGE", self.name_with_tag) + .with_label("io.airbyte.base_base_image", self.base_base_image_name) + .with_label("io.airbyte.base_image", self.name_with_tag) + ) + + # MANDATORY SUBCLASSES ATTRIBUTES / PROPERTIES: + + @property + @abstractmethod + def base_base_image(cls) -> BaseBaseImage: + """Returns the base image used to build the Airbyte base image. + + Raises: + NotImplementedError: Raised if a subclass does not define a 'base_base_image' attribute. + + Returns: + BaseBaseImage: The base image used to build the Airbyte base image. + """ + raise NotImplementedError("Subclasses must define a 'base_base_image'.") + + @property + @abstractmethod + def image_name(cls) -> str: + """This is the name of the final base image. By name we mean DockerHub image name without the tag. + + Raises: + NotImplementedError: Raised if a subclass does not define an 'image_name' attribute. + + Returns: + str: The name of the final base image. + """ + raise NotImplementedError("Subclasses must define an 'image_name'.") + + @property + @abstractmethod + def changelog_entry(cls) -> str: + """This is the changelog entry for a new base image version. + It will automatically be used to generate the changelog entry for the release notes. + It is solely used for the release notes. + + Raises: + NotImplementedError: Raised if a subclass does not define a 'changelog_entry' attribute. + + Returns: + str: The changelog entry for a new base image version. + """ + raise NotImplementedError("Subclasses must define a 'changelog_entry' attribute.") + + @property + @abstractmethod + def container(self) -> dagger.Container: + """Returns the container of the Airbyte connector base image. 
This is where version specific definitions, like with_exec, should occur.""" + raise NotImplementedError("Subclasses must define a 'container' property.") + + @property + @abstractmethod + def run_previous_version_sanity_checks(cls) -> bool: + """This is a flag to run the previous version sanity checks on the current version. + It is helpful to detect breaking changes or regression in a new base image version. + Raises: + NotImplementedError: Raised if a subclass does not define a 'run_previous_version_sanity_checks' attribute. + + Returns: + bool: A flag to run the previous version sanity checks on the current version. + """ + raise NotImplementedError("Subclasses must define a 'run_previous_version_sanity_checks' attribute.") + + # INSTANCE METHODS: + + @final + def _validate_platform_availability(self): + """Validates that the base image supports the platform passed at initialization. + + Raises: + ValueError: Raised if the platform is not supported by the base image. + """ + if self.platform not in self.base_base_image.value: + raise errors.PlatformAvailabilityError(f"Platform {self.platform} is not supported by {self.base_base_image.name}.") + + async def run_sanity_checks_for_version(self): + """Runs sanity checks on the current base image version instance.""" + await self.__class__.__base__.run_sanity_checks(self) # type: ignore + await self.run_sanity_checks(self) + + # STATIC METHODS: + @staticmethod + async def run_sanity_checks(base_image_version: AirbyteConnectorBaseImage): + """Runs sanity checks on the base image container. + This method is called on base image build. + This method is static to allow running sanity checks of a specific version from another one. + The following sanity checks are meant to check that labels and environment variables about the base's base image and the current Airbyte base image are correctly set. + + Args: + base_image_version (AirbyteConnectorBaseImage): The base image version on which the sanity checks should run. 
+ + Raises: + SanityCheckError: Raised if a sanity check fails. + """ + await sanity_checks.check_env_var_defined_with_dagger( + base_image_version.container, "AIRBYTE_BASE_BASE_IMAGE", base_image_version.base_base_image_name + ) + await sanity_checks.check_env_var_defined_with_dagger( + base_image_version.container, "AIRBYTE_BASE_IMAGE", base_image_version.name_with_tag + ) + await sanity_checks.check_label_defined_with_dagger( + base_image_version.container, "io.airbyte.base_base_image", base_image_version.base_base_image_name + ) + await sanity_checks.check_label_defined_with_dagger( + base_image_version.container, "io.airbyte.base_image", base_image_version.name_with_tag + ) diff --git a/airbyte-ci/connectors/base_images/base_images/consts.py b/airbyte-ci/connectors/base_images/base_images/consts.py new file mode 100644 index 0000000000000..f6640a55ce5b7 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/consts.py @@ -0,0 +1,20 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +"""This module declares constants used by the base_images module. +""" + +import os +from pathlib import Path + +import dagger +import git + +AIRBYTE_GITHUB_REPO_URL = "https://github.com/airbytehq/airbyte" +GIT_REPO = git.Repo(search_parent_directories=True) +AIRBYTE_ROOT_DIR: str = str(GIT_REPO.working_tree_dir) +MAIN_BRANCH_NAME = "master" +PROJECT_DIR = Path(__file__).parent.parent +SUPPORTED_PLATFORMS = (dagger.Platform("linux/amd64"), dagger.Platform("linux/arm64")) +DEBUG = os.getenv("LOG_LEVEL", "INFO").upper() == "DEBUG" diff --git a/airbyte-ci/connectors/base_images/base_images/errors.py b/airbyte-ci/connectors/base_images/base_images/errors.py new file mode 100644 index 0000000000000..301ee51633515 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/errors.py @@ -0,0 +1,24 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +"""This module contains the exceptions used by the base_images module. 
+""" + + +class BaseImageVersionError(ValueError): + """Raised when the version is not in the expected format.""" + + pass + + +class SanityCheckError(Exception): + """Raised when a sanity check fails.""" + + pass + + +class PlatformAvailabilityError(ValueError): + """Raised when an image does not support the passed.""" + + pass diff --git a/airbyte-ci/connectors/base_images/base_images/hacks.py b/airbyte-ci/connectors/base_images/base_images/hacks.py new file mode 100644 index 0000000000000..e956efded4716 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/hacks.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import dagger + + +def get_container_dockerfile(container) -> str: + """Returns the Dockerfile of the base image container. + Disclaimer: THIS IS HIGHLY EXPERIMENTAL, HACKY AND BRITTLE. + TODO: CONFIRM WITH THE DAGGER TEAM WHAT CAN GO WRONG HERE. + Returns: + str: The Dockerfile of the base image container. + """ + lineage = [ + field for field in list(container._ctx.selections) if isinstance(field, dagger.api.base.Field) and field.type_name == "Container" + ] + dockerfile = [] + for field in lineage: + if field.name == "from": + dockerfile.append(f'FROM {field.args.get("address")}') + if field.name == "withExec": + dockerfile.append(f'RUN {" ".join(field.args.get("args"))}') # type: ignore + if field.name == "withEnvVariable": + dockerfile.append(f'ENV {field.args.get("name")}={field.args.get("value")}') + if field.name == "withLabel": + dockerfile.append(f'LABEL {field.args.get("name")}={field.args.get("value")}') + return "\n".join(dockerfile) diff --git a/airbyte-ci/connectors/base_images/base_images/python/__init__.py b/airbyte-ci/connectors/base_images/base_images/python/__init__.py new file mode 100644 index 0000000000000..e50e83de49866 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/python/__init__.py @@ -0,0 +1,9 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from base_images.registries import VersionRegistry + +from .common import AirbytePythonConnectorBaseImage + +VERSION_REGISTRY: VersionRegistry = VersionRegistry.build_from_package(AirbytePythonConnectorBaseImage, __name__, __path__) diff --git a/airbyte-ci/connectors/base_images/base_images/python/common.py b/airbyte-ci/connectors/base_images/base_images/python/common.py new file mode 100644 index 0000000000000..53b05ee7d83dc --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/python/common.py @@ -0,0 +1,98 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from __future__ import annotations + +from abc import ABC +from typing import Final, Set, final + +import dagger +from base_images import common, errors, sanity_checks + + +class PythonBase(common.BaseBaseImage): + """ + This enum declares the Python base images that can be used to build our own base image for python. + We use the image digest (the sha256) to ensure that the image is not changed for reproducibility. 
+ """ + + PYTHON_3_9_18 = { + # https://hub.docker.com/layers/library/python/3.9.18-slim-bookworm/images/sha256-3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 + dagger.Platform("linux/amd64"): common.PlatformAwareDockerImage( + image_name="python", + tag="3.9.18-slim-bookworm", + sha="3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6", + platform=dagger.Platform("linux/amd64"), + ), + # x86_64 and amd64 are the same platform, they'll use the same image + # https://hub.docker.com/layers/library/python/3.9.18-slim-bookworm/images/sha256-3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 + dagger.Platform("linux/x86_64"): common.PlatformAwareDockerImage( + image_name="python", + tag="3.9.18-slim-bookworm", + sha="3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6", + platform=dagger.Platform("linux/amd64"), + ), + # https://hub.docker.com/layers/library/python/3.9.18-slim-bookworm/images/sha256-ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1 + dagger.Platform("linux/arm64"): common.PlatformAwareDockerImage( + image_name="python", + tag="3.9.18-slim-bookworm", + sha="ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1", + platform=dagger.Platform("linux/arm64"), + ), + } + + +class AirbytePythonConnectorBaseImage(common.AirbyteConnectorBaseImage, ABC): + """An abstract class that represents an Airbyte Python base image.""" + + image_name: Final[str] = "airbyte-python-connector-base" + pip_cache_name: Final[str] = "pip-cache" + expected_env_vars: Set[str] = { + "PYTHON_VERSION", + "PYTHON_PIP_VERSION", + "PYTHON_GET_PIP_SHA256", + "PYTHON_GET_PIP_URL", + "HOME", + "PATH", + "LANG", + "GPG_KEY", + "OTEL_EXPORTER_OTLP_TRACES_PROTOCOL", + "PYTHON_SETUPTOOLS_VERSION", + "OTEL_TRACES_EXPORTER", + "OTEL_TRACE_PARENT", + "TRACEPARENT", + } + + @final + def __init_subclass__(cls) -> None: + if not cls.__base__ == AirbytePythonConnectorBaseImage: + raise errors.BaseImageVersionError( + 
f"AirbytePythonConnectorBaseImage subclasses must directly inherit from AirbytePythonConnectorBaseImage. {cls.__name__} does not." + ) + return super().__init_subclass__() + + @staticmethod + async def run_sanity_checks(base_image_version: common.AirbyteConnectorBaseImage): + await common.AirbyteConnectorBaseImage.run_sanity_checks(base_image_version) + await AirbytePythonConnectorBaseImage.check_env_vars(base_image_version) + + async def run_sanity_checks_for_version(self): + await common.AirbyteConnectorBaseImage.run_sanity_checks(self) + await AirbytePythonConnectorBaseImage.check_env_vars(self) + return await super().run_sanity_checks_for_version() + + @staticmethod + async def check_env_vars(base_image_version: common.AirbyteConnectorBaseImage): + """Checks that the expected environment variables are set on the base image. + The expected_env_vars were set on all our certified python connectors that were not using this base image + We want to make sure that they are still set on all our connectors to avoid breaking changes. + + Args: + base_image_version (AirbyteConnectorBaseImage): The base image version on which the sanity checks should run. + + Raises: + errors.SanityCheckError: Raised if a sanity check fails: the printenv command could not be executed or an expected variable is not set. + """ + for expected_env_var in AirbytePythonConnectorBaseImage.expected_env_vars: + await sanity_checks.check_env_var_with_printenv(base_image_version.container, expected_env_var) diff --git a/airbyte-ci/connectors/base_images/base_images/python/v1.py b/airbyte-ci/connectors/base_images/base_images/python/v1.py new file mode 100644 index 0000000000000..6c75a1606af1a --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/python/v1.py @@ -0,0 +1,79 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +"""This module declares all the airbyte python connector base image for version 1. 
+Please create a v2.py module if you want to declare a new major version. +""" +from __future__ import annotations + +from typing import Final, final + +import dagger +from base_images import common, sanity_checks +from base_images.python.common import AirbytePythonConnectorBaseImage, PythonBase + + +class _1_0_0(AirbytePythonConnectorBaseImage): + base_base_image: Final[PythonBase] = PythonBase.PYTHON_3_9_18 + + changelog_entry: Final[ + str + ] = "Declare our first base image version. It uses Python 3.9.18 on a Debian 11 (Bookworm) system with Pip 23.2.1 and UTC timezone." + + run_previous_version_sanity_checks = False + + @property + def container(self) -> dagger.Container: + pip_cache_volume: dagger.CacheVolume = self.dagger_client.cache_volume(AirbytePythonConnectorBaseImage.pip_cache_name) + + return ( + self.base_container.with_mounted_cache("/root/.cache/pip", pip_cache_volume) + # Set the timezone to UTC + .with_exec(["ln", "-snf", "/usr/share/zoneinfo/Etc/UTC", "/etc/localtime"]) + # Upgrade pip to the expected version + .with_exec(["pip", "install", "--upgrade", "pip==23.2.1"]) + ) + + @final + @staticmethod + async def run_sanity_checks(base_image_version: common.AirbyteConnectorBaseImage): + await sanity_checks.check_timezone_is_utc(base_image_version.container) + await sanity_checks.check_a_command_is_available_using_version_option(base_image_version.container, "bash") + await sanity_checks.check_python_version(base_image_version.container, "3.9.18") + await sanity_checks.check_pip_version(base_image_version.container, "23.2.1") + + +class _1_1_0(AirbytePythonConnectorBaseImage): + base_base_image: Final[PythonBase] = PythonBase.PYTHON_3_9_18 + + changelog_entry: Final[str] = "Install poetry 1.6.1" + + run_previous_version_sanity_checks = True + + @property + def container(self) -> dagger.Container: + pip_cache_volume: dagger.CacheVolume = self.dagger_client.cache_volume(AirbytePythonConnectorBaseImage.pip_cache_name) + return ( + 
self.base_container.with_mounted_cache("/root/.cache/pip", pip_cache_volume) + # Set the timezone to UTC + .with_exec(["ln", "-snf", "/usr/share/zoneinfo/Etc/UTC", "/etc/localtime"]) + # Upgrade pip to the expected version + .with_exec(["pip", "install", "--upgrade", "pip==23.2.1"]) + # Declare poetry specific environment variables + .with_env_variable("POETRY_VIRTUALENVS_CREATE", "false") + .with_env_variable("POETRY_VIRTUALENVS_IN_PROJECT", "false") + .with_env_variable("POETRY_NO_INTERACTION", "1") + .with_exec(["pip", "install", "poetry==1.6.1"], skip_entrypoint=True) + ) + + @final + @staticmethod + async def run_sanity_checks(base_image_version: common.AirbyteConnectorBaseImage): + await sanity_checks.check_poetry_version(base_image_version.container, "1.6.1") + + +# DECLARE NEW BASE IMAGE VERSIONS BELOW THIS LINE +# class _1_1_1(AirbytePythonConnectorBaseImage): + +# Breaking version should be declared in a v2 module. diff --git a/airbyte-ci/connectors/base_images/base_images/registries.py b/airbyte-ci/connectors/base_images/base_images/registries.py new file mode 100644 index 0000000000000..655bd383a08d3 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/registries.py @@ -0,0 +1,162 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from __future__ import annotations + +import importlib +import inspect +import pkgutil +from abc import ABC +from pathlib import Path +from types import ModuleType +from typing import TYPE_CHECKING, Any, List, Mapping, MutableMapping, Optional, Type + +import semver +from base_images import consts, errors +from py_markdown_table.markdown_table import markdown_table # type: ignore + +if TYPE_CHECKING: + from base_images.common import AirbyteConnectorBaseImage + + +def get_version_from_class_name(cls: Type) -> semver.VersionInfo: + """The version is parsed from the class name. + The class name must follow the naming convention: `_MAJOR_MINOR_PATCH` e.g `_1_0_0`. 
+ You can declare pre-release versions by adding a `__` followed by the pre-release version name e.g `_1_0_0__alpha`. + Returns: + semver.VersionInfo: The version parsed from the class name. + """ + try: + return semver.VersionInfo.parse(".".join(cls.__name__.replace("__", "-").split("_")[1:])) + except ValueError as e: + raise errors.BaseImageVersionError(f"The version class {cls.__name__} is not in the expected naming format: e.g `_1_0_0`.") from e + + +class VersionRegistry: + def __init__(self, abstract_base_version_class: Type[AirbyteConnectorBaseImage]): + self._versions: List[Type[AirbyteConnectorBaseImage]] = [] + self.abstract_base_version_class = abstract_base_version_class + + @property + def base_image_name(self) -> str: + return self.abstract_base_version_class.image_name # type: ignore + + @staticmethod + def build_from_package(abstract_base_version_class: Type[Any], package_name: str, package_path: List[str]) -> VersionRegistry: + version_registry = VersionRegistry(abstract_base_version_class) + all_base_image_versions = [] + for _, module_name, is_pkg in pkgutil.walk_packages(package_path, prefix=package_name + "."): + if not is_pkg: + module = importlib.import_module(module_name) + all_base_image_versions.extend(version_registry._get_all_concrete_subclasses_in_module(module, abstract_base_version_class)) + + version_registry._register_versions(all_base_image_versions) + return version_registry + + def _get_all_concrete_subclasses_in_module(self, module: ModuleType, SuperClass: Type) -> List[Type]: + all_subclasses = [] + for _, cls_member in inspect.getmembers(module, inspect.isclass): + if issubclass(cls_member, SuperClass) and cls_member != SuperClass and cls_member != ABC: + all_subclasses.append(cls_member) + return all_subclasses + + def _check_for_duplicate_versions(self, all_base_image_versions: List[Type[AirbyteConnectorBaseImage]]): + """Checks that there are no duplicate versions. 
This can happen if two version classes with the same name are defined in different modules. + + Args: + all_base_image_versions (List[AirbyteConnectorBaseImage]): A list of base image versions. + + Raises: + errors.BaseImageVersionError: Raised if there are duplicate versions. + """ + available_versions = [base_image_version_class.name_with_tag for base_image_version_class in all_base_image_versions] + unique_versions = set(available_versions) + if len(available_versions) != len(unique_versions): + raise errors.BaseImageVersionError( + "Found duplicate versions. Two version classes with the same name are probably defined in different modules." + ) + + def _register_versions(self, base_image_versions: List[Type[AirbyteConnectorBaseImage]]): + """Registers a list of base image versions. We check that there are no duplicate versions. This can happen if two version classes with the same name are defined in different modules. + + Args: + base_image_versions (List[Type[AirbyteConnectorBaseImage]]): The base image versions to register. + + Returns: + List[Type[AirbyteConnectorBaseImage]]: The full list of registered versions after the new ones are added. + """ + self._check_for_duplicate_versions(base_image_versions) + self._versions.extend(base_image_versions) + return self._versions + + @property + def versions(self) -> List[Type[AirbyteConnectorBaseImage]]: + """Returns all the base image versions sorted by version number in descending order. + + Returns: + List[Type[AirbyteConnectorBaseImage]]: All the base image versions sorted by version number in descending order. 
+ """ + return sorted(self._versions, key=lambda cls: cls.version, reverse=True) + + @property + def latest_version(self) -> Type[AirbyteConnectorBaseImage]: + return self.versions[0] + + def get_previous_version(self, base_image_version: AirbyteConnectorBaseImage) -> Optional[Type[AirbyteConnectorBaseImage]]: + for BaseImageVersion in self.versions: + if BaseImageVersion.version < base_image_version.version: + return BaseImageVersion + return None + + def as_dict(self) -> Mapping[str, Type[AirbyteConnectorBaseImage]]: + return {version.name_with_tag: version for version in self.versions} + + @property + def changelog_path(self) -> Path: + return consts.PROJECT_DIR / "generated" / "docs" / "base_images_changelogs" / f"{self.base_image_name}.md" + + def write_changelog(self) -> Path: + """Writes the registry changelog file locally.""" + self.changelog_path.parent.mkdir(exist_ok=True, parents=True) + self.changelog_path.unlink(missing_ok=True) + entries = [ + { + "Version": base_version_image_class.version, + "Changelog": base_version_image_class.changelog_entry, + } + for base_version_image_class in self.versions + ] + markdown = markdown_table(entries).set_params(row_sep="markdown", quote=False).get_markdown() + with open(self.changelog_path, "w") as f: + f.write(f"# Changelog for {self.base_image_name}\n\n") + f.write(markdown) + return self.changelog_path + + +class GlobalRegistry: + def __init__(self, all_registries: List[VersionRegistry]) -> None: + self.all_registries = all_registries + + def as_dict(self) -> MutableMapping[str, Type[AirbyteConnectorBaseImage]]: + all_registries_dict: MutableMapping[str, Type[AirbyteConnectorBaseImage]] = {} + for registry in self.all_registries: + all_registries_dict = {**all_registries_dict, **registry.as_dict()} + return all_registries_dict + + def get_version(self, image_name_with_tag: str) -> Type[AirbyteConnectorBaseImage]: + """Returns the base image version class from its name with tag. 
+ + Args: + image_name_with_tag (str): The base image version name with tag. + + Raises: + errors.BaseImageVersionError: Raised if the base image version is not found. + + Returns: + Type[AirbyteConnectorBaseImage]: The base image version class. + """ + try: + return self.as_dict()[image_name_with_tag] + except KeyError: + raise errors.BaseImageVersionError(f"Could not find base image version {image_name_with_tag} in the global registry.") diff --git a/airbyte-ci/connectors/base_images/base_images/sanity_checks.py b/airbyte-ci/connectors/base_images/base_images/sanity_checks.py new file mode 100644 index 0000000000000..c2318a00d4790 --- /dev/null +++ b/airbyte-ci/connectors/base_images/base_images/sanity_checks.py @@ -0,0 +1,164 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from typing import Optional + +import dagger +from base_images import errors + + +async def check_env_var_defined_with_dagger( + container: dagger.Container, expected_env_var_name: str, expected_env_var_value: Optional[str] = None +): + """This checks if an environment variable is correctly defined with dagger. + This is a better check than the one using printenv in some contexts were we have no guarantee that the printenv command is available. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + expected_env_var_name (str): The name of the environment variable to check. + expected_env_var_value (Optional[str], optional): The expected value of the environment variable. Defaults to None. + + Raises: + errors.SanityCheckError: Raised if the environment variable is not defined or if it has an unexpected value. 
+ """ + env_var_value = await container.env_variable(expected_env_var_name) + if env_var_value is None: + raise errors.SanityCheckError(f"the {expected_env_var_name} environment variable is not defined.") + if expected_env_var_value is not None and env_var_value != expected_env_var_value: + raise errors.SanityCheckError( + f"the {expected_env_var_name} environment variable is defined but has an unexpected value: {env_var_value}." + ) + + +async def check_env_var_with_printenv( + container: dagger.Container, expected_env_var_name: str, expected_env_var_value: Optional[str] = None +): + """This checks if an environment variable is correctly defined by calling the printenv command in a container. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + expected_env_var_name (str): The name of the environment variable to check. + expected_env_var_value (Optional[str], optional): The expected value of the environment variable. Defaults to None. + + Raises: + errors.SanityCheckError: Raised if the environment variable is not defined or if it has an unexpected value. + """ + try: + printenv_output = await container.with_exec(["printenv"], skip_entrypoint=True).stdout() + except dagger.ExecError as e: + raise errors.SanityCheckError(e) + env_vars = {line.split("=")[0]: line.split("=")[1] for line in printenv_output.splitlines()} + if expected_env_var_name not in env_vars: + raise errors.SanityCheckError(f"the {expected_env_var_name} environment variable is not defined.") + if expected_env_var_value is not None and env_vars[expected_env_var_name] != expected_env_var_value: + raise errors.SanityCheckError( + f"the {expected_env_var_name} environment variable is defined but has an unexpected value: {env_vars[expected_env_var_name]}." 
+ ) + + +async def check_label_defined_with_dagger(container: dagger.Container, expected_label: str, expected_label_value: Optional[str] = None): + """This checks if a label is correctly defined with dagger. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + expected_label (str): The name of the label to check. + expected_label_value (Optional[str], optional): The expected value of the label. Defaults to None. + + Raises: + errors.SanityCheckError: Raised if the label is not defined or if it has an unexpected value. + """ + label_value = await container.label(expected_label) + if label_value is None: + raise errors.SanityCheckError(f"the {expected_label_value} label is not defined.") + if expected_label_value is not None and label_value != expected_label_value: + raise errors.SanityCheckError(f"the {expected_label_value} label is defined but has an unexpected value: {label_value}.") + + +async def check_timezone_is_utc(container: dagger.Container): + """Check that the system timezone is UTC. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + + Raises: + errors.SanityCheckError: Raised if the date command could not be executed or if the outputted timezone is not UTC. + """ + try: + tz_output: str = await container.with_exec(["date"], skip_entrypoint=True).stdout() + except dagger.ExecError as e: + raise errors.SanityCheckError(e) + if "UTC" not in tz_output: + raise errors.SanityCheckError(f"unexpected timezone: {tz_output}") + + +async def check_a_command_is_available_using_version_option(container: dagger.Container, command: str): + """Checks that a command is available in the container by calling it with the --version option. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + command (str): The command to check. 
+ + Raises: + errors.SanityCheckError: Raised if the command could not be executed or if the outputted version is not the expected one. + """ + try: + command_version_output: str = await container.with_exec([command, "--version"], skip_entrypoint=True).stdout() + except dagger.ExecError as e: + raise errors.SanityCheckError(e) + if command_version_output == "": + raise errors.SanityCheckError(f"unexpected {command} version: {command_version_output}") + + +async def check_python_version(container: dagger.Container, expected_python_version: str): + """Checks that the python version is the expected one. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + expected_python_version (str): The expected python version. + + Raises: + errors.SanityCheckError: Raised if the python --version command could not be executed or if the outputted version is not the expected one. + """ + try: + python_version_output: str = await container.with_exec(["python", "--version"], skip_entrypoint=True).stdout() + except dagger.ExecError as e: + raise errors.SanityCheckError(e) + if python_version_output != f"Python {expected_python_version}\n": + raise errors.SanityCheckError(f"unexpected python version: {python_version_output}") + + +async def check_pip_version(container: dagger.Container, expected_pip_version: str): + """Checks that the pip version is the expected one. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + expected_pip_version (str): The expected pip version. + + Raises: + errors.SanityCheckError: Raised if the pip --version command could not be executed or if the outputted version is not the expected one. 
+ """ + try: + pip_version_output: str = await container.with_exec(["pip", "--version"], skip_entrypoint=True).stdout() + except dagger.ExecError as e: + raise errors.SanityCheckError(e) + if not pip_version_output.startswith(f"pip {expected_pip_version}"): + raise errors.SanityCheckError(f"unexpected pip version: {pip_version_output}") + + +async def check_poetry_version(container: dagger.Container, expected_poetry_version: str): + """Checks that the poetry version is the expected one. + + Args: + container (dagger.Container): The container on which the sanity checks should run. + expected_poetry_version (str): The expected poetry version. + + Raises: + errors.SanityCheckError: Raised if the poetry --version command could not be executed or if the outputted version is not the expected one. + """ + try: + poetry_version_output: str = await container.with_exec(["poetry", "--version"], skip_entrypoint=True).stdout() + except dagger.ExecError as e: + raise errors.SanityCheckError(e) + if not poetry_version_output.startswith(f"Poetry (version {expected_poetry_version}"): + raise errors.SanityCheckError(f"unexpected poetry version: {poetry_version_output}") diff --git a/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/amd64/1.0.0.Dockerfile b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/amd64/1.0.0.Dockerfile new file mode 100644 index 0000000000000..ab6a47eb86f63 --- /dev/null +++ b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/amd64/1.0.0.Dockerfile @@ -0,0 +1,12 @@ + +# This file is generated by base_images/build.py. Please do not edit it manually. +# It is not used by Airbyte internal build process. +# It is meant for documentation and debugging purposes. 
+ +FROM python:3.9.18-slim-bookworm@sha256:3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 +ENV AIRBYTE_BASE_BASE_IMAGE=python:3.9.18-slim-bookworm@sha256:3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 +ENV AIRBYTE_BASE_IMAGE=airbyte-python-connector-base:1.0.0 +LABEL io.airbyte.base_base_image=python:3.9.18-slim-bookworm@sha256:3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 +LABEL io.airbyte.base_image=airbyte-python-connector-base:1.0.0 +RUN ln -snf /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN pip install --upgrade pip==23.2.1 diff --git a/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/amd64/1.1.0.Dockerfile b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/amd64/1.1.0.Dockerfile new file mode 100644 index 0000000000000..78dd8750b9fae --- /dev/null +++ b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/amd64/1.1.0.Dockerfile @@ -0,0 +1,16 @@ + +# This file is generated by base_images/build.py. Please do not edit it manually. +# It is not used by Airbyte internal build process. +# It is meant for documentation and debugging purposes. 
+ +FROM python:3.9.18-slim-bookworm@sha256:3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 +ENV AIRBYTE_BASE_BASE_IMAGE=python:3.9.18-slim-bookworm@sha256:3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 +ENV AIRBYTE_BASE_IMAGE=airbyte-python-connector-base:1.1.0 +LABEL io.airbyte.base_base_image=python:3.9.18-slim-bookworm@sha256:3b4e3a36cce74c444b333a26958d65d08b0ded00869f1557faffe8d131a0bdc6 +LABEL io.airbyte.base_image=airbyte-python-connector-base:1.1.0 +RUN ln -snf /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN pip install --upgrade pip==23.2.1 +ENV POETRY_VIRTUALENVS_CREATE=false +ENV POETRY_VIRTUALENVS_IN_PROJECT=false +ENV POETRY_NO_INTERACTION=1 +RUN pip install poetry==1.6.1 diff --git a/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/arm64/1.0.0.Dockerfile b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/arm64/1.0.0.Dockerfile new file mode 100644 index 0000000000000..a12f701395d77 --- /dev/null +++ b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/arm64/1.0.0.Dockerfile @@ -0,0 +1,12 @@ + +# This file is generated by base_images/build.py. Please do not edit it manually. +# It is not used by Airbyte internal build process. +# It is meant for documentation and debugging purposes. 
+ +FROM python:3.9.18-slim-bookworm@sha256:ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1 +ENV AIRBYTE_BASE_BASE_IMAGE=python:3.9.18-slim-bookworm@sha256:ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1 +ENV AIRBYTE_BASE_IMAGE=airbyte-python-connector-base:1.0.0 +LABEL io.airbyte.base_base_image=python:3.9.18-slim-bookworm@sha256:ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1 +LABEL io.airbyte.base_image=airbyte-python-connector-base:1.0.0 +RUN ln -snf /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN pip install --upgrade pip==23.2.1 diff --git a/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/arm64/1.1.0.Dockerfile b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/arm64/1.1.0.Dockerfile new file mode 100644 index 0000000000000..6b3fa18c4f07d --- /dev/null +++ b/airbyte-ci/connectors/base_images/generated/dockerfiles/airbyte-python-connector-base/linux/arm64/1.1.0.Dockerfile @@ -0,0 +1,16 @@ + +# This file is generated by base_images/build.py. Please do not edit it manually. +# It is not used by Airbyte internal build process. +# It is meant for documentation and debugging purposes. 
+ +FROM python:3.9.18-slim-bookworm@sha256:ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1 +ENV AIRBYTE_BASE_BASE_IMAGE=python:3.9.18-slim-bookworm@sha256:ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1 +ENV AIRBYTE_BASE_IMAGE=airbyte-python-connector-base:1.1.0 +LABEL io.airbyte.base_base_image=python:3.9.18-slim-bookworm@sha256:ac1eb5caf138ab249f57f2d19666d8d55f25449e3408c2630479523c3537c0f1 +LABEL io.airbyte.base_image=airbyte-python-connector-base:1.1.0 +RUN ln -snf /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN pip install --upgrade pip==23.2.1 +ENV POETRY_VIRTUALENVS_CREATE=false +ENV POETRY_VIRTUALENVS_IN_PROJECT=false +ENV POETRY_NO_INTERACTION=1 +RUN pip install poetry==1.6.1 diff --git a/airbyte-ci/connectors/base_images/generated/docs/base_images_changelogs/airbyte-python-connector-base.md b/airbyte-ci/connectors/base_images/generated/docs/base_images_changelogs/airbyte-python-connector-base.md new file mode 100644 index 0000000000000..50654d281e0ef --- /dev/null +++ b/airbyte-ci/connectors/base_images/generated/docs/base_images_changelogs/airbyte-python-connector-base.md @@ -0,0 +1,6 @@ +# Changelog for airbyte-python-connector-base + +|Version| Changelog | +|-------|------------------------------------------------------------------------------------------------------------------------------| +| 1.1.0 | Install poetry 1.6.1 | +| 1.0.0 |Declare our first base image version. It uses Python 3.9.18 on a Debian 11 (Bookworm) system with Pip 23.2.1 and UTC timezone.| \ No newline at end of file diff --git a/airbyte-ci/connectors/base_images/poetry.lock b/airbyte-ci/connectors/base_images/poetry.lock new file mode 100644 index 0000000000000..8729f253f02b4 --- /dev/null +++ b/airbyte-ci/connectors/base_images/poetry.lock @@ -0,0 +1,867 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
+ +[[package]] +name = "anyio" +version = "4.0.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", 
"pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "beartype" +version = "0.15.0" +description = "Unbearably fast runtime type checking in pure Python." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "beartype-0.15.0-py3-none-any.whl", hash = "sha256:52cd2edea72fdd84e4e7f8011a9e3007bf0125c3d6d7219e937b9d8868169177"}, + {file = "beartype-0.15.0.tar.gz", hash = "sha256:2af6a8d8a7267ccf7d271e1a3bd908afbc025d2a09aa51123567d7d7b37438df"}, +] + +[package.extras] +all = ["typing-extensions (>=3.10.0.0)"] +dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] +doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] +test-tox = ["mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] +test-tox-coverage = ["coverage (>=5.5)"] + +[[package]] +name = "cattrs" +version = "23.1.2" +description = "Composable complex class support for attrs and dataclasses." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cattrs-23.1.2-py3-none-any.whl", hash = "sha256:b2bb14311ac17bed0d58785e5a60f022e5431aca3932e3fc5cc8ed8639de50a4"}, + {file = "cattrs-23.1.2.tar.gz", hash = "sha256:db1c821b8c537382b2c7c66678c3790091ca0275ac486c76f3c8f3920e83c657"}, +] + +[package.dependencies] +attrs = ">=20" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.2.0,<5.0.0)"] +cbor2 = ["cbor2 (>=5.4.6,<6.0.0)"] +msgpack = ["msgpack (>=1.0.2,<2.0.0)"] +orjson = ["orjson (>=3.5.2,<4.0.0)"] +pyyaml = ["PyYAML (>=6.0,<7.0)"] +tomlkit = ["tomlkit (>=0.11.4,<0.12.0)"] +ujson = ["ujson (>=5.4.0,<6.0.0)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.3.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, + {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, + {file = 
"coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, + {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, + {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, + {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, + {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, + {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, + {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, + 
{file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, + {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, + {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, + {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, + {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dagger-io" +version = "0.6.4" +description = "A client package for running Dagger pipelines in Python." 
+optional = false +python-versions = ">=3.10" +files = [ + {file = "dagger_io-0.6.4-py3-none-any.whl", hash = "sha256:b1bea624d1428a40228fffaa96407292cc3d18a7eca5bc036e6ceb9abd903d9a"}, + {file = "dagger_io-0.6.4.tar.gz", hash = "sha256:b754fd9820c41904e344377330ccca88f0a3409023eea8f0557db739b871e552"}, +] + +[package.dependencies] +anyio = ">=3.6.2" +beartype = ">=0.11.0" +cattrs = ">=22.2.0" +gql = ">=3.4.0" +graphql-core = ">=3.2.3" +httpx = ">=0.23.1" +platformdirs = ">=2.6.2" +typing-extensions = ">=4.4.0" + +[package.extras] +cli = ["typer[all] (>=0.6.1)"] +server = ["strawberry-graphql (>=0.187.0)", "typer[all] (>=0.6.1)"] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "gitdb" +version = "4.0.10" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.36" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.36-py3-none-any.whl", hash = "sha256:8d22b5cfefd17c79914226982bb7851d6ade47545b1735a9d010a2a4c26d8388"}, + {file = "GitPython-3.1.36.tar.gz", hash = "sha256:4bb0c2a6995e85064140d31a33289aa5dce80133a23d36fcd372d716c54d3ebf"}, 
+] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar", "virtualenv"] + +[[package]] +name = "gql" +version = "3.4.1" +description = "GraphQL client for Python" +optional = false +python-versions = "*" +files = [ + {file = "gql-3.4.1-py2.py3-none-any.whl", hash = "sha256:315624ca0f4d571ef149d455033ebd35e45c1a13f18a059596aeddcea99135cf"}, + {file = "gql-3.4.1.tar.gz", hash = "sha256:11dc5d8715a827f2c2899593439a4f36449db4f0eafa5b1ea63948f8a2f8c545"}, +] + +[package.dependencies] +backoff = ">=1.11.1,<3.0" +graphql-core = ">=3.2,<3.3" +yarl = ">=1.6,<2.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.7.1,<3.9.0)"] +all = ["aiohttp (>=3.7.1,<3.9.0)", "botocore (>=1.21,<2)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26,<2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] +botocore = ["botocore (>=1.21,<2)"] +dev = ["aiofiles", "aiohttp (>=3.7.1,<3.9.0)", "black (==22.3.0)", "botocore (>=1.21,<2)", "check-manifest (>=0.42,<1)", "flake8 (==3.8.1)", "isort (==4.3.21)", "mock (==4.0.2)", "mypy (==0.910)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "sphinx (>=3.0.0,<4)", "sphinx-argparse (==0.2.5)", "sphinx-rtd-theme (>=0.4,<1)", "types-aiofiles", "types-mock", "types-requests", "urllib3 (>=1.26,<2)", "vcrpy (==4.0.2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] +requests = ["requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26,<2)"] +test = ["aiofiles", "aiohttp (>=3.7.1,<3.9.0)", "botocore (>=1.21,<2)", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "requests (>=2.26,<3)", "requests-toolbelt (>=0.9.1,<1)", "urllib3 (>=1.26,<2)", "vcrpy 
(==4.0.2)", "websockets (>=10,<11)", "websockets (>=9,<10)"] +test-no-transport = ["aiofiles", "mock (==4.0.2)", "parse (==1.15.0)", "pytest (==6.2.5)", "pytest-asyncio (==0.16.0)", "pytest-console-scripts (==1.3.1)", "pytest-cov (==3.0.0)", "vcrpy (==4.0.2)"] +websockets = ["websockets (>=10,<11)", "websockets (>=9,<10)"] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "0.18.0" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-0.18.0-py3-none-any.whl", hash = "sha256:adc5398ee0a476567bf87467063ee63584a8bce86078bf748e48754f60202ced"}, + {file = "httpcore-0.18.0.tar.gz", hash = "sha256:13b5e5cd1dca1a6636a6aaea212b19f4f85cd88c366a2b82304181b769aab3c9"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = "==1.*" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httpx" +version = "0.25.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.25.0-py3-none-any.whl", hash = "sha256:181ea7f8ba3a82578be86ef4171554dd45fec26a02556a744db029a0a27b7100"}, + {file = "httpx-0.25.0.tar.gz", hash = "sha256:47ecda285389cb32bb2691cc6e069e3ab0205956f681c5b2ad2325719751d875"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.18.0,<0.19.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = 
"multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = 
"multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = 
"multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "mypy" +version = "1.5.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = 
"mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = 
"py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "py-markdown-table" +version = "0.4.0" +description = "Package that generates markdown tables from a list of dicts" +optional = false +python-versions = ">=3.6" +files = [ + {file = "py-markdown-table-0.4.0.tar.gz", hash = "sha256:e595f9cc03d8a464f6aa616027be29cf033cd778ae8fe8c030d3d664e790dbc3"}, + {file = "py_markdown_table-0.4.0-py3-none-any.whl", hash = "sha256:28a4848322cb1f975cf444080d3ada752138968b0f3727e295504c5f5fb5bae1"}, +] + +[package.extras] +dev = ["black", "pylint", "pytest", "pytest-cov"] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-mock" +version = "3.11.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "rich" +version = "13.5.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "semver" +version = "3.0.1" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "semver-3.0.1-py3-none-any.whl", hash = "sha256:2a23844ba1647362c7490fe3995a86e097bb590d16f0f32dfc383008f19e4cdf"}, + {file = 
"semver-3.0.1.tar.gz", hash = "sha256:9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1"}, +] + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.6" +files = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + 
{file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = 
"yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = 
"yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = 
"yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + 
{file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "c748bdd84f0294ca9c3de0b12b2f5ca02295a15c9eba9e5adf42e42a3bf419af" diff --git a/airbyte-ci/connectors/base_images/pyproject.toml b/airbyte-ci/connectors/base_images/pyproject.toml new file mode 100644 index 0000000000000..e6527f0763d88 --- /dev/null +++ 
b/airbyte-ci/connectors/base_images/pyproject.toml @@ -0,0 +1,28 @@ +[tool.poetry] +name = "airbyte-connectors-base-images" +version = "0.1.0" +description = "This package should be used as a library to import base images definitions to build Airbyte connectors." +authors = ["Augustin Lafanechere "] +readme = "README.md" +packages = [{include = "base_images"}] + +[tool.poetry.dependencies] +python = "^3.10" +dagger-io = "0.6.4" +py-markdown-table = "0.4.0" +gitpython = "^3.1.35" +rich = "^13.0.1" +semver = "^3.0.1" + +[tool.poetry.group.dev.dependencies] +pytest = "^6.2.5" +pytest-mock = "^3.10.0" +pytest-cov = "^4.1.0" +mypy = "^1.5.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry.scripts] +build = "base_images.build:main" diff --git a/airbyte-ci/connectors/base_images/pytest.ini b/airbyte-ci/connectors/base_images/pytest.ini new file mode 100644 index 0000000000000..f14609688a095 --- /dev/null +++ b/airbyte-ci/connectors/base_images/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --cov=base_images --cov-report=term-missing diff --git a/airbyte-ci/connectors/base_images/tests/__init__.py b/airbyte-ci/connectors/base_images/tests/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/base_images/tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/base_images/tests/conftest.py b/airbyte-ci/connectors/base_images/tests/conftest.py new file mode 100644 index 0000000000000..3f08d090096e4 --- /dev/null +++ b/airbyte-ci/connectors/base_images/tests/conftest.py @@ -0,0 +1,25 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +import platform +import sys + +import dagger +import pytest + + +@pytest.fixture(scope="module") +def anyio_backend(): + return "asyncio" + + +@pytest.fixture(scope="module") +async def dagger_client(): + async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client: + yield client + + +@pytest.fixture(scope="session") +def current_platform(): + return dagger.Platform(f"linux/{platform.machine()}") diff --git a/airbyte-ci/connectors/base_images/tests/test_build.py b/airbyte-ci/connectors/base_images/tests/test_build.py new file mode 100644 index 0000000000000..4abe49561c20a --- /dev/null +++ b/airbyte-ci/connectors/base_images/tests/test_build.py @@ -0,0 +1,63 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +import pytest +from base_images import build, errors +from base_images.python import v1 as python_v1 + + +@pytest.mark.anyio +async def test_run_sanity_checks_success(mocker, dagger_client, current_platform): + class _0_0_0(python_v1._1_0_0): + changelog_entry = "testing a base image version with successful sanity checks" + + @staticmethod + async def run_sanity_checks(base_image_version): + return None + + mocker.patch.object(build, "console") + base_image_version = _0_0_0(dagger_client, current_platform) + assert await build.run_sanity_checks(base_image_version) + all_logs_calls = build.console.log.call_args_list + assert all([call[0][0].startswith(":white_check_mark:") for call in all_logs_calls]) + + +@pytest.mark.anyio +async def test_run_sanity_checks_failure(mocker, dagger_client, current_platform): + class _0_0_0(python_v1._1_0_0): + changelog_entry = "testing a base image version with failing sanity checks" + + @staticmethod + async def run_sanity_checks(base_image_version): + raise errors.SanityCheckError("mocked sanity check failure") + + mocker.patch.object(build, "console") + base_image_version = _0_0_0(dagger_client, current_platform) + assert not await build.run_sanity_checks(base_image_version) + 
all_logs_calls = build.console.log.call_args_list + assert all([call[0][0].startswith(":cross_mark:") for call in all_logs_calls]) + + +@pytest.mark.anyio +async def test_generate_dockerfile(mocker, dagger_client, current_platform, tmp_path): + class _0_0_0(python_v1._1_0_0): + changelog_entry = "testing dockerfile generation" + + mocker.patch.object(build, "console") + mocker.patch.object(build.consts, "PROJECT_DIR", tmp_path) + base_image_version = _0_0_0(dagger_client, current_platform) + build.generate_dockerfile(base_image_version) + dockerfile_path = tmp_path / "generated" / "dockerfiles" / current_platform / f"{base_image_version.name_with_tag}.Dockerfile" + assert dockerfile_path.exists() + all_logs_calls = build.console.log.call_args_list + assert all([call[0][0].startswith(":whale2: Generated Dockerfile") for call in all_logs_calls]) + + +def test_write_changelog_file(tmp_path): + changelog_path = tmp_path / "CHANGELOG.md" + build.write_changelog_file(changelog_path, "test-image", {"0.0.0": python_v1._1_0_0}) + assert changelog_path.exists() + changelog_content = changelog_path.read_text() + assert changelog_content.startswith("# Changelog for test-image") + assert python_v1._1_0_0.changelog_entry in changelog_content diff --git a/airbyte-ci/connectors/base_images/tests/test_common.py b/airbyte-ci/connectors/base_images/tests/test_common.py new file mode 100644 index 0000000000000..93c5ba524a90d --- /dev/null +++ b/airbyte-ci/connectors/base_images/tests/test_common.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from base_images import common, consts + + +class TestPlatformAwareDockerImage: + def test_get_full_image_name(self): + image = common.PlatformAwareDockerImage( + image_name="my-image", + tag="v1.0", + sha="abc123", + platform=consts.SUPPORTED_PLATFORMS[0], + ) + + expected_full_image_name = "my-image:v1.0@sha256:abc123" + + full_image_name = image.get_full_image_name() + + assert full_image_name == expected_full_image_name diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/qa_checks.py b/airbyte-ci/connectors/connector_ops/connector_ops/qa_checks.py index 462a05c1c4d0b..03ac01c3a4c60 100644 --- a/airbyte-ci/connectors/connector_ops/connector_ops/qa_checks.py +++ b/airbyte-ci/connectors/connector_ops/connector_ops/qa_checks.py @@ -5,7 +5,7 @@ import sys from pathlib import Path -from typing import Iterable, Optional, Set, Tuple +from typing import Callable, Iterable, Optional, Set, Tuple from connector_ops.utils import Connector from pydash.objects import get @@ -238,7 +238,7 @@ def check_metadata_version_matches_dockerfile_label(connector: Connector) -> boo return connector.version_in_dockerfile_label == connector.version -QA_CHECKS = [ +DEFAULT_QA_CHECKS = ( check_documentation_file_exists, check_migration_guide, # Disabling the following check because it's likely to not pass on a lot of connectors. 
@@ -250,8 +250,13 @@ def check_metadata_version_matches_dockerfile_label(connector: Connector) -> boo # https://github.com/airbytehq/airbyte/issues/21606 check_connector_https_url_only, check_connector_has_no_critical_vulnerabilities, - check_metadata_version_matches_dockerfile_label, -] +) + + +def get_qa_checks_to_run(connector: Connector) -> Tuple[Callable]: + if connector.has_dockerfile: + return DEFAULT_QA_CHECKS + (check_metadata_version_matches_dockerfile_label,) + return DEFAULT_QA_CHECKS def remove_strict_encrypt_suffix(connector_technical_name: str) -> str: @@ -285,7 +290,7 @@ def run_qa_checks(): connector_technical_name = remove_strict_encrypt_suffix(connector_technical_name) connector = Connector(connector_technical_name) print(f"Running QA checks for {connector_technical_name}:{connector.version}") - qa_check_results = {qa_check.__name__: qa_check(connector) for qa_check in QA_CHECKS} + qa_check_results = {qa_check.__name__: qa_check(connector) for qa_check in get_qa_checks_to_run(connector)} if not all(qa_check_results.values()): print(f"QA checks failed for {connector_technical_name}:{connector.version}:") for check_name, check_result in qa_check_results.items(): diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/utils.py b/airbyte-ci/connectors/connector_ops/connector_ops/utils.py index 32bf07a2dd1e0..0578e7631bf9e 100644 --- a/airbyte-ci/connectors/connector_ops/connector_ops/utils.py +++ b/airbyte-ci/connectors/connector_ops/connector_ops/utils.py @@ -236,6 +236,10 @@ def icon_path(self) -> Path: def code_directory(self) -> Path: return Path(f"./airbyte-integrations/connectors/{self.technical_name}") + @property + def has_dockerfile(self) -> bool: + return (self.code_directory / "Dockerfile").is_file() + @property def metadata_file_path(self) -> Path: return self.code_directory / METADATA_FILE_NAME @@ -253,22 +257,20 @@ def language(self) -> ConnectorLanguage: return ConnectorLanguage.LOW_CODE if Path(self.code_directory / 
"setup.py").is_file(): return ConnectorLanguage.PYTHON - try: - with open(self.code_directory / "Dockerfile") as dockerfile: - if "FROM airbyte/integration-base-java" in dockerfile.read(): - return ConnectorLanguage.JAVA - except FileNotFoundError: - pass + if Path(self.code_directory / "build.gradle").is_file(): + return ConnectorLanguage.JAVA return None @property - def version(self) -> str: + def version(self) -> Optional[str]: if self.metadata is None: return self.version_in_dockerfile_label return self.metadata["dockerImageTag"] @property - def version_in_dockerfile_label(self) -> str: + def version_in_dockerfile_label(self) -> Optional[str]: + if not self.has_dockerfile: + return None with open(self.code_directory / "Dockerfile") as f: for line in f: if "io.airbyte.version" in line: diff --git a/airbyte-ci/connectors/connector_ops/poetry.lock b/airbyte-ci/connectors/connector_ops/poetry.lock index 298e1371e4851..983b3985bc173 100644 --- a/airbyte-ci/connectors/connector_ops/poetry.lock +++ b/airbyte-ci/connectors/connector_ops/poetry.lock @@ -203,13 +203,13 @@ url = "../ci_credentials" [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -244,20 +244,6 @@ requests = "^2.28.2" type = "directory" url = "../common_utils" -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" 
-optional = false -python-versions = "*" -files = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - [[package]] name = "cryptography" version = "3.4.8" @@ -358,18 +344,21 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.32" +version = "3.1.36" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, - {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, + {file = "GitPython-3.1.36-py3-none-any.whl", hash = "sha256:8d22b5cfefd17c79914226982bb7851d6ade47545b1735a9d010a2a4c26d8388"}, + {file = "GitPython-3.1.36.tar.gz", hash = "sha256:4bb0c2a6995e85064140d31a33289aa5dce80133a23d36fcd372d716c54d3ebf"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar", "virtualenv"] + [[package]] name = "google-api-core" version = "2.11.1" @@ -394,20 +383,19 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.22.0" +version = "2.23.0" description = "Google Authentication Library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, + {file = 
"google-auth-2.23.0.tar.gz", hash = "sha256:753a26312e6f1eaeec20bc6f2644a10926697da93446e1f8e24d6d32d45a922a"}, + {file = "google_auth-2.23.0-py2.py3-none-any.whl", hash = "sha256:2cec41407bd1e207f5b802638e32bb837df968bb5c05f413d0fa526fac4cf7a7"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -six = ">=1.9.0" urllib3 = "<2.0" [package.extras] @@ -538,20 +526,20 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.5.0" +version = "2.6.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">= 3.7" files = [ - {file = "google-resumable-media-2.5.0.tar.gz", hash = "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"}, - {file = "google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"}, + {file = "google-resumable-media-2.6.0.tar.gz", hash = "sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7"}, + {file = "google_resumable_media-2.6.0-py2.py3-none-any.whl", hash = "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b"}, ] [package.dependencies] google-crc32c = ">=1.0,<2.0dev" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] @@ -593,6 +581,41 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "numpy" version = "1.25.2" @@ -640,41 +663,35 @@ files = [ [[package]] name = "pandas" -version = "2.0.3" +version = "2.1.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = 
"pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = 
"pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, + {file = "pandas-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:40dd20439ff94f1b2ed55b393ecee9cb6f3b08104c2c40b0cb7186a2f0046242"}, + {file = "pandas-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4f38e4fedeba580285eaac7ede4f686c6701a9e618d8a857b138a126d067f2f"}, + {file = "pandas-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e6a0fe052cf27ceb29be9429428b4918f3740e37ff185658f40d8702f0b3e09"}, + {file = "pandas-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9d81e1813191070440d4c7a413cb673052b3b4a984ffd86b8dd468c45742d3cc"}, + {file = "pandas-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eb20252720b1cc1b7d0b2879ffc7e0542dd568f24d7c4b2347cb035206936421"}, + {file = "pandas-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:38f74ef7ebc0ffb43b3d633e23d74882bce7e27bfa09607f3c5d3e03ffd9a4a5"}, + {file = "pandas-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cda72cc8c4761c8f1d97b169661f23a86b16fdb240bdc341173aee17e4d6cedd"}, + {file = "pandas-2.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d97daeac0db8c993420b10da4f5f5b39b01fc9ca689a17844e07c0a35ac96b4b"}, + {file = "pandas-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8c58b1113892e0c8078f006a167cc210a92bdae23322bb4614f2f0b7a4b510f"}, + {file = "pandas-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:629124923bcf798965b054a540f9ccdfd60f71361255c81fa1ecd94a904b9dd3"}, + {file = "pandas-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:70cf866af3ab346a10debba8ea78077cf3a8cd14bd5e4bed3d41555a3280041c"}, + {file = "pandas-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d53c8c1001f6a192ff1de1efe03b31a423d0eee2e9e855e69d004308e046e694"}, + {file = "pandas-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:86f100b3876b8c6d1a2c66207288ead435dc71041ee4aea789e55ef0e06408cb"}, + {file = "pandas-2.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28f330845ad21c11db51e02d8d69acc9035edfd1116926ff7245c7215db57957"}, + {file = "pandas-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9a6ccf0963db88f9b12df6720e55f337447aea217f426a22d71f4213a3099a6"}, + {file = "pandas-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99e678180bc59b0c9443314297bddce4ad35727a1a2656dbe585fd78710b3b9"}, + {file = "pandas-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:b31da36d376d50a1a492efb18097b9101bdbd8b3fbb3f49006e02d4495d4c644"}, + {file = "pandas-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0164b85937707ec7f70b34a6c3a578dbf0f50787f910f21ca3b26a7fd3363437"}, + {file = "pandas-2.1.0.tar.gz", hash = "sha256:62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918"}, ] [package.dependencies] numpy = [ - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" @@ -682,37 +699,38 @@ pytz = ">=2020.1" tzdata = ">=2022.1" [package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec 
(>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] 
-test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -721,24 +739,24 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "4.24.0" +version = "4.24.3" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, - {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, - {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, - {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, - {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, - {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, - {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, - {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, - {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, - {file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, - {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, + {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, + {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, + {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, + {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, + {file = 
"protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, + {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, + {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, + {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, + {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, + {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, + {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, ] [[package]] @@ -925,13 +943,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] @@ -978,13 +996,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = 
"sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -1059,22 +1077,21 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "11.2.0" +version = "13.5.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.6.2,<4.0.0" +python-versions = ">=3.7.0" files = [ - {file = "rich-11.2.0-py3-none-any.whl", hash = "sha256:d5f49ad91fb343efcae45a2b2df04a9755e863e50413623ab8c9e74f05aee52b"}, - {file = "rich-11.2.0.tar.gz", hash = "sha256:1a6266a5738115017bb64a66c59c717e7aa047b3ae49a011ede4abdeffc6536e"}, + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, ] [package.dependencies] -colorama = ">=0.4.0,<0.5.0" -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" [package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rsa" @@ -1259,4 +1276,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "90d4eb642ae88b77052b419c50abfe1d56ddf9425af44561123cd140f6baf00c" +content-hash = "53c1a2005d0a71be8254cd2d308aab360cd71a7c80dd478931371a9973dcc26f" diff --git a/airbyte-ci/connectors/connector_ops/pyproject.toml b/airbyte-ci/connectors/connector_ops/pyproject.toml index f1531f73abe1d..8ccedea1c4d82 100644 --- 
a/airbyte-ci/connectors/connector_ops/pyproject.toml +++ b/airbyte-ci/connectors/connector_ops/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "connector_ops" -version = "0.2.2" +version = "0.2.4" description = "Packaged maintained by the connector operations team to perform CI for connectors" authors = ["Airbyte "] @@ -16,7 +16,7 @@ PyYAML = "^6.0" GitPython = "^3.1.29" pydantic = "^1.9" PyGithub = "^1.58.0" -rich = "^11.0.1" +rich = "^13.0.1" pydash = "^7.0.4" google-cloud-storage = "^2.8.0" ci-credentials = {path = "../ci_credentials"} diff --git a/airbyte-ci/connectors/connector_ops/tests/test_qa_checks.py b/airbyte-ci/connectors/connector_ops/tests/test_qa_checks.py index a7860b65505a3..2c647ad9258bf 100644 --- a/airbyte-ci/connectors/connector_ops/tests/test_qa_checks.py +++ b/airbyte-ci/connectors/connector_ops/tests/test_qa_checks.py @@ -80,7 +80,7 @@ def test_run_qa_checks_success(capsys, mocker, user_input, expect_qa_checks_to_r mocker.patch.object(qa_checks, "Connector") mock_qa_check = mocker.Mock(return_value=True, __name__="mock_qa_check") if expect_qa_checks_to_run: - mocker.patch.object(qa_checks, "QA_CHECKS", [mock_qa_check]) + mocker.patch.object(qa_checks, "get_qa_checks_to_run", return_value=[mock_qa_check]) with pytest.raises(SystemExit) as wrapped_error: qa_checks.run_qa_checks() assert wrapped_error.value.code == 0 @@ -101,7 +101,7 @@ def test_run_qa_checks_error(capsys, mocker): mocker.patch.object(qa_checks.sys, "argv", ["", "source-faker"]) mocker.patch.object(qa_checks, "Connector") mock_qa_check = mocker.Mock(return_value=False, __name__="mock_qa_check") - mocker.patch.object(qa_checks, "QA_CHECKS", [mock_qa_check]) + mocker.patch.object(qa_checks, "DEFAULT_QA_CHECKS", (mock_qa_check,)) with pytest.raises(SystemExit) as wrapped_error: qa_checks.run_qa_checks() assert wrapped_error.value.code == 1 @@ -201,7 +201,7 @@ def test_check_missing_migration_guide(mocker, tmp_path, capsys): } 
mocker.patch.object(qa_checks.Connector, "metadata", mock_metadata_dict) - assert qa_checks.check_migration_guide(connector) == False + assert qa_checks.check_migration_guide(connector) is False stdout, _ = capsys.readouterr() assert "Migration guide file is missing for foobar. Please create a foobar-migrations.md file in the docs folder" in stdout @@ -241,6 +241,28 @@ def test_check_invalid_migration_guides(mocker, tmp_path, capsys, test_file, exp mocker.patch.object(qa_checks.Connector, "metadata", mock_metadata_dict) - assert qa_checks.check_migration_guide(connector) == False + assert qa_checks.check_migration_guide(connector) is False stdout, _ = capsys.readouterr() assert expected_stdout in stdout + + +def test_get_qa_checks_to_run(mocker): + mocker.patch.object(utils.Connector, "has_dockerfile", False) + connector = utils.Connector("source-faker") + + assert ( + qa_checks.get_qa_checks_to_run(connector) == qa_checks.DEFAULT_QA_CHECKS + ), "A connector without a Dockerfile should run the default set of QA checks" + mocker.patch.object(utils.Connector, "has_dockerfile", True) + connector = utils.Connector("source-faker") + assert qa_checks.get_qa_checks_to_run(connector) == qa_checks.DEFAULT_QA_CHECKS + ( + qa_checks.check_metadata_version_matches_dockerfile_label, + ), "A connector with a Dockerfile should run the default set of QA checks plus check_metadata_version_matches_dockerfile_label" + + +def test_check_metadata_version_matches_dockerfile_label_without_dockerfile(mocker): + mocker.patch.object(utils.Connector, "has_dockerfile", False) + connector_without_dockerfile = utils.Connector("source-faker") + assert ( + qa_checks.check_metadata_version_matches_dockerfile_label(connector_without_dockerfile) is False + ), "A connector without a Dockerfile should fail check_metadata_version_matches_dockerfile_label" diff --git a/airbyte-ci/connectors/connector_ops/tests/test_utils.py b/airbyte-ci/connectors/connector_ops/tests/test_utils.py index 
b4f6ca7746cb4..1f8d3f931cadd 100644 --- a/airbyte-ci/connectors/connector_ops/tests/test_utils.py +++ b/airbyte-ci/connectors/connector_ops/tests/test_utils.py @@ -51,8 +51,7 @@ def test_init(self, connector, exists, mocker, tmp_path): assert connector.support_level is None assert connector.acceptance_test_config is None assert connector.icon_path == Path(f"./airbyte-integrations/connectors/{connector.technical_name}/icon.svg") - with pytest.raises(FileNotFoundError): - connector.version + assert connector.version is None with pytest.raises(utils.ConnectorVersionNotFound): Path(tmp_path / "Dockerfile").touch() mocker.patch.object(utils.Connector, "code_directory", tmp_path) @@ -73,6 +72,25 @@ def test_metadata_query_match(self, mocker): assert not connector.metadata_query_match("data.ab_internal.ql > 101") assert not connector.metadata_query_match("data.ab_internal == whatever") + @pytest.fixture + def connector_without_dockerfile(self, mocker, tmp_path): + mocker.patch.object(utils.Connector, "code_directory", tmp_path) + connector = utils.Connector("source-faker") + return connector + + def test_has_dockerfile_without_dockerfile(self, connector_without_dockerfile): + assert not connector_without_dockerfile.has_dockerfile + + @pytest.fixture + def connector_with_dockerfile(self, mocker, tmp_path): + mocker.patch.object(utils.Connector, "code_directory", tmp_path) + connector = utils.Connector("source-faker") + tmp_path.joinpath("Dockerfile").touch() + return connector + + def test_has_dockerfile_with_dockerfile(self, connector_with_dockerfile): + assert connector_with_dockerfile.has_dockerfile + @pytest.fixture() def gradle_file_with_dependencies(tmpdir) -> Path: @@ -105,49 +123,3 @@ def test_parse_dependencies(gradle_file_with_dependencies): assert all([regular_dependency in expected_regular_dependencies for regular_dependency in regular_dependencies]) assert len(test_dependencies) == len(expected_test_dependencies) assert all([test_dependency in 
expected_test_dependencies for test_dependency in test_dependencies]) - - -@pytest.mark.parametrize("with_test_dependencies", [True, False]) -def test_get_all_gradle_dependencies(with_test_dependencies): - build_file = Path("airbyte-integrations/connectors/source-postgres-strict-encrypt/build.gradle") - if with_test_dependencies: - all_dependencies = utils.get_all_gradle_dependencies(build_file) - expected_dependencies = [ - Path("airbyte-cdk/java/airbyte-cdk"), - Path("airbyte-db/db-lib"), - Path("airbyte-json-validation"), - Path("airbyte-config-oss/config-models-oss"), - Path("airbyte-commons"), - Path("airbyte-test-utils"), - Path("airbyte-api"), - Path("airbyte-connector-test-harnesses/acceptance-test-harness"), - Path("airbyte-commons-protocol"), - Path("airbyte-integrations/bases/base-java"), - Path("airbyte-commons-cli"), - Path("airbyte-integrations/bases/base"), - Path("airbyte-integrations/connectors/source-postgres"), - Path("airbyte-integrations/bases/debezium"), - Path("airbyte-integrations/connectors/source-jdbc"), - Path("airbyte-integrations/connectors/source-relational-db"), - Path("airbyte-integrations/bases/standard-source-test"), - ] - assert len(all_dependencies) == len(expected_dependencies) - assert all([dependency in expected_dependencies for dependency in all_dependencies]) - else: - all_dependencies = utils.get_all_gradle_dependencies(build_file, with_test_dependencies=False) - expected_dependencies = [ - Path("airbyte-cdk/java/airbyte-cdk"), - Path("airbyte-db/db-lib"), - Path("airbyte-json-validation"), - Path("airbyte-config-oss/config-models-oss"), - Path("airbyte-commons"), - Path("airbyte-integrations/bases/base-java"), - Path("airbyte-commons-cli"), - Path("airbyte-integrations/bases/base"), - Path("airbyte-integrations/connectors/source-postgres"), - Path("airbyte-integrations/bases/debezium"), - Path("airbyte-integrations/connectors/source-jdbc"), - Path("airbyte-integrations/connectors/source-relational-db"), - ] - assert 
len(all_dependencies) == len(expected_dependencies) - assert all([dependency in expected_dependencies for dependency in all_dependencies]) diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 34e3801e224e3..89231664473ab 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -10,13 +10,10 @@ This documentation should be helpful for both local and CI use of the CLI. We in ## How to install ### Requirements -* A running Docker engine +* A running Docker engine with version >= 20.10.23 * Python >= 3.10 * [pipx](https://pypa.github.io/pipx/installation/) -## Requirements - -This project requires Python 3.10 and pipx. ## Install or Update @@ -406,6 +403,7 @@ This command runs the Python tests for a airbyte-ci poetry package. ## Changelog | Version | PR | Description | |---------| --------------------------------------------------------- |-----------------------------------------------------------------------------------------------------------| +| 1.3.0 | [#TBD](https://github.com/airbytehq/airbyte/pull/TBD) | Start building Python connectors using our base images. | | 1.2.3 | [#30477](https://github.com/airbytehq/airbyte/pull/30477) | Fix a test regression introduced the previous version. | | 1.2.2 | [#30438](https://github.com/airbytehq/airbyte/pull/30438) | Add workaround to always stream logs properly with --is-local. | | 1.2.1 | [#30384](https://github.com/airbytehq/airbyte/pull/30384) | Java connector test performance fixes. 
| diff --git a/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py b/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py index 9dc21f916ead7..10627677e247d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py +++ b/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py @@ -14,7 +14,7 @@ from typing import TYPE_CHECKING, Callable, List, Optional import toml -from dagger import CacheSharingMode, CacheVolume, Client, Container, DaggerError, Directory, File, Platform, Secret +from dagger import CacheVolume, Client, Container, DaggerError, Directory, File, Platform, Secret from dagger.engine._version import CLI_VERSION as dagger_engine_version from pipelines import consts from pipelines.consts import ( @@ -88,7 +88,7 @@ def with_testing_dependencies(context: PipelineContext) -> Container: ) -def with_git(dagger_client, ci_github_access_token_secret, ci_git_user) -> Container: +def with_git(dagger_client, ci_git_user: str = "octavia") -> Container: return ( dagger_client.container() .from_("alpine:latest") @@ -103,7 +103,6 @@ def with_git(dagger_client, ci_github_access_token_secret, ci_git_user) -> Conta ] ) ) - .with_secret_variable("GITHUB_TOKEN", ci_github_access_token_secret) .with_workdir("/ghcli") .with_exec( sh_dash_c( @@ -365,36 +364,6 @@ def with_python_connector_source(context: ConnectorContext) -> Container: return with_python_package(context, testing_environment, connector_source_path) -async def with_python_connector_installed(context: ConnectorContext) -> Container: - """Install an airbyte connector python package in a testing environment. - - Args: - context (ConnectorContext): The current test context, providing the repository directory from which the connector sources will be pulled. - Returns: - Container: A python environment container (with the connector installed). 
- """ - connector_source_path = str(context.connector.code_directory) - testing_environment: Container = with_testing_dependencies(context) - exclude = [ - f"{context.connector.code_directory}/{item}" - for item in [ - "secrets", - "metadata.yaml", - "bootstrap.md", - "icon.svg", - "README.md", - "Dockerfile", - "acceptance-test-docker.sh", - "build.gradle", - ".hypothesis", - ".dockerignore", - ] - ] - return await with_installed_python_package( - context, testing_environment, connector_source_path, additional_dependency_groups=["dev", "tests", "main"], exclude=exclude - ) - - async def with_ci_credentials(context: PipelineContext, gsm_secret: Secret) -> Container: """Install the ci_credentials package in a python environment. diff --git a/airbyte-ci/connectors/pipelines/pipelines/bases.py b/airbyte-ci/connectors/pipelines/pipelines/bases.py index b5f397d69ff2b..d3ccd725859ba 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/bases.py +++ b/airbyte-ci/connectors/pipelines/pipelines/bases.py @@ -23,8 +23,8 @@ from jinja2 import Environment, PackageLoader, select_autoescape from pipelines import sentry_utils from pipelines.actions import remote_storage -from pipelines.consts import GCS_PUBLIC_DOMAIN, LOCAL_REPORTS_PATH_ROOT, PYPROJECT_TOML_FILE_PATH -from pipelines.utils import METADATA_FILE_NAME, check_path_in_workdir, format_duration, get_exec_result +from pipelines.consts import GCS_PUBLIC_DOMAIN, LOCAL_REPORTS_PATH_ROOT +from pipelines.utils import METADATA_FILE_NAME, format_duration, get_exec_result from rich.console import Group from rich.panel import Panel from rich.style import Style @@ -276,42 +276,6 @@ def _get_timed_out_step_result(self) -> StepResult: ) -class PytestStep(Step, ABC): - """An abstract class to run pytest tests and evaluate success or failure according to pytest logs.""" - - skipped_exit_code = 5 - - async def _run_tests_in_directory(self, connector_under_test: Container, test_directory: str) -> StepResult: - """Run the pytest tests in 
the test_directory that was passed. - - A StepStatus.SKIPPED is returned if no tests were discovered. - - Args: - connector_under_test (Container): The connector under test container. - test_directory (str): The directory in which the python test modules are declared - - Returns: - Tuple[StepStatus, Optional[str], Optional[str]]: Tuple of StepStatus, stderr and stdout. - """ - test_config = "pytest.ini" if await check_path_in_workdir(connector_under_test, "pytest.ini") else "/" + PYPROJECT_TOML_FILE_PATH - if await check_path_in_workdir(connector_under_test, test_directory): - tester = connector_under_test.with_exec( - [ - "python", - "-m", - "pytest", - "-s", - test_directory, - "-c", - test_config, - ] - ) - return await self.get_step_result(tester) - - else: - return StepResult(self, StepStatus.SKIPPED) - - class NoOpStep(Step): """A step that does nothing.""" @@ -622,7 +586,7 @@ async def to_html(self) -> str: async def save(self) -> None: local_html_path = await self.save_local(self.html_report_file_name, await self.to_html()) absolute_path = await local_html_path.resolve() - if self.pipeline_context.is_local: + if self.pipeline_context.open_report_in_browser: self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}") self.pipeline_context.logger.info("Opening HTML report in browser.") webbrowser.open(absolute_path.as_uri()) diff --git a/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py index d18dc9537d8d0..7f63caff1d1bd 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py @@ -2,8 +2,14 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from dagger import QueryError -from pipelines.actions.environments import with_airbyte_python_connector +import importlib +from pathlib import Path +from types import ModuleType +from typing import Optional + +from base_images import GLOBAL_REGISTRY +from dagger import Container, QueryError +from pipelines.actions.environments import find_local_python_dependencies from pipelines.bases import StepResult, StepStatus from pipelines.builds.common import BuildConnectorImageBase, BuildConnectorImageForAllPlatformsBase from pipelines.contexts import ConnectorContext @@ -15,13 +21,139 @@ class BuildConnectorImage(BuildConnectorImageBase): A spec command is run on the container to validate it was built successfully. """ + DEFAULT_MAIN_FILE_NAME = "main.py" + PATH_TO_INTEGRATION_CODE = "/airbyte/integration_code" + + @staticmethod + def get_main_file_name(build_customization_module: Optional[ModuleType]) -> str: + if build_customization_module is not None and hasattr(build_customization_module, "MAIN_FILE_NAME"): + return build_customization_module.MAIN_FILE_NAME + return BuildConnectorImage.DEFAULT_MAIN_FILE_NAME + + @staticmethod + def get_entrypoint(main_file_name) -> str: + return ["python", f"/airbyte/integration_code/{main_file_name}"] + + @property + def _build_connector_function(self): + if ( + "connectorBuildOptions" in self.context.connector.metadata + and "baseImage" in self.context.connector.metadata["connectorBuildOptions"] + ): + return self._build_from_base_image + else: + return self._build_from_dockerfile + async def _run(self) -> StepResult: - connector = await with_airbyte_python_connector(self.context, self.build_platform) + connector: Container = await self._build_connector_function() try: return await self.get_step_result(connector.with_exec(["spec"])) except QueryError as e: return StepResult(self, StepStatus.FAILURE, stderr=str(e)) + def _get_base_container(self) -> Container: + base_image_name = 
self.context.connector.metadata["connectorBuildOptions"]["baseImage"] + BaseImageVersion = GLOBAL_REGISTRY.get_version(base_image_name) + self.logger.info(f"Building connector from base image {base_image_name}") + return BaseImageVersion(self.dagger_client, self.build_platform).container + + async def _provision_builder_container(self, base_container: Container) -> Container: + """Pre install the connector dependencies in a builder container. + If a python connectors depends on another local python connector, we need to mount its source in the container + This occurs for the source-file-secure connector for example, which depends on source-file + + Args: + base_container (Container): The base container to use to build the connector. + + Returns: + Container: The builder container, with installed dependencies. + """ + setup_dependencies_to_mount = await find_local_python_dependencies( + self.context, + str(self.context.connector.code_directory), + search_dependencies_in_setup_py=True, + search_dependencies_in_requirements_txt=False, + ) + builder = ( + base_container.with_workdir(self.PATH_TO_INTEGRATION_CODE) + # This env var is used in the setup.py to know if it is run in a container or not + # When run in a container, we need to mount the local dependencies to ./local_dependencies + # The setup.py reacts to this env var and use the /local_dependencies path instead of the normal local path + .with_env_variable("DAGGER_BUILD", "1").with_file( + "setup.py", (await self.context.get_connector_dir(include="setup.py")).file("setup.py") + ) + ) + for dependency_path in setup_dependencies_to_mount: + in_container_dependency_path = f"/local_dependencies/{Path(dependency_path).name}" + builder = builder.with_mounted_directory(in_container_dependency_path, self.context.get_repo_dir(dependency_path)) + + return builder.with_exec(["pip", "install", "--prefix=/install", "."]) + + def _get_build_customization_module(self) -> Optional[ModuleType]: + """Import the 
build_customization.py file from the connector directory if it exists. + + Returns: + Optional[ModuleType]: The build_customization.py module if it exists, None otherwise. + """ + build_customization_spec_path = self.context.connector.code_directory / "build_customization.py" + if not build_customization_spec_path.exists(): + return None + + build_customization_spec = importlib.util.spec_from_file_location( + f"{self.context.connector.code_directory.name}_build_customization", build_customization_spec_path + ) + build_customization_module = importlib.util.module_from_spec(build_customization_spec) + build_customization_spec.loader.exec_module(build_customization_module) + self.logger.info("This connector has a build_customization.py file. Using it to customize the build.") + return build_customization_module + + async def _build_from_base_image(self) -> Container: + """Build the connector container using the base image defined in the metadata, in the connectorBuildOptions.baseImage field. + + Returns: + Container: The connector container built from the base image. 
+ """ + base = self._get_base_container() + build_customization_module = self._get_build_customization_module() + if hasattr(build_customization_module, "pre_connector_install"): + self.logger.info("Adding the pre_connector_install hook to the base") + base = await build_customization_module.pre_connector_install(base) + + builder = await self._provision_builder_container(base) + connector_snake_case_name = self.context.connector.technical_name.replace("-", "_") + main_file_name = self.get_main_file_name(build_customization_module) + entrypoint = self.get_entrypoint(main_file_name) + + connector_container = ( + base.with_directory("/usr/local", builder.directory("/install")) + .with_workdir(self.PATH_TO_INTEGRATION_CODE) + .with_file(main_file_name, (await self.context.get_connector_dir(include=main_file_name)).file(main_file_name)) + .with_directory( + connector_snake_case_name, + (await self.context.get_connector_dir(include=connector_snake_case_name)).directory(connector_snake_case_name), + ) + .with_env_variable("AIRBYTE_ENTRYPOINT", " ".join(entrypoint)) + .with_entrypoint(entrypoint) + .with_label("io.airbyte.version", self.context.connector.metadata["dockerImageTag"]) + .with_label("io.airbyte.name", self.context.connector.metadata["dockerRepository"]) + ) + + if hasattr(build_customization_module, "post_connector_install"): + self.logger.info("Adding the post_connector_install hook to the connector container") + connector_container = await build_customization_module.post_connector_install(connector_container) + return connector_container + + async def _build_from_dockerfile(self) -> Container: + """Build the connector container using its Dockerfile. + + Returns: + Container: The connector container built from its Dockerfile. + """ + self.logger.warn( + "This connector is built from its Dockerfile. This is now deprecated. Please set connectorBuildOptions.baseImage metadata field to use or new build process." 
+ ) + return self.dagger_client.container(platform=self.build_platform).build(await self.context.get_connector_dir()) + class BuildConnectorImageForAllPlatforms(BuildConnectorImageForAllPlatformsBase): """Build a Python connector image for all platforms.""" diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py b/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py index 3af9576c9d031..dd33b0008cb3d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py @@ -15,13 +15,17 @@ from pipelines import main_logger from pipelines.bases import ConnectorWithModifiedFiles from pipelines.builds import run_connector_build_pipeline +from pipelines.connector_changes.base_image_version_upgrade import run_connector_base_image_upgrade_pipeline +from pipelines.connector_changes.format import run_connector_format_pipeline +from pipelines.connector_changes.version_bump import run_connector_version_bump_pipeline from pipelines.contexts import ConnectorContext, ContextState, PublishConnectorContext -from pipelines.format import run_connectors_format_pipelines from pipelines.github import update_global_commit_status_check_for_tests from pipelines.pipelines.connectors import run_connectors_pipelines from pipelines.publish import reorder_contexts, run_connector_publish_pipeline from pipelines.tests import run_connector_test_pipeline from pipelines.utils import DaggerPipelineCommand, get_connector_modified_files, get_modified_connectors +from rich.table import Table +from rich.text import Text # HELPERS @@ -478,8 +482,10 @@ def list( @connectors.command(name="format", cls=DaggerPipelineCommand, help="Autoformat connector code.") +@click.option("--commit-and-push", default=False) +@click.option("--export-to-host", default=True) @click.pass_context -def format_code(ctx: click.Context) -> bool: +def format_code(ctx: click.Context, 
commit_and_push, export_to_host) -> bool: connectors_contexts = [ ConnectorContext( pipeline_name=f"Format connector {connector.technical_name}", @@ -504,13 +510,130 @@ def format_code(ctx: click.Context) -> bool: ] anyio.run( - run_connectors_format_pipelines, + run_connectors_pipelines, connectors_contexts, - ctx.obj["ci_git_user"], - ctx.obj["ci_github_access_token"], - ctx.obj["git_branch"], - ctx.obj["is_local"], + run_connector_format_pipeline, + "Format connectors pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + commit_and_push, + export_to_host, + ) + + return True + + +@connectors.command(cls=DaggerPipelineCommand, help="Upgrades the base image version used by the selected connectors..") +@click.option("--commit-and-push", default=False) +@click.option("--export-to-host", default=True) +@click.option("--set-if-exists", default=True) +@click.pass_context +def upgrade_base_image(ctx: click.Context, commit_and_push: bool, export_to_host: bool, set_if_exists: bool) -> bool: + """Upgrades the base image version used by the selected connectors.""" + + if ctx.obj["is_local"] and commit_and_push: + raise click.UsageError("You can't use the --commit-and-push option in local mode.") + if ctx.obj["is_local"] and not export_to_host: + main_logger.warning("Not using the --export-to-host option in local mode will not change anything on your local repo.") + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + 
pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + ci_git_user=ctx.obj["ci_git_user"], + ci_github_access_token=ctx.obj["ci_github_access_token"], + open_report_in_browser=False, + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + + anyio.run( + run_connectors_pipelines, + connectors_contexts, + run_connector_base_image_upgrade_pipeline, + "Upgrade base image pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + commit_and_push, + export_to_host, + set_if_exists, + ) + + return True + + +@connectors.command(cls=DaggerPipelineCommand, help="Upgrades the base image version used by the selected connectors..") +@click.argument("bump-type", type=click.Choice(["patch", "minor", "major"])) +@click.argument("pull-request-number", type=str) +@click.argument("changelog-entry", type=str) +@click.option("--commit-and-push", default=False) +@click.option("--export-to-host", default=True) +@click.option("--set-if-exists", default=True) +@click.pass_context +def bump_version( + ctx: click.Context, + bump_type: str, + pull_request_number: str, + changelog_entry: str, + commit_and_push: bool, + export_to_host: bool, + set_if_exists: bool, +) -> bool: + """Upgrades the base image version used by the selected connectors.""" + + if ctx.obj["is_local"] and commit_and_push: + raise click.UsageError("You can't use the --commit-and-push option in local mode.") + if ctx.obj["is_local"] and not export_to_host: + main_logger.warning("Not using the --export-to-host option in local mode will not change anything on your local repo.") + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + 
ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + ci_git_user=ctx.obj["ci_git_user"], + ci_github_access_token=ctx.obj["ci_github_access_token"], + open_report_in_browser=False, + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + + anyio.run( + run_connectors_pipelines, + connectors_contexts, + run_connector_version_bump_pipeline, + "Version bump pipeline pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], ctx.obj["execute_timeout"], + commit_and_push, + export_to_host, + bump_type, + changelog_entry, + pull_request_number, ) return True diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_upgrade.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_upgrade.py new file mode 100644 index 0000000000000..80870bb1f8e3d --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_upgrade.py @@ -0,0 +1,106 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# +from typing import Optional + +import yaml +from base_images import python +from connector_ops.utils import ConnectorLanguage +from dagger import Container +from pipelines.bases import ConnectorReport, StepResult, StepStatus +from pipelines.connector_changes.common import MetadataUpdateStep +from pipelines.contexts import ConnectorContext + + +class UpgradeBaseImageMetadata(MetadataUpdateStep): + title = "Upgrade the base image to the latest version in metadata.yaml" + latest_python_version = python.VERSION_REGISTRY.latest_version.name_with_tag + # latest_java_version = java.VERSION_REGISTRY.latest_version + + def __init__( + self, + context: ConnectorContext, + export_changes_to_host: bool, + container_with_airbyte_repo: Container | None = None, + commit: bool = False, + push: bool = False, + skip_ci=True, + set_if_not_exists: bool = False, + ): + super().__init__(context, export_changes_to_host, container_with_airbyte_repo, commit, push, skip_ci) + self.set_if_not_exists = set_if_not_exists + + @property + def latest_base_image_version(self) -> Optional[str]: + if self.context.connector.language in [ConnectorLanguage.PYTHON, ConnectorLanguage.LOW_CODE]: + return self.latest_python_version + return None + + async def get_current_base_image_version(self) -> Optional[str]: + current_metadata = await self.get_current_metadata() + return current_metadata.get("data", {}).get("connectorBuildOptions", {}).get("baseImage") + + async def get_updated_metadata(self) -> str: + current_metadata = await self.get_current_metadata() + current_connector_build_options = current_metadata["data"].get("connectorBuildOptions", {}) + current_metadata["data"]["connectorBuildOptions"] = { + **current_connector_build_options, + **{"baseImage": self.latest_base_image_version}, + } + return yaml.safe_dump(current_metadata) + + async def make_connector_change(self) -> StepResult: + if self.context.connector.language is ConnectorLanguage.JAVA: + return StepResult( + self, 
StepStatus.SKIPPED, stdout="Java connectors are not supported yet", output_artifact=self.container_with_airbyte_repo + ) + current_base_image_version = await self.get_current_base_image_version() + if current_base_image_version is None and not self.set_if_not_exists: + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Connector does not have a base image metadata field.", + output_artifact=self.container_with_airbyte_repo, + ) + if current_base_image_version == self.latest_python_version: + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Connector already uses latest base image", + output_artifact=self.container_with_airbyte_repo, + ) + container_with_updated_metadata = await self.get_container_with_updated_metadata(self.container_with_airbyte_repo) + + return StepResult( + self, + StepStatus.SUCCESS, + stdout=f"Updated base image to {self.latest_base_image_version} in metadata.yaml", + output_artifact=container_with_updated_metadata, + ) + + +async def run_connector_base_image_upgrade_pipeline( + context: ConnectorContext, semaphore, commit_and_push: bool, export_changes_to_host: bool, set_if_exists: bool +) -> ConnectorReport: + """Run a pipeline to upgrade for a single connector. + + Args: + context (ConnectorContext): The initialized connector context. + + Returns: + ConnectorReport: The reports holding the base image version upgrade results. 
+ """ + async with semaphore: + steps_results = [] + async with context: + update_base_image_in_metadata = UpgradeBaseImageMetadata( + context, + commit=commit_and_push, + push=commit_and_push, + export_changes_to_host=export_changes_to_host, + set_if_not_exists=set_if_exists, + ) + update_base_image_in_metadata_result = await update_base_image_in_metadata.run() + steps_results.append(update_base_image_in_metadata_result) + context.report = ConnectorReport(context, steps_results, name="BASE IMAGE UPGRADE RESULTS") + return context.report diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/common.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/common.py new file mode 100644 index 0000000000000..efe673f7446a0 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/common.py @@ -0,0 +1,135 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +from abc import abstractmethod +from pathlib import Path +from typing import Dict, List, Optional + +import yaml +from connector_ops.utils import METADATA_FILE_NAME +from dagger import Container, Directory, Secret +from pipelines.actions import environments +from pipelines.bases import Step, StepResult +from pipelines.consts import AIRBYTE_GITHUB_REPO_URL +from pipelines.contexts import ConnectorContext + + +class ConnectorChangeStep(Step): + def __init__( + self, + context: ConnectorContext, + export_changes_to_host: bool, + container_with_airbyte_repo: Optional[Container] = None, + commit: bool = False, + push: bool = False, + skip_ci=True, + ): + super().__init__(context) + self.export_changes_to_host = export_changes_to_host + self.container_with_airbyte_repo = container_with_airbyte_repo + self.commit = commit + self.push = push + self.skip_ci = skip_ci + + @property + def modified_paths(self) -> List[str]: + return [self.context.connector.code_directory] + + async def get_airbyte_local_repo(self) -> Directory: + return self.context.get_repo_dir() + 
+ def get_airbyte_remote_repo(self) -> Directory: + return self.dagger_client.git(AIRBYTE_GITHUB_REPO_URL, keep_git_dir=True).branch(self.context.git_branch).tree() + + async def get_airbyte_repo(self): + if self.context.is_local: + return await self.get_airbyte_local_repo() + return self.get_airbyte_remote_repo() + + @property + def authenticated_repo_url(self) -> Secret: + if self.context.ci_git_user is None or self.context.ci_github_access_token is None: + raise Exception("Missing CI git user or CI github access token") + url = self.GITHUB_REPO_URL.replace("https://", f"https://{self.context.ci_git_user}:{self.context.ci_github_access_token}@") + return self.dagger_client.set_secret("authenticated_repo_url", url) + + @property + def commit_message(self) -> str: + commit_message = f"🤖 {self.context.connector.technical_name} - {self.title}" + return f"[skip ci]: {commit_message} " if self.skip_ci else commit_message + + async def get_fresh_git_container(self, authenticated: bool = False) -> Container: + if not authenticated: + return ( + environments.with_git(self.dagger_client, self.context.ci_git_user) + .with_mounted_directory("/airbyte", (await self.get_airbyte_repo())) + .with_workdir("/airbyte") + ) + else: + return ( + await self.get_fresh_git_container(authenticated=False) + .with_secret_variable("GITHUB_TOKEN", self.context.ci_github_access_token_secret) + .with_secret_variable("AUTHENTICATED_REPO_URL", self.authenticated_repo_url) + .with_exec(environments.sh_dash_c(["git remote set-url origin $AUTHENTICATED_REPO_URL"])) + ) + + def commit_connector_changes(self, container_with_latest_repo_state: Container) -> Container: + return container_with_latest_repo_state.with_exec(["git", "add", str(self.context.connector.code_directory)]).with_exec( + ["git", "commit", "-m", self.commit_message] + ) + + def push(self, container_with_latest_repo_state: Container) -> Container: + return container_with_latest_repo_state.with_exec(["git", "pull", "--rebase", 
"origin", self.context.git_branch]).with_exec( + ["git", "push", "origin", f"HEAD:{self.context.git_branch}"] + ) + + async def get_connector_dir(self) -> Directory: + return (await self.get_airbyte_repo()).directory(str(self.context.connector.code_directory)) + + async def _run(self) -> StepResult: + self.container_with_airbyte_repo = ( + self.container_with_airbyte_repo if self.container_with_airbyte_repo is not None else await self.get_fresh_git_container() + ) + change_result = await self.make_connector_change() + self.container_with_airbyte_repo = change_result.output_artifact + if self.commit: + self.container_with_airbyte_repo = await self.commit_connector_changes(self.container_with_airbyte_repo) + self.logger.info("Changes committed.") + if self.push: + self.container_with_airbyte_repo = self.push(self.container_with_airbyte_repo) + self.logger.info("Changes pushed.") + if self.export_changes_to_host: + for modified_path in self.modified_paths: + if modified_path.is_dir(): + await self.container_with_airbyte_repo.directory(str(modified_path)).export(str(modified_path)) + else: + await self.container_with_airbyte_repo.file(str(modified_path)).export(str(modified_path)) + + self.logger.info("Changes exported back to host.") + return change_result + + @abstractmethod + async def make_connector_change(self, container_with_airbyte_repo) -> StepResult: + raise NotImplementedError() + + +class MetadataUpdateStep(ConnectorChangeStep): + @property + def modified_paths(self) -> List[Path]: + return [self.context.connector.code_directory / METADATA_FILE_NAME] + + @property + def metadata_path(self) -> str: + return str(self.context.connector.code_directory / METADATA_FILE_NAME) + + async def get_current_metadata(self) -> Dict: + return yaml.safe_load(await self.container_with_airbyte_repo.file(self.metadata_path).contents()) + + @abstractmethod + async def get_updated_metadata(self) -> str: + raise NotImplementedError() + + async def 
get_container_with_updated_metadata(self, container_with_airbyte_repo: Container) -> Container: + new_metadata = await self.get_updated_metadata() + absolute_path_to_new_metadata = f"/airbyte/{self.context.connector.code_directory}/{METADATA_FILE_NAME}" + return container_with_airbyte_repo.with_new_file(absolute_path_to_new_metadata, new_metadata) diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/format/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/format/__init__.py new file mode 100644 index 0000000000000..d449af468dee8 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/format/__init__.py @@ -0,0 +1,48 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +"""This module groups factory like functions to dispatch formatting steps according to the connector language.""" + +from __future__ import annotations + +from connector_ops.utils import ConnectorLanguage +from pipelines.bases import ConnectorReport +from pipelines.connector_changes.format import python_connectors +from pipelines.contexts import ConnectorContext + + +class NoFormatStepForLanguageError(Exception): + pass + + +FORMATTING_STEP_TO_CONNECTOR_LANGUAGE_MAPPING = { + ConnectorLanguage.PYTHON: python_connectors.FormatConnectorCode, + ConnectorLanguage.LOW_CODE: python_connectors.FormatConnectorCode, + # ConnectorLanguage.JAVA: java_connectors.FormatConnectorCode, +} + + +async def run_connector_format_pipeline( + context: ConnectorContext, semaphore, commit_and_push: bool, export_changes_to_host: bool +) -> ConnectorReport: + """Run a format pipeline for a single connector. + + Args: + context (ConnectorContext): The initialized connector context. + + Returns: + ConnectorReport: The reports holding formats results. 
+ """ + steps_results = [] + async with context: + FormatConnectorCode = FORMATTING_STEP_TO_CONNECTOR_LANGUAGE_MAPPING.get(context.connector.language) + if not FormatConnectorCode: + raise NoFormatStepForLanguageError( + f"No formatting step found for connector {context.connector.technical_name} with language {context.connector.language}" + ) + format_connector_code_result = await FormatConnectorCode( + context, export_changes_to_host, commit=commit_and_push, push=commit_and_push + ).run() + steps_results.append(format_connector_code_result) + context.report = ConnectorReport(context, steps_results, name="FORMAT RESULTS") + return context.report diff --git a/airbyte-ci/connectors/pipelines/pipelines/format/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/format/python_connectors.py similarity index 52% rename from airbyte-ci/connectors/pipelines/pipelines/format/python_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/connector_changes/format/python_connectors.py index e2ebbcf68d8a8..49f7b96955813 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/format/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/format/python_connectors.py @@ -2,13 +2,14 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import asyncer +from typing import List, Tuple + from pipelines.actions import environments -from pipelines.bases import Step, StepResult -from pipelines.utils import with_exit_code, with_stderr, with_stdout +from pipelines.bases import StepResult +from pipelines.connector_changes.common import ConnectorChangeStep -class FormatConnectorCode(Step): +class FormatConnectorCode(ConnectorChangeStep): """ A step to format a Python connector code. 
""" @@ -34,24 +35,25 @@ def licenseheaders_cmd(self): "--exclude=**/models/__init__.py", ] - async def _run(self) -> StepResult: + async def make_connector_change(self) -> Tuple[StepResult, List[str]]: + in_container_code_dir = f"/airbyte/{self.context.connector.code_directory}" + formatted = ( environments.with_testing_dependencies(self.context) - .with_mounted_directory("/connector_code", await self.context.get_connector_dir()) - .with_workdir("/connector_code") + .with_mounted_directory(in_container_code_dir, await (await self.get_connector_dir())) + .with_workdir(in_container_code_dir) .with_exec(self.licenseheaders_cmd) .with_exec(self.isort_cmd) .with_exec(self.black_cmd) ) - async with asyncer.create_task_group() as task_group: - soon_exit_code = task_group.soonify(with_exit_code)(formatted) - soon_stderr = task_group.soonify(with_stderr)(formatted) - soon_stdout = task_group.soonify(with_stdout)(formatted) - + format_result = await self.get_step_result(formatted) + self.container_with_airbyte_repo = self.container_with_airbyte_repo.with_directory( + in_container_code_dir, format_result.output_artifact.directory(in_container_code_dir) + ) return StepResult( self, - self.get_step_status_from_exit_code(await soon_exit_code), - stderr=soon_stderr.value, - stdout=soon_stdout.value, - output_artifact=formatted.directory("/connector_code"), + status=format_result.status, + stdout=format_result.stdout, + stderr=format_result.stderr, + output_artifact=self.container_with_airbyte_repo, ) diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py new file mode 100644 index 0000000000000..bef8787eef73c --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py @@ -0,0 +1,183 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# +import datetime +from typing import List, Optional + +import semver +import yaml +from dagger import Container +from pipelines.bases import ConnectorReport, StepResult, StepStatus +from pipelines.connector_changes.common import ConnectorChangeStep, MetadataUpdateStep +from pipelines.contexts import ConnectorContext + + +class BumpDockerImageTagInMetadata(MetadataUpdateStep): + title = "Upgrade the dockerImageTag to the latest version in metadata.yaml" + + def __init__( + self, + context: ConnectorContext, + new_version: str, + export_changes_to_host: bool, + container_with_airbyte_repo: Container | None = None, + commit: bool = False, + push: bool = False, + skip_ci=True, + ): + super().__init__(context, export_changes_to_host, container_with_airbyte_repo, commit, push, skip_ci) + self.new_version = new_version + + async def get_current_docker_image_tag(self) -> Optional[str]: + current_metadata = await self.get_current_metadata() + return current_metadata.get("data", {}).get("dockerImageTag") + + async def get_current_version(self) -> Optional[str]: + return (await self.get_current_metadata()).get("data", {}).get("dockerImageTag") + + async def get_updated_metadata(self) -> str: + current_version = await self.get_current_version() + current_metadata = await self.get_current_metadata() + current_metadata["data"]["dockerImageTag"] = self.new_version + # Bump strict versions + if current_metadata["data"].get("registries", {}).get("cloud", {}).get("dockerImageTag") == current_version: + current_metadata["data"]["registries"]["cloud"]["dockerImageTag"] = self.new_version + return yaml.safe_dump(current_metadata) + + async def make_connector_change(self) -> StepResult: + og_version = await self.get_current_version() + if og_version is None: + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Can't retrieve the connector current version.", + output_artifact=self.container_with_airbyte_repo, + ) + + container_with_updated_metadata = await 
self.get_container_with_updated_metadata(self.container_with_airbyte_repo) + + return StepResult( + self, + StepStatus.SUCCESS, + stdout=f"Updated dockerImageTag from {og_version} to {self.new_version} in metadata.yaml", + output_artifact=container_with_updated_metadata, + ) + + +class AddChangelogEntry(ConnectorChangeStep): + title = "Add changelog entry" + + def __init__( + self, + context: ConnectorContext, + new_version: str, + changelog_entry: str, + pull_request_number: str, + export_changes_to_host: bool, + container_with_airbyte_repo: Container | None = None, + commit: bool = False, + push: bool = False, + skip_ci=True, + ): + super().__init__(context, export_changes_to_host, container_with_airbyte_repo, commit, push, skip_ci) + self.new_version = new_version + self.changelog_entry = changelog_entry + self.pull_request_number = pull_request_number + + @property + def modified_paths(self) -> List[str]: + return [self.context.connector.documentation_file_path] + + async def make_connector_change(self) -> StepResult: + doc_path = self.context.connector.documentation_file_path + if not doc_path.exists(): + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Connector does not have a documentation file.", + output_artifact=self.container_with_airbyte_repo, + ) + try: + updated_doc = self.add_changelog_entry(doc_path.read_text()) + except Exception as e: + return StepResult( + self, + StepStatus.FAILURE, + stdout=f"Could not add changelog entry: {e}", + output_artifact=self.container_with_airbyte_repo, + ) + self.container_with_airbyte_repo = await self.container_with_airbyte_repo.with_new_file(str(doc_path), updated_doc) + return StepResult( + self, + StepStatus.SUCCESS, + stdout=f"Added changelog entry to {doc_path}", + output_artifact=self.container_with_airbyte_repo, + ) + + def find_line_index_for_new_entry(self, markdown_text) -> int: + lines = markdown_text.splitlines() + for line_index, line in enumerate(lines): + if "version" in line.lower() and 
"date" in line.lower() and "pull request" in line.lower() and "subject" in line.lower(): + return line_index + 2 + raise Exception("Could not find the changelog section table in the documentation file.") + + def add_changelog_entry(self, og_doc_content) -> str: + today = datetime.date.today().strftime("%Y-%m-%d") + lines = og_doc_content.splitlines() + line_index_for_new_entry = self.find_line_index_for_new_entry(og_doc_content) + new_entry = f"| {self.new_version} | {today} | [{self.pull_request_number}](https://github.com/airbytehq/airbyte/pull/{self.pull_request_number}) | {self.changelog_entry} |" + lines.insert(line_index_for_new_entry, new_entry) + return "\n".join(lines) + + +async def run_connector_version_bump_pipeline( + context: ConnectorContext, + semaphore, + commit_and_push: bool, + export_changes_to_host: bool, + bump_type: str, + changelog_entry: str, + pull_request_number: str, +) -> ConnectorReport: + """Run a pipeline to upgrade for a single connector. + + Args: + context (ConnectorContext): The initialized connector context. + + Returns: + ConnectorReport: The reports holding the base image version upgrade results. 
+ """ + async with semaphore: + steps_results = [] + async with context: + current_version = semver.VersionInfo.parse(context.connector.version) + if bump_type == "patch": + new_version = current_version.bump_patch() + elif bump_type == "minor": + new_version = current_version.bump_minor() + elif bump_type == "major": + new_version = current_version.bump_major() + new_version = str(new_version) + + update_docker_image_tag_in_metadata = BumpDockerImageTagInMetadata( + context, + new_version, + export_changes_to_host, + commit=commit_and_push, + push=commit_and_push, + ) + update_docker_image_tag_in_metadata_result = await update_docker_image_tag_in_metadata.run() + steps_results.append(update_docker_image_tag_in_metadata_result) + add_changelog_entry = AddChangelogEntry( + context, + new_version, + changelog_entry, + pull_request_number, + export_changes_to_host, + commit=commit_and_push, + push=commit_and_push, + ) + add_changelog_entry_result = await add_changelog_entry.run() + steps_results.append(add_changelog_entry_result) + context.report = ConnectorReport(context, steps_results, name="CONNECTOR VERSION BUMP RESULTS") + return context.report diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py b/airbyte-ci/connectors/pipelines/pipelines/consts.py index ae7d6add49267..1524a69b078e2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/consts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py @@ -3,7 +3,6 @@ # import platform -from pathlib import Path from dagger import Platform @@ -38,3 +37,5 @@ DOCKER_HOST_NAME = "global-docker-host" DOCKER_HOST_PORT = 2375 DOCKER_TMP_VOLUME_NAME = "shared-tmp" +AIRBYTE_GITHUB_REPO = "airbytehq/airbyte" +AIRBYTE_GITHUB_REPO_URL = f"https://github.com/{AIRBYTE_GITHUB_REPO}.git" diff --git a/airbyte-ci/connectors/pipelines/pipelines/contexts.py b/airbyte-ci/connectors/pipelines/pipelines/contexts.py index e3807b41df072..7f1eb720d2cf6 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/contexts.py +++ 
b/airbyte-ci/connectors/pipelines/pipelines/contexts.py @@ -74,6 +74,7 @@ def __init__( ci_gcs_credentials: Optional[str] = None, ci_git_user: Optional[str] = None, ci_github_access_token: Optional[str] = None, + open_report_in_browser: bool = True, ): """Initialize a pipeline context. @@ -116,6 +117,7 @@ def __init__( self.started_at = None self.stopped_at = None self.secrets_to_mask = [] + self.open_report_in_browser = open_report_in_browser update_commit_status_check(**self.github_commit_status) @property @@ -320,6 +322,7 @@ def __init__( fail_fast: bool = False, fast_tests_only: bool = False, code_tests_only: bool = False, + open_report_in_browser: bool = True, ): """Initialize a connector context. @@ -372,6 +375,7 @@ def __init__( ci_gcs_credentials=ci_gcs_credentials, ci_git_user=ci_git_user, ci_github_access_token=ci_github_access_token, + open_report_in_browser=open_report_in_browser, ) @property diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py b/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py index d9ef708796175..5a2bc2c10d6ae 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py @@ -22,7 +22,7 @@ "_EXPERIMENTAL_DAGGER_CLOUD_TOKEN", "p.eyJ1IjogIjFiZjEwMmRjLWYyZmQtNDVhNi1iNzM1LTgxNzI1NGFkZDU2ZiIsICJpZCI6ICJlNjk3YzZiYy0yMDhiLTRlMTktODBjZC0yNjIyNGI3ZDBjMDEifQ.hT6eMOYt3KZgNoVGNYI3_v4CC-s19z8uQsBkGrBhU3k", ) -ARGS_DISABLING_TUI = ["--no-tui", "publish"] +ARGS_DISABLING_TUI = ["--no-tui", "publish", "upgrade-base-image", "--help", "format", "bump-version"] def get_dagger_path() -> Optional[str]: diff --git a/airbyte-ci/connectors/pipelines/pipelines/format/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/format/__init__.py deleted file mode 100644 index 730874bd6b9bb..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/format/__init__.py +++ /dev/null @@ -1,101 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# -"""This module groups factory like functions to dispatch formatting steps according to the connector language.""" - -from __future__ import annotations - -import sys -from typing import List, Optional - -import anyio -import dagger -from connector_ops.utils import ConnectorLanguage -from pipelines.actions import environments -from pipelines.bases import ConnectorReport, Step, StepResult, StepStatus -from pipelines.contexts import ConnectorContext -from pipelines.format import java_connectors, python_connectors -from pipelines.git import GitPushChanges -from pipelines.pipelines.connectors import run_report_complete_pipeline - - -class NoFormatStepForLanguageError(Exception): - pass - - -FORMATTING_STEP_TO_CONNECTOR_LANGUAGE_MAPPING = { - ConnectorLanguage.PYTHON: python_connectors.FormatConnectorCode, - ConnectorLanguage.LOW_CODE: python_connectors.FormatConnectorCode, - ConnectorLanguage.JAVA: java_connectors.FormatConnectorCode, -} - - -class ExportChanges(Step): - title = "Export changes to local repository" - - async def _run(self, changed_directory: dagger.Directory, changed_directory_path_in_repo: str) -> StepResult: - await changed_directory.export(changed_directory_path_in_repo) - return StepResult(self, StepStatus.SUCCESS, stdout=f"Changes exported to {changed_directory_path_in_repo}") - - -async def run_connector_format_pipeline(context: ConnectorContext) -> ConnectorReport: - """Run a format pipeline for a single connector. - - Args: - context (ConnectorContext): The initialized connector context. - - Returns: - ConnectorReport: The reports holding formats results. 
- """ - steps_results = [] - async with context: - FormatConnectorCode = FORMATTING_STEP_TO_CONNECTOR_LANGUAGE_MAPPING.get(context.connector.language) - if not FormatConnectorCode: - raise NoFormatStepForLanguageError( - f"No formatting step found for connector {context.connector.technical_name} with language {context.connector.language}" - ) - format_connector_code_result = await FormatConnectorCode(context).run() - steps_results.append(format_connector_code_result) - - if context.is_local: - export_changes_results = await ExportChanges(context).run( - format_connector_code_result.output_artifact, str(context.connector.code_directory) - ) - steps_results.append(export_changes_results) - else: - git_push_changes_results = await GitPushChanges(context).run( - format_connector_code_result.output_artifact, - str(context.connector.code_directory), - f"Auto format {context.connector.technical_name} code", - skip_ci=True, - ) - steps_results.append(git_push_changes_results) - context.report = ConnectorReport(context, steps_results, name="FORMAT RESULTS") - return context.report - - -async def run_connectors_format_pipelines( - contexts: List[ConnectorContext], - ci_git_user: str, - ci_github_access_token: str, - git_branch: str, - is_local: bool, - execute_timeout: Optional[int], -) -> List[ConnectorContext]: - async with dagger.Connection(dagger.Config(log_output=sys.stderr, execute_timeout=execute_timeout)) as dagger_client: - requires_dind = any(context.connector.language == ConnectorLanguage.JAVA for context in contexts) - dockerd_service = environments.with_global_dockerd_service(dagger_client) - async with anyio.create_task_group() as tg_main: - if requires_dind: - tg_main.start_soon(dockerd_service.sync) - await anyio.sleep(10) # Wait for the docker service to be ready - for context in contexts: - context.dagger_client = dagger_client.pipeline(f"Format - {context.connector.technical_name}") - context.dockerd_service = dockerd_service - await 
run_connector_format_pipeline(context) - # When the connectors pipelines are done, we can stop the dockerd service - tg_main.cancel_scope.cancel() - - await run_report_complete_pipeline(dagger_client, contexts) - - return contexts diff --git a/airbyte-ci/connectors/pipelines/pipelines/format/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/format/java_connectors.py deleted file mode 100644 index aba30683fc419..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/format/java_connectors.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -from pipelines.actions import environments -from pipelines.bases import StepResult -from pipelines.gradle import GradleTask -from pipelines.utils import get_exec_result - - -class FormatConnectorCode(GradleTask): - """ - A step to format a Java connector code. - """ - - title = "Format connector code" - gradle_task_name = "format" - - async def _run(self) -> StepResult: - result = await super()._run() - return StepResult( - self, - result.status, - stderr=result.stderr, - stdout=result.stdout, - output_artifact=result.output_artifact.directory(str(self.context.connector.code_directory)), - ) diff --git a/airbyte-ci/connectors/pipelines/pipelines/git.py b/airbyte-ci/connectors/pipelines/pipelines/git.py deleted file mode 100644 index 01c1d180d4f7f..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/git.py +++ /dev/null @@ -1,118 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from dagger import Client, Directory, Secret -from pipelines.actions import environments -from pipelines.bases import Step, StepResult -from pipelines.github import AIRBYTE_GITHUB_REPO -from pipelines.utils import sh_dash_c - - -class GitPushChanges(Step): - """ - A step to push changes to the remote repository. 
- """ - - title = "Push changes to the remote repository" - - GITHUB_REPO_URL = f"https://github.com/{AIRBYTE_GITHUB_REPO}.git" - - @property - def ci_git_user(self) -> str: - return self.context.ci_git_user - - @property - def ci_github_access_token(self) -> str: - return self.context.ci_github_access_token - - @property - def dagger_client(self) -> Client: - return self.context.dagger_client - - @property - def git_branch(self) -> str: - return self.context.git_branch - - @property - def authenticated_repo_url(self) -> Secret: - url = self.GITHUB_REPO_URL.replace("https://", f"https://{self.ci_git_user}:{self.ci_github_access_token}@") - return self.dagger_client.set_secret("authenticated_repo_url", url) - - @property - def airbyte_repo(self) -> Directory: - return self.dagger_client.git(self.GITHUB_REPO_URL, keep_git_dir=True).branch(self.git_branch).tree() - - def get_commit_message(self, commit_message: str, skip_ci: bool) -> str: - commit_message = f"🤖 {commit_message}" - return f"{commit_message} [skip ci]" if skip_ci else commit_message - - async def _run( - self, changed_directory: Directory, changed_directory_path: str, commit_message: str, skip_ci: bool = True - ) -> StepResult: - diff = ( - environments.with_git(self.dagger_client, self.context.ci_github_access_token_secret, self.ci_git_user) - .with_secret_variable("AUTHENTICATED_REPO_URL", self.authenticated_repo_url) - .with_mounted_directory("/airbyte", self.airbyte_repo) - .with_workdir("/airbyte") - .with_exec(["git", "checkout", self.git_branch]) - .with_mounted_directory(f"/airbyte/{changed_directory_path}", changed_directory) - .with_exec(["git", "diff", "--name-only"]) - ) - - if not await diff.stdout(): - return self.skip("No changes to push") - - commit_and_push = ( - diff.with_exec(["sh", "-c", "git remote set-url origin $AUTHENTICATED_REPO_URL"]) - .with_exec(["git", "add", "."]) - .with_exec(["git", "commit", "-m", self.get_commit_message(commit_message, skip_ci)]) - .with_exec(["git", 
"pull", "--rebase", "origin", self.git_branch]) - .with_exec(["git", "push"]) - ) - return await self.get_step_result(commit_and_push) - - -class GitPushEmptyCommit(GitPushChanges): - """ - A step to push an empty commit to the remote repository. - """ - - title = "Push empty commit to the remote repository" - - def __init__(self, dagger_client, ci_git_user, ci_github_access_token, git_branch): - self._dagger_client = dagger_client - self._ci_github_access_token = ci_github_access_token - self._ci_git_user = ci_git_user - self._git_branch = git_branch - self.ci_github_access_token_secret = dagger_client.set_secret("ci_github_access_token", ci_github_access_token) - - @property - def dagger_client(self) -> Client: - return self._dagger_client - - @property - def ci_git_user(self) -> str: - return self._ci_git_user - - @property - def ci_github_access_token(self) -> Secret: - return self._ci_github_access_token - - @property - def git_branch(self) -> str: - return self._git_branch - - async def _run(self, commit_message: str, skip_ci: bool = True) -> StepResult: - push_empty_commit = ( - environments.with_git(self.dagger_client, self.ci_github_access_token_secret, self.ci_git_user) - .with_secret_variable("AUTHENTICATED_REPO_URL", self.authenticated_repo_url) - .with_mounted_directory("/airbyte", self.airbyte_repo) - .with_workdir("/airbyte") - .with_exec(["git", "checkout", self.git_branch]) - .with_exec(sh_dash_c(["git remote set-url origin $AUTHENTICATED_REPO_URL"])) - .with_exec(["git", "commit", "--allow-empty", "-m", self.get_commit_message(commit_message, skip_ci)]) - .with_exec(["git", "pull", "--rebase", "origin", self.git_branch]) - .with_exec(["git", "push"]) - ) - return await self.get_step_result(push_empty_commit) diff --git a/airbyte-ci/connectors/pipelines/pipelines/github.py b/airbyte-ci/connectors/pipelines/pipelines/github.py index fd6bb7e47530f..cfef07455e6d5 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/github.py +++ 
b/airbyte-ci/connectors/pipelines/pipelines/github.py @@ -10,7 +10,7 @@ from typing import TYPE_CHECKING, Optional from connector_ops.utils import console -from pipelines import main_logger +from pipelines import consts, main_logger from pipelines.bases import CIContext if TYPE_CHECKING: @@ -18,7 +18,6 @@ from github import Github, PullRequest -AIRBYTE_GITHUB_REPO = "airbytehq/airbyte" GITHUB_GLOBAL_CONTEXT_FOR_TESTS = "Connectors CI tests" GITHUB_GLOBAL_DESCRIPTION_FOR_TESTS = "Running connectors tests" @@ -52,7 +51,7 @@ def update_commit_status_check( safe_log(logger, f"Attempting to create {state} status for commit {sha} on Github in {context} context.") try: github_client = Github(os.environ["CI_GITHUB_ACCESS_TOKEN"]) - airbyte_repo = github_client.get_repo(AIRBYTE_GITHUB_REPO) + airbyte_repo = github_client.get_repo(consts.AIRBYTE_GITHUB_REPO) except Exception as e: if logger: logger.error("No commit status check sent, the connection to Github API failed", exc_info=True) @@ -87,7 +86,7 @@ def get_pull_request(pull_request_number: int, github_access_token: str) -> Pull PullRequest: The pull request object. 
""" github_client = Github(github_access_token) - airbyte_repo = github_client.get_repo(AIRBYTE_GITHUB_REPO) + airbyte_repo = github_client.get_repo(consts.AIRBYTE_GITHUB_REPO) return airbyte_repo.get_pull(pull_request_number) diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/common.py b/airbyte-ci/connectors/pipelines/pipelines/tests/common.py index 90d0f5f7db484..25e709c2c0ea0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/tests/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/tests/common.py @@ -17,7 +17,7 @@ from dagger import Container, Directory, File from pipelines import hacks from pipelines.actions import environments -from pipelines.bases import CIContext, PytestStep, Step, StepResult, StepStatus +from pipelines.bases import CIContext, Step, StepResult, StepStatus from pipelines.utils import METADATA_FILE_NAME @@ -174,12 +174,13 @@ async def _run(self) -> StepResult: return await self.get_step_result(qa_checks) -class AcceptanceTests(PytestStep): +class AcceptanceTests(Step): """A step to run acceptance tests for a connector if it has an acceptance test config file.""" title = "Acceptance tests" CONTAINER_TEST_INPUT_DIRECTORY = "/test_input" CONTAINER_SECRETS_DIRECTORY = "/test_input/secrets" + skipped_exit_code = 5 @property def base_cat_command(self) -> List[str]: diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/tests/python_connectors.py index 0a5b0aedd236a..77aed7f61c31a 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/tests/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/tests/python_connectors.py @@ -4,8 +4,8 @@ """This module groups steps made to run tests for a specific Python connector given a test context.""" -from datetime import timedelta -from typing import List +from abc import ABC, abstractmethod +from typing import Callable, Iterable, List import asyncer from dagger import Container @@ -14,8 +14,7 @@ from 
pipelines.builds import LOCAL_BUILD_PLATFORM from pipelines.builds.python_connectors import BuildConnectorImage from pipelines.contexts import ConnectorContext -from pipelines.helpers.steps import run_steps -from pipelines.tests.common import AcceptanceTests, PytestStep +from pipelines.tests.common import AcceptanceTests from pipelines.utils import export_container_to_tarball @@ -55,30 +54,21 @@ async def _run(self) -> StepResult: return await self.get_step_result(formatter) -class ConnectorPackageInstall(Step): - """A step to install the Python connector package in a container.""" +class PytestStep(Step, ABC): + """An abstract class to run pytest tests and evaluate success or failure according to pytest logs.""" - title = "Connector package install" - max_duration = timedelta(minutes=20) - max_retries = 3 + PYTEST_INI_FILE_NAME = "pytest.ini" + PYPROJECT_FILE_NAME = "pyproject.toml" + extra_dependencies_names = ("dev", "tests") + skipped_exit_code = 5 - async def _run(self) -> StepResult: - """Install the connector under test package in a Python container. - - Returns: - StepResult: Failure or success of the package installation and the connector under test container (with the connector package installed). - """ - connector_under_test = await environments.with_python_connector_installed(self.context) - return await self.get_step_result(connector_under_test) - - -class UnitTests(PytestStep): - """A step to run the connector unit tests with Pytest.""" - - title = "Unit tests" + @property + @abstractmethod + def test_directory_name(self) -> str: + raise NotImplementedError("test_directory_name must be implemented in the child class.") async def _run(self, connector_under_test: Container) -> StepResult: - """Run all pytest tests declared in the unit_tests directory of the connector code. + """Run all pytest tests declared in the test directory of the connector code. Args: connector_under_test (Container): The connector under test container. 
@@ -86,31 +76,94 @@ async def _run(self, connector_under_test: Container) -> StepResult: Returns: StepResult: Failure or success of the unit tests with stdout and stdout. """ - connector_under_test_with_secrets = connector_under_test.with_( - await environments.mounted_connector_secrets(self.context, "secrets") - ) - return await self._run_tests_in_directory(connector_under_test_with_secrets, "unit_tests") + if not await self.check_if_tests_are_available(self.test_directory_name): + return self.skip(f"No {self.test_directory_name} directory found in the connector.") + connector_under_test = connector_under_test.with_(await self.testing_environment(self.extra_dependencies_names)) -class IntegrationTests(PytestStep): - """A step to run the connector integration tests with Pytest.""" + return await self.get_step_result(connector_under_test) - title = "Integration tests" + async def check_if_tests_are_available(self, test_directory_name: str) -> bool: + """Check if the tests are available in the connector directory. - async def _run(self, connector_under_test: Container) -> StepResult: - """Run all pytest tests declared in the integration_tests directory of the connector code. + Returns: + bool: True if the tests are available. + """ + connector_dir = await self.context.get_connector_dir() + connector_dir_entries = await connector_dir.entries() + return test_directory_name in connector_dir_entries + + async def testing_environment(self, extra_dependencies_names: Iterable[str]) -> Callable: + """Install all extra dependencies of a connector. Args: - connector_under_test (Container): The connector under test container. + extra_dependencies_names (Iterable[str]): Extra dependencies to install. Returns: - StepResult: Failure or success of the integration tests with stdout and stdout. + Callable: The decorator to use with the with_ method of a container. 
""" + secret_mounting_function = await environments.mounted_connector_secrets(self.context, "secrets") + connector_dir = await self.context.get_connector_dir() + connector_dir_entries = await connector_dir.entries() + + if self.PYTEST_INI_FILE_NAME in connector_dir_entries: + config_file_name = self.PYTEST_INI_FILE_NAME + test_config = await self.context.get_connector_dir(include=[self.PYTEST_INI_FILE_NAME]).file(self.PYTEST_INI_FILE_NAME) + self.logger.info(f"Found {self.PYTEST_INI_FILE_NAME}, using it for testing.") + elif self.PYPROJECT_FILE_NAME in connector_dir_entries: + config_file_name = self.PYPROJECT_FILE_NAME + test_config = await self.context.get_connector_dir(include=[self.PYTEST_INI_FILE_NAME]).file(self.PYTEST_INI_FILE_NAME) + self.logger.info(f"Found {self.PYPROJECT_FILE_NAME} at connector level, using it for testing.") + else: + config_file_name = f"global_{self.PYPROJECT_FILE_NAME}" + test_config = await self.context.get_repo_dir(include=[self.PYPROJECT_FILE_NAME]).file(self.PYPROJECT_FILE_NAME) + self.logger.info(f"Found {self.PYPROJECT_FILE_NAME} at repo level, using it for testing.") + + def prepare_for_testing(built_connector_container: Container) -> Container: + return ( + built_connector_container + # Reset the entrypoint + .with_entrypoint([]) + # Mount the connector directory in /test_environment + # For build optimization the full directory is not mounted by default + # We need the setup.py/pyproject.toml and the tests code to be available + # Install the extra dependencies + .with_mounted_directory("/test_environment", connector_dir) + # Jump in the /test_environment directory + .with_workdir("/test_environment").with_mounted_file(config_file_name, test_config) + # Mount the secrets + .with_(secret_mounting_function) + # Install the extra dependencies + .with_exec(["pip", "install", f".[{','.join(extra_dependencies_names)}]"], skip_entrypoint=True) + # Execute pytest on the test directory + .with_exec( + [ + "python", + "-m", + "pytest", 
+ "-s", + self.test_directory_name, + "-c", + config_file_name, + ] + ) + ) + + return prepare_for_testing - connector_under_test = connector_under_test.with_(environments.bound_docker_host(self.context)).with_( - await environments.mounted_connector_secrets(self.context, "secrets") - ) - return await self._run_tests_in_directory(connector_under_test, "integration_tests") + +class UnitTests(PytestStep): + """A step to run the connector unit tests with Pytest.""" + + title = "Unit tests" + test_directory_name = "unit_tests" + + +class IntegrationTests(PytestStep): + """A step to run the connector integration tests with Pytest.""" + + title = "Integration tests" + test_directory_name = "integration_tests" async def run_all_tests(context: ConnectorContext) -> List[StepResult]: @@ -122,18 +175,14 @@ async def run_all_tests(context: ConnectorContext) -> List[StepResult]: Returns: List[StepResult]: The results of all the steps that ran or were skipped. """ + step_results = [] + build_connector_image_results = await BuildConnectorImage(context, LOCAL_BUILD_PLATFORM).run() + if build_connector_image_results.status is StepStatus.FAILURE: + return [build_connector_image_results] + step_results.append(build_connector_image_results) - step_results = await run_steps( - [ - ConnectorPackageInstall(context), - BuildConnectorImage(context, LOCAL_BUILD_PLATFORM), - ] - ) - if any([step_result.status is StepStatus.FAILURE for step_result in step_results]): - return step_results - connector_package_install_results, build_connector_image_results = step_results[0], step_results[1] connector_image_tar_file, _ = await export_container_to_tarball(context, build_connector_image_results.output_artifact) - connector_container = connector_package_install_results.output_artifact + connector_container = build_connector_image_results.output_artifact context.connector_secrets = await secrets.get_connector_secrets(context) diff --git a/airbyte-ci/connectors/pipelines/pipelines/utils.py 
b/airbyte-ci/connectors/pipelines/pipelines/utils.py index 93aacf5b063bf..80396266c73bf 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/utils.py +++ b/airbyte-ci/connectors/pipelines/pipelines/utils.py @@ -54,7 +54,7 @@ async def check_path_in_workdir(container: Container, path: str) -> bool: Returns: bool: Whether the path exists in the container working directory. """ - workdir = (await container.with_exec(["pwd"]).stdout()).strip() + workdir = (await container.with_exec(["pwd"], skip_entrypoint=True).stdout()).strip() mounts = await container.mounts() if workdir in mounts: expected_file_path = Path(workdir[1:]) / path diff --git a/airbyte-ci/connectors/pipelines/poetry.lock b/airbyte-ci/connectors/pipelines/poetry.lock index 7e0af449a697a..18c1bd793ffb4 100644 --- a/airbyte-ci/connectors/pipelines/poetry.lock +++ b/airbyte-ci/connectors/pipelines/poetry.lock @@ -1,5 +1,25 @@ # This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +[[package]] +name = "airbyte-connectors-base-images" +version = "0.1.0" +description = "This package should be used as a library to import base images definitions to build Airbyte connectors." 
+optional = false +python-versions = "^3.10" +files = [] +develop = true + +[package.dependencies] +dagger-io = "0.6.4" +gitpython = "^3.1.35" +py-markdown-table = "0.4.0" +rich = "^13.0.1" +semver = "^3.0.1" + +[package.source] +type = "directory" +url = "../base_images" + [[package]] name = "airbyte-protocol-models" version = "1.0.1" @@ -334,13 +354,13 @@ url = "../ci_credentials" [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -375,23 +395,9 @@ requests = "^2.28.2" type = "directory" url = "../common_utils" -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -optional = false -python-versions = "*" -files = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - [[package]] name = "connector-ops" -version = "0.2.2" +version = "0.2.3" description = "Packaged maintained by the connector operations team to perform CI for connectors" optional = false python-versions = "^3.10" @@ -409,7 +415,7 @@ pydash = "^7.0.4" PyGithub = "^1.58.0" PyYAML = "^6.0" requests = "^2.28.2" -rich = "^11.0.1" +rich = "^13.0.1" 
simpleeval = "^0.9.13" [package.source] @@ -418,71 +424,63 @@ url = "../connector_ops" [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.1" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" -files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - 
{file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = 
"coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, + {file = 
"coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, + {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, + {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, + {file = 
"coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, + {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, + {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, + {file = 
"coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, + {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, + {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, + {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, + {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, + {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, + {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, + {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, + {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, ] [package.dependencies] @@ -574,33 +572,34 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "docker" -version = "5.0.3" +version = "6.1.3" description = "A Python library for the Docker Engine API." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "docker-5.0.3-py2.py3-none-any.whl", hash = "sha256:7a79bb439e3df59d0a72621775d600bc8bc8b422d285824cb37103eab91d1ce0"}, - {file = "docker-5.0.3.tar.gz", hash = "sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb"}, + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, ] [package.dependencies] -pywin32 = {version = "227", markers = "sys_platform == \"win32\""} -requests = ">=2.14.2,<2.18.0 || >2.18.0" +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" websocket-client = ">=0.32.0" [package.extras] -ssh = ["paramiko (>=2.4.2)"] -tls = ["cryptography (>=3.4.7)", "idna (>=2.0.0)", "pyOpenSSL (>=17.5.0)"] +ssh = ["paramiko (>=2.4.3)"] [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = 
"sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -636,18 +635,21 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.32" +version = "3.1.36" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, - {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, + {file = "GitPython-3.1.36-py3-none-any.whl", hash = "sha256:8d22b5cfefd17c79914226982bb7851d6ade47545b1735a9d010a2a4c26d8388"}, + {file = "GitPython-3.1.36.tar.gz", hash = "sha256:4bb0c2a6995e85064140d31a33289aa5dce80133a23d36fcd372d716c54d3ebf"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar", "virtualenv"] + [[package]] name = "google-api-core" version = "2.11.1" @@ -672,28 +674,27 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.22.0" +version = "2.17.3" description = "Google Authentication Library" optional = false -python-versions = ">=3.6" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, + {file = "google-auth-2.17.3.tar.gz", hash = "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc"}, + {file = "google_auth-2.17.3-py2.py3-none-any.whl", hash = 
"sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" +rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} six = ">=1.9.0" -urllib3 = "<2.0" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +requests = ["requests (>=2.20.0,<3.0.0dev)"] [[package]] name = "google-cloud-core" @@ -816,20 +817,20 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.5.0" +version = "2.6.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">= 3.7" files = [ - {file = "google-resumable-media-2.5.0.tar.gz", hash = "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"}, - {file = "google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"}, + {file = "google-resumable-media-2.6.0.tar.gz", hash = "sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7"}, + {file = "google_resumable_media-2.6.0-py2.py3-none-any.whl", hash = "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b"}, ] [package.dependencies] google-crc32c = ">=1.0,<2.0dev" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] @@ -899,13 +900,13 @@ files = [ [[package]] name = "httpcore" -version = "0.17.3" +version = "0.18.0" description = "A minimal low-level HTTP client." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, - {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, + {file = "httpcore-0.18.0-py3-none-any.whl", hash = "sha256:adc5398ee0a476567bf87467063ee63584a8bce86078bf748e48754f60202ced"}, + {file = "httpcore-0.18.0.tar.gz", hash = "sha256:13b5e5cd1dca1a6636a6aaea212b19f4f85cd88c366a2b82304181b769aab3c9"}, ] [package.dependencies] @@ -920,18 +921,18 @@ socks = ["socksio (==1.*)"] [[package]] name = "httpx" -version = "0.24.1" +version = "0.25.0" description = "The next generation HTTP client." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, - {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, + {file = "httpx-0.25.0-py3-none-any.whl", hash = "sha256:181ea7f8ba3a82578be86ef4171554dd45fec26a02556a744db029a0a27b7100"}, + {file = "httpx-0.25.0.tar.gz", hash = "sha256:47ecda285389cb32bb2691cc6e069e3ab0205956f681c5b2ad2325719751d875"}, ] [package.dependencies] certifi = "*" -httpcore = ">=0.15.0,<0.18.0" +httpcore = ">=0.18.0,<0.19.0" idna = "*" sniffio = "*" @@ -980,6 +981,30 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.3" @@ -1039,6 +1064,17 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "more-itertools" version = "8.14.0" @@ -1256,13 +1292,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = 
"sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1271,24 +1307,24 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "4.24.0" +version = "4.24.3" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, - {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, - {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, - {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, - {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, - {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, - {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, - {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = 
"sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, - {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, - {file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, - {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, + {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, + {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, + {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, + {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, + {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, + {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, + {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, + {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, + {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, + {file = 
"protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, + {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, ] [[package]] @@ -1302,6 +1338,20 @@ files = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +[[package]] +name = "py-markdown-table" +version = "0.4.0" +description = "Package that generates markdown tables from a list of dicts" +optional = false +python-versions = ">=3.6" +files = [ + {file = "py-markdown-table-0.4.0.tar.gz", hash = "sha256:e595f9cc03d8a464f6aa616027be29cf033cd778ae8fe8c030d3d664e790dbc3"}, + {file = "py_markdown_table-0.4.0-py3-none-any.whl", hash = "sha256:28a4848322cb1f975cf444080d3ada752138968b0f3727e295504c5f5fb5bae1"}, +] + +[package.extras] +dev = ["black", "pylint", "pytest", "pytest-cov"] + [[package]] name = "pyasn1" version = "0.5.0" @@ -1569,23 +1619,25 @@ files = [ [[package]] name = "pywin32" -version = "227" +version = "306" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"}, - {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"}, - {file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"}, - {file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"}, - {file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"}, - {file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"}, - {file = "pywin32-227-cp37-cp37m-win32.whl", 
hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"}, - {file = "pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"}, - {file = "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"}, - {file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"}, - {file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"}, - {file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"}, + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = 
"sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] [[package]] @@ -1660,22 +1712,21 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "11.2.0" +version = "13.5.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.6.2,<4.0.0" +python-versions = ">=3.7.0" files = [ - {file = "rich-11.2.0-py3-none-any.whl", hash = "sha256:d5f49ad91fb343efcae45a2b2df04a9755e863e50413623ab8c9e74f05aee52b"}, - {file = "rich-11.2.0.tar.gz", hash = "sha256:1a6266a5738115017bb64a66c59c717e7aa047b3ae49a011ede4abdeffc6536e"}, + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, ] [package.dependencies] -colorama = ">=0.4.0,<0.5.0" -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" [package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rsa" @@ -1704,13 +1755,13 @@ files = [ [[package]] name = "sentry-sdk" -version = "1.29.2" +version = "1.31.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.29.2.tar.gz", hash = 
"sha256:a99ee105384788c3f228726a88baf515fe7b5f1d2d0f215a03d194369f158df7"}, - {file = "sentry_sdk-1.29.2-py2.py3-none-any.whl", hash = "sha256:3e17215d8006612e2df02b0e73115eb8376c37e3f586d8436fa41644e605074d"}, + {file = "sentry-sdk-1.31.0.tar.gz", hash = "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b"}, + {file = "sentry_sdk-1.31.0-py2.py3-none-any.whl", hash = "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291"}, ] [package.dependencies] @@ -1720,10 +1771,12 @@ urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] beam = ["apache-beam (>=2.12)"] bottle = ["bottle (>=0.12.13)"] celery = ["celery (>=3)"] chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] @@ -1733,6 +1786,7 @@ httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] loguru = ["loguru (>=0.5)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] @@ -1848,33 +1902,34 @@ files = [ [[package]] name = "urllib3" -version = "1.26.16" +version = "2.0.4" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "websocket-client" -version = "1.6.1" +version = "1.6.3" description = "WebSocket client for Python with low level API options" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, + {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, + {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, ] [package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +docs = ["Sphinx (>=6.0)", 
"sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] @@ -2052,4 +2107,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "af9fca8fc8b614482fef8d85b15f4b351e97757b05fd3de6f054d3cd3119ad07" +content-hash = "099729e9fc75cdebf03b91f1b3a569abdde4ad6cd41fc8fde35d1fe1a4bb2973" diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index e447764ab61b3..9ee0c1c8d308f 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "1.2.3" +version = "1.3.0" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] @@ -14,7 +14,7 @@ dagger-io = "^0.6.4" asyncer = "^0.0.2" anyio = "^3.4.1" more-itertools = "^8.11.0" -docker = "^5.0.3" +docker = "^6.0.0" semver = "^3.0.1" airbyte-protocol-models = "*" tabulate = "^0.8.9" @@ -23,6 +23,7 @@ requests = "^2.28.2" connector-ops = {path = "../connector_ops", develop = true} toml = "^0.10.2" sentry-sdk = "^1.28.1" +airbyte-connectors-base-images = {path = "../base_images", develop = true} [tool.poetry.group.test.dependencies] pytest = "^6.2.5" diff --git a/airbyte-ci/connectors/pipelines/tests/conftest.py b/airbyte-ci/connectors/pipelines/tests/conftest.py index 47cfb0fc195f2..2e35e8e37bab9 100644 --- a/airbyte-ci/connectors/pipelines/tests/conftest.py +++ b/airbyte-ci/connectors/pipelines/tests/conftest.py @@ -3,9 +3,10 @@ # import os +import platform import sys from pathlib import Path -from typing import Set +from typing import List import dagger import git @@ -68,5 +69,10 @@ def from_airbyte_root(airbyte_repo_path): @pytest.fixture(scope="session") -def all_connectors() -> Set[Connector]: - return ALL_CONNECTORS +def all_connectors() -> List[Connector]: + return 
sorted(ALL_CONNECTORS, key=lambda connector: connector.technical_name) + + +@pytest.fixture(scope="session") +def current_platform(): + return dagger.Platform(f"linux/{platform.machine()}") diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py b/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py new file mode 100644 index 0000000000000..70840b06ce0bd --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py @@ -0,0 +1,35 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dagger import Container + + +async def pre_connector_install(base_image_container: Container) -> Container: + """This function will run before the connector installation. + It can mutate the base image container. + + Args: + base_image_container (Container): The base image container to mutate. + + Returns: + Container: The mutated base image container. + """ + return await base_image_container.with_env_variable("MY_PRE_BUILD_ENV_VAR", "my_pre_build_env_var_value") + + +async def post_connector_install(connector_container: Container) -> Container: + """This function will run after the connector installation during the build process. + It can mutate the connector container. + + Args: + connector_container (Container): The connector container to mutate. + + Returns: + Container: The mutated connector container. 
+ """ + return await connector_container.with_env_variable("MY_POST_BUILD_ENV_VAR", "my_post_build_env_var_value") diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py new file mode 100644 index 0000000000000..ff4d201cb1c36 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py @@ -0,0 +1,150 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from pathlib import Path + +import pytest +from pipelines.bases import StepStatus +from pipelines.builds.python_connectors import BuildConnectorImage +from pipelines.contexts import ConnectorContext + +pytestmark = [ + pytest.mark.anyio, +] + + +class TestBuildConnectorImage: + @pytest.fixture + def test_context(self, mocker): + return mocker.Mock(secrets_to_mask=[]) + + @pytest.fixture + def test_context_with_connector_with_base_image(self, test_context): + test_context.connector.metadata = {"connectorBuildOptions": {"baseImage": "xyz"}} + return test_context + + @pytest.fixture + def test_context_with_connector_without_base_image(self, test_context): + test_context.connector.metadata = {} + return test_context + + @pytest.fixture + def connector_with_base_image(self, all_connectors): + for connector in all_connectors: + if connector.metadata and connector.metadata.get("connectorBuildOptions", {}).get("baseImage"): + if not (connector.code_directory / "build_customization.py").exists(): + return connector + pytest.skip("No connector with a connectorBuildOptions.baseImage metadata found") + + @pytest.fixture + def connector_with_base_image_with_build_customization(self, connector_with_base_image): + dummy_build_customization = (Path(__file__).parent / "dummy_build_customization.py").read_text() + (connector_with_base_image.code_directory / "build_customization.py").write_text(dummy_build_customization) + yield connector_with_base_image + 
(connector_with_base_image.code_directory / "build_customization.py").unlink() + + @pytest.fixture + def test_context_with_real_connector_using_base_image(self, connector_with_base_image, dagger_client): + context = ConnectorContext( + pipeline_name="test build", + connector=connector_with_base_image, + git_branch="test", + git_revision="test", + report_output_prefix="test", + is_local=True, + use_remote_secrets=True, + ) + context.dagger_client = dagger_client + return context + + @pytest.fixture + def test_context_with_real_connector_using_base_image_with_build_customization( + self, connector_with_base_image_with_build_customization, dagger_client + ): + context = ConnectorContext( + pipeline_name="test build", + connector=connector_with_base_image_with_build_customization, + git_branch="test", + git_revision="test", + report_output_prefix="test", + is_local=True, + use_remote_secrets=True, + ) + context.dagger_client = dagger_client + return context + + @pytest.fixture + def connector_without_base_image(self, all_connectors): + for connector in all_connectors: + if connector.metadata and not connector.metadata.get("connectorBuildOptions", {}).get("baseImage"): + return connector + pytest.skip("No connector without a connectorBuildOptions.baseImage metadata found") + + @pytest.fixture + def test_context_with_real_connector_without_base_image(self, connector_without_base_image, dagger_client): + context = ConnectorContext( + pipeline_name="test build", + connector=connector_without_base_image, + git_branch="test", + git_revision="test", + report_output_prefix="test", + is_local=True, + use_remote_secrets=True, + ) + context.dagger_client = dagger_client + return context + + async def test__run_using_base_image_with_mocks(self, mocker, test_context_with_connector_with_base_image, current_platform): + container_built_from_base = mocker.Mock() + mocker.patch.object(BuildConnectorImage, "_build_from_base_image", 
mocker.AsyncMock(return_value=container_built_from_base)) + mocker.patch.object(BuildConnectorImage, "get_step_result", mocker.AsyncMock()) + step = BuildConnectorImage(test_context_with_connector_with_base_image, current_platform) + step_result = await step._run() + step._build_from_base_image.assert_called_once() + step.get_step_result.assert_called_once_with(container_built_from_base.with_exec.return_value) + container_built_from_base.with_exec.assert_called_once_with(["spec"]) + assert step_result == step.get_step_result.return_value + + async def test_building_from_base_image_for_real(self, test_context_with_real_connector_using_base_image, current_platform): + step = BuildConnectorImage(test_context_with_real_connector_using_base_image, current_platform) + step_result = await step._run() + assert step_result.status is StepStatus.SUCCESS + built_container = step_result.output_artifact + assert await built_container.env_variable("AIRBYTE_ENTRYPOINT") == " ".join(step.get_entrypoint("main.py")) + assert await built_container.workdir() == step.PATH_TO_INTEGRATION_CODE + assert await built_container.entrypoint() == step.get_entrypoint("main.py") + assert ( + await built_container.label("io.airbyte.version") + == test_context_with_real_connector_using_base_image.connector.metadata["dockerImageTag"] + ) + assert ( + await built_container.label("io.airbyte.name") + == test_context_with_real_connector_using_base_image.connector.metadata["dockerRepository"] + ) + + async def test_building_from_base_image_with_customization_for_real( + self, test_context_with_real_connector_using_base_image_with_build_customization, current_platform + ): + step = BuildConnectorImage(test_context_with_real_connector_using_base_image_with_build_customization, current_platform) + step_result = await step._run() + assert step_result.status is StepStatus.SUCCESS + built_container = step_result.output_artifact + assert await built_container.env_variable("MY_PRE_BUILD_ENV_VAR") ==
"my_pre_build_env_var_value" + assert await built_container.env_variable("MY_POST_BUILD_ENV_VAR") == "my_post_build_env_var_value" + + async def test__run_using_base_dockerfile_with_mocks(self, mocker, test_context_with_connector_without_base_image, current_platform): + container_built_from_dockerfile = mocker.Mock() + mocker.patch.object(BuildConnectorImage, "_build_from_dockerfile", mocker.AsyncMock(return_value=container_built_from_dockerfile)) + mocker.patch.object(BuildConnectorImage, "get_step_result", mocker.AsyncMock()) + step = BuildConnectorImage(test_context_with_connector_without_base_image, current_platform) + step_result = await step._run() + step._build_from_dockerfile.assert_called_once() + step.get_step_result.assert_called_once_with(container_built_from_dockerfile.with_exec.return_value) + container_built_from_dockerfile.with_exec.assert_called_once_with(["spec"]) + assert step_result == step.get_step_result.return_value + + async def test_building_from_dockerfile_for_real(self, test_context_with_real_connector_without_base_image, current_platform): + step = BuildConnectorImage(test_context_with_real_connector_without_base_image, current_platform) + step_result = await step._run() + assert step_result.status is StepStatus.SUCCESS diff --git a/airbyte-integrations/connectors/source-faker/metadata.yaml b/airbyte-integrations/connectors/source-faker/metadata.yaml index 83aa3520b7110..669596f15e624 100644 --- a/airbyte-integrations/connectors/source-faker/metadata.yaml +++ b/airbyte-integrations/connectors/source-faker/metadata.yaml @@ -4,6 +4,8 @@ data: sl: 100 allowedHosts: hosts: [] + connectorBuildOptions: + baseImage: airbyte-python-connector-base:1.1.0 connectorSubtype: api connectorType: source definitionId: dfd88b22-b603-4c3d-aad7-3701784586b1 @@ -24,24 +26,23 @@ data: breakingChanges: 4.0.0: message: This is a breaking change message - upgradeDeadline: "2023-07-19" + upgradeDeadline: '2023-07-19' 5.0.0: - message: - ID and products.year fields are
changing to be integers instead of + message: ID and products.year fields are changing to be integers instead of floats. - upgradeDeadline: "2023-08-31" + upgradeDeadline: '2023-08-31' resourceRequirements: jobSpecific: - - jobType: sync - resourceRequirements: - cpu_limit: "4.0" - cpu_request: "1.0" + - jobType: sync + resourceRequirements: + cpu_limit: '4.0' + cpu_request: '1.0' suggestedStreams: streams: - - users - - products - - purchases + - users + - products + - purchases supportLevel: community tags: - - language:python -metadataSpecVersion: "1.0" + - language:python +metadataSpecVersion: '1.0' diff --git a/airbyte-integrations/connectors/source-zendesk-chat/Dockerfile b/airbyte-integrations/connectors/source-zendesk-chat/Dockerfile deleted file mode 100644 index c7764ec2d2e5b..0000000000000 --- a/airbyte-integrations/connectors/source-zendesk-chat/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.9-slim - -# Bash is installed for more convenient debugging. -RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* - -ENV CODE_PATH="source_zendesk_chat" -ENV AIRBYTE_IMPL_MODULE="source_zendesk_chat" -ENV AIRBYTE_IMPL_PATH="SourceZendeskChat" -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main_dev.py" - -WORKDIR /airbyte/integration_code -COPY $CODE_PATH ./$CODE_PATH -COPY main_dev.py ./ -COPY setup.py ./ -RUN pip install . - -ENTRYPOINT ["python", "/airbyte/integration_code/main_dev.py"] - -LABEL io.airbyte.version=0.1.14 -LABEL io.airbyte.name=airbyte/source-zendesk-chat diff --git a/airbyte-integrations/connectors/source-zendesk-chat/build_customization.py b/airbyte-integrations/connectors/source-zendesk-chat/build_customization.py new file mode 100644 index 0000000000000..02fb3cde3d3e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-zendesk-chat/build_customization.py @@ -0,0 +1,41 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dagger import Container + +MAIN_FILE_NAME = "main_dev.py" + + +async def pre_connector_install(base_image_container: Container) -> Container: + """This function will run before the connector installation. + It can mutate the base image container. + + Args: + base_image_container (Container): The base image container to mutate. + + Returns: + Container: The mutated base image container. + """ + return await base_image_container + + +async def post_connector_install(connector_container: Container) -> Container: + """This function will run after the connector installation during the build process. + It can mutate the connector container. + + Args: + connector_container (Container): The connector container to mutate. + + Returns: + Container: The mutated connector container. + """ + return await ( + connector_container.with_env_variable("CODE_PATH", "source_zendesk_chat") + .with_env_variable("AIRBYTE_IMPL_MODULE", "source_zendesk_chat") + .with_env_variable("AIRBYTE_IMPL_PATH", "SourceZendeskChat") + ) diff --git a/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml index 5d9c006e08538..80c9113860960 100644 --- a/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml +++ b/airbyte-integrations/connectors/source-zendesk-chat/metadata.yaml @@ -24,4 +24,6 @@ data: sl: 200 ql: 400 supportLevel: certified + connectorBuildOptions: + baseImage: "airbyte-python-connector-base:1.0.0" metadataSpecVersion: "1.0"