From 0d1c6a200e0ee36148b5f9c4b039a37cdfab04eb Mon Sep 17 00:00:00 2001 From: Ben Church Date: Tue, 17 Oct 2023 12:59:52 -0700 Subject: [PATCH 01/38] First refactor pass --- .../pipelines/pipelines/__init__.py | 4 +- .../pipelines/pipelines/actions/__init__.py | 4 - .../pipelines/actions/environments.py | 1015 ----------------- .../connectors/pipelines/pipelines/bases.py | 624 ---------- .../pipelines/{commands => cli}/airbyte_ci.py | 21 +- .../pipelines/cli/dagger_pipeline_command.py | 107 ++ .../pipelines/{ => cli}/dagger_run.py | 0 .../pipelines/{ => cli}/telemetry.py | 0 .../pipelines/commands/groups/connectors.py | 697 ----------- .../connectors/pipelines/pipelines/consts.py | 1 + .../pipelines/pipelines/contexts.py | 622 ---------- .../{commands => dagger}/__init__.py | 0 .../groups => dagger/actions}/__init__.py | 0 .../dagger/actions/connector/hooks.py | 48 + .../dagger/actions/connector/normalization.py | 80 ++ .../dagger/actions/internal_tools.py | 37 + .../actions/python}/__init__.py | 0 .../pipelines/dagger/actions/python/common.py | 267 +++++ .../pipelines/dagger/actions/python/pipx.py | 52 + .../pipelines/dagger/actions/python/poetry.py | 104 ++ .../{ => dagger}/actions/remote_storage.py | 2 +- .../pipelines/{ => dagger}/actions/secrets.py | 62 +- .../actions/system}/__init__.py | 0 .../pipelines/dagger/actions/system/common.py | 21 + .../pipelines/dagger/actions/system/docker.py | 139 +++ .../pipelines/dagger/containers/__init__.py | 3 + .../pipelines/dagger/containers/java.py | 174 +++ .../pipelines/dagger/containers/python.py | 60 + .../connectors/pipelines/pipelines/gradle.py | 163 --- .../connectors/pipelines/pipelines/hacks.py | 2 +- .../pipelines/helpers/connectors/__init__.py | 3 + .../connectors}/metadata_change_helpers.py | 0 .../pipelines/helpers/connectors/modifed.py | 58 + .../pipelines/pipelines/helpers/gcs.py | 50 + .../pipelines/pipelines/helpers/git.py | 120 ++ .../pipelines/{ => helpers}/github.py | 2 +- .../pipelines/{ => helpers}/sentry_utils.py | 0 .../pipelines/{ => helpers}/slack.py | 0 .../pipelines/pipelines/helpers/steps.py | 4 +- .../pipelines/pipelines/helpers/utils.py | 325 ++++++ .../{tools => internal_tools}/internal.py | 0 .../pipelines/pipelines/models/contexts.py | 303 +++++ .../pipelines/pipelines/models/reports.py | 182 +++ .../pipelines/pipelines/models/steps.py | 585 ++++++++++ .../pipelines/pipelines/pipeline/__init__.py | 3 + .../pipelines/pipeline/connectors/__init__.py | 3 + .../pipeline/connectors/builds/__init__.py | 3 + .../pipeline/connectors/builds/commands.py | 57 + .../pipeline/connectors/builds/pipeline.py | 0 .../connectors/builds/steps}/__init__.py | 9 +- .../builds/steps}/build_customization.py | 0 .../connectors/builds/steps}/common.py | 6 +- .../builds/steps}/java_connectors.py | 12 +- .../connectors/builds/steps}/normalization.py | 8 +- .../builds/steps}/python_connectors.py | 11 +- .../connectors/bump_version/__init__.py | 3 + .../connectors/bump_version/commands.py | 60 + .../connectors/bump_version/pipeline.py} | 125 +- .../pipelines/pipeline/connectors/commands.py | 204 ++++ .../pipelines/pipeline/connectors/context.py | 234 ++++ .../pipeline/connectors/list/__init__.py | 3 + .../pipeline/connectors/list/commands.py | 41 + .../pipeline/connectors/list/pipeline.py | 0 .../migrate_to_base_image/__init__.py | 3 + .../migrate_to_base_image/commands.py | 75 ++ .../migrate_to_base_image/pipeline.py} | 9 +- .../connectors/pipeline.py} | 10 +- .../pipeline/connectors/publish/__init__.py | 3 + 
.../pipeline/connectors/publish/commands.py | 135 +++ .../pipeline/connectors/publish/context.py | 120 ++ .../connectors/publish/pipeline.py} | 33 +- .../pipeline/connectors/test/__init__.py | 3 + .../pipeline/connectors/test/commands.py | 107 ++ .../pipeline/connectors/test/pipeline.py | 0 .../connectors/test/steps}/__init__.py | 11 +- .../connectors/test/steps}/common.py | 11 +- .../connectors/test/steps}/java_connectors.py | 20 +- .../test/steps}/python_connectors.py | 23 +- .../test/steps}/templates/test_report.html.j2 | 0 .../connectors/upgrade_base_image/__init__.py | 3 + .../connectors/upgrade_base_image/commands.py | 67 ++ .../connectors/upgrade_base_image/pipeline.py | 0 .../pipelines/pipeline/metadata/__init__.py | 3 + .../metadata/commands.py} | 12 +- .../metadata/pipeline.py} | 21 +- .../pipelines/pipeline/steps/__init__.py | 3 + .../pipelines/{ => pipeline/steps}/git.py | 42 +- .../pipelines/pipeline/test/__init__.py | 3 + .../pipelines/pipeline/test/commands.py | 23 + .../tests.py => pipeline/test/pipeline.py} | 35 +- .../pipelines/steps/poetry_run_step.py | 29 - .../pipelines/steps/simple_docker_step.py | 129 --- .../connectors/pipelines/pipelines/utils.py | 640 ----------- .../connectors/pipelines/pyproject.toml | 2 +- .../connectors/pipelines/tests/conftest.py | 2 +- .../tests/test_actions/test_environments.py | 6 +- .../connectors/pipelines/tests/test_bases.py | 2 +- .../test_builds/test_python_connectors.py | 7 +- .../test_groups/test_connectors.py | 35 +- .../connectors/pipelines/tests/test_gradle.py | 3 +- .../pipelines/tests/test_publish.py | 17 +- .../test_steps/test_simple_docker_step.py | 6 +- .../pipelines/tests/test_tests/test_common.py | 4 +- .../test_tests/test_python_connectors.py | 8 +- .../connectors/pipelines/tests/test_utils.py | 9 +- 105 files changed, 4206 insertions(+), 4191 deletions(-) delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/actions/__init__.py delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/actions/environments.py delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/bases.py rename airbyte-ci/connectors/pipelines/pipelines/{commands => cli}/airbyte_ci.py (93%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py rename airbyte-ci/connectors/pipelines/pipelines/{ => cli}/dagger_run.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{ => cli}/telemetry.py (100%) delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/contexts.py rename airbyte-ci/connectors/pipelines/pipelines/{commands => dagger}/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{commands/groups => dagger/actions}/__init__.py (100%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/internal_tools.py rename airbyte-ci/connectors/pipelines/pipelines/{connector_changes => dagger/actions/python}/__init__.py (100%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py rename airbyte-ci/connectors/pipelines/pipelines/{ => 
dagger}/actions/remote_storage.py (97%) rename airbyte-ci/connectors/pipelines/pipelines/{ => dagger}/actions/secrets.py (57%) rename airbyte-ci/connectors/pipelines/pipelines/{pipelines => dagger/actions/system}/__init__.py (100%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/containers/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/gradle.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/__init__.py rename airbyte-ci/connectors/pipelines/pipelines/{connector_changes => helpers/connectors}/metadata_change_helpers.py (100%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/helpers/git.py rename airbyte-ci/connectors/pipelines/pipelines/{ => helpers}/github.py (98%) rename airbyte-ci/connectors/pipelines/pipelines/{ => helpers}/sentry_utils.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{ => helpers}/slack.py (100%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py rename airbyte-ci/connectors/pipelines/pipelines/{tools => internal_tools}/internal.py (100%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/models/contexts.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/models/reports.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/models/steps.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py rename airbyte-ci/connectors/pipelines/pipelines/{builds => pipeline/connectors/builds/steps}/__init__.py (83%) rename airbyte-ci/connectors/pipelines/pipelines/{builds => pipeline/connectors/builds/steps}/build_customization.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{builds => pipeline/connectors/builds/steps}/common.py (94%) rename airbyte-ci/connectors/pipelines/pipelines/{builds => pipeline/connectors/builds/steps}/java_connectors.py (87%) rename airbyte-ci/connectors/pipelines/pipelines/{builds => pipeline/connectors/builds/steps}/normalization.py (81%) rename airbyte-ci/connectors/pipelines/pipelines/{builds => pipeline/connectors/builds/steps}/python_connectors.py (91%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py rename airbyte-ci/connectors/pipelines/pipelines/{connector_changes/version_bump.py => pipeline/connectors/bump_version/pipeline.py} (96%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py create mode 100644 
airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py rename airbyte-ci/connectors/pipelines/pipelines/{connector_changes/base_image_version_migration.py => pipeline/connectors/migrate_to_base_image/pipeline.py} (97%) rename airbyte-ci/connectors/pipelines/pipelines/{pipelines/connectors.py => pipeline/connectors/pipeline.py} (93%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py rename airbyte-ci/connectors/pipelines/pipelines/{publish.py => pipeline/connectors/publish/pipeline.py} (95%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py rename airbyte-ci/connectors/pipelines/pipelines/{tests => pipeline/connectors/test/steps}/__init__.py (90%) rename airbyte-ci/connectors/pipelines/pipelines/{tests => pipeline/connectors/test/steps}/common.py (96%) rename airbyte-ci/connectors/pipelines/pipelines/{tests => pipeline/connectors/test/steps}/java_connectors.py (86%) rename airbyte-ci/connectors/pipelines/pipelines/{tests => pipeline/connectors/test/steps}/python_connectors.py (90%) rename airbyte-ci/connectors/pipelines/pipelines/{tests => pipeline/connectors/test/steps}/templates/test_report.html.j2 (100%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/__init__.py rename airbyte-ci/connectors/pipelines/pipelines/{commands/groups/metadata.py => pipeline/metadata/commands.py} (88%) rename airbyte-ci/connectors/pipelines/pipelines/{pipelines/metadata.py => pipeline/metadata/pipeline.py} (90%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/__init__.py rename airbyte-ci/connectors/pipelines/pipelines/{ => pipeline/steps}/git.py (74%) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/test/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py rename airbyte-ci/connectors/pipelines/pipelines/{commands/groups/tests.py => pipeline/test/pipeline.py} (79%) delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/steps/poetry_run_step.py delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/steps/simple_docker_step.py delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/utils.py diff --git 
a/airbyte-ci/connectors/pipelines/pipelines/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/__init__.py index 371bafaa1370e..00914e3c459dd 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/__init__.py @@ -8,7 +8,7 @@ from rich.logging import RichHandler -from . import sentry_utils +from .helpers import sentry_utils sentry_utils.initialize() diff --git a/airbyte-ci/connectors/pipelines/pipelines/actions/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/actions/__init__.py deleted file mode 100644 index 09bf0600a802a..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/actions/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# -"""The actions package is made to declare reusable pipeline components.""" diff --git a/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py b/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py deleted file mode 100644 index 3a21ea9b305e4..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/actions/environments.py +++ /dev/null @@ -1,1015 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -"""This module groups functions made to create reusable environments packaged in dagger containers.""" - -from __future__ import annotations - -import importlib.util -import json -import re -import uuid -from pathlib import Path -from typing import TYPE_CHECKING, Callable, List, Optional - -import toml -from dagger import CacheVolume, Client, Container, Directory, File, Platform, Secret -from dagger.engine._version import CLI_VERSION as dagger_engine_version -from pipelines import consts -from pipelines.consts import ( - AMAZONCORRETTO_IMAGE, - CONNECTOR_TESTING_REQUIREMENTS, - DOCKER_HOST_NAME, - DOCKER_HOST_PORT, - DOCKER_TMP_VOLUME_NAME, - LICENSE_SHORT_FILE_PATH, - PYPROJECT_TOML_FILE_PATH, -) -from pipelines.tools.internal import INTERNAL_TOOL_PATHS -from pipelines.utils import check_path_in_workdir, get_file_contents, sh_dash_c - -if TYPE_CHECKING: - from pipelines.contexts import ConnectorContext, PipelineContext - - -def with_python_base(context: PipelineContext, python_version: str = "3.10") -> Container: - """Build a Python container with a cache volume for pip cache. - - Args: - context (PipelineContext): The current test context, providing a dagger client and a repository directory. - python_version (str, optional): The python version to use to build the python base environment. Defaults to "3.10". - - Returns: - Container: The python base environment container. - """ - - pip_cache: CacheVolume = context.dagger_client.cache_volume(f"pip_cache-{python_version}") - - base_container = ( - context.dagger_client.container() - .from_(f"python:{python_version}-slim") - .with_mounted_cache("/root/.cache/pip", pip_cache) - .with_exec( - sh_dash_c( - [ - "apt-get update", - "apt-get install -y build-essential cmake g++ libffi-dev libstdc++6 git", - "pip install pip==23.1.2", - ] - ) - ) - ) - - return base_container - - -def with_testing_dependencies(context: PipelineContext) -> Container: - """Build a testing environment by installing testing dependencies on top of a python base environment. - - Args: - context (PipelineContext): The current test context, providing a dagger client and a repository directory.
- - Returns: - Container: The testing environment container. - """ - python_environment: Container = with_python_base(context) - pyproject_toml_file = context.get_repo_dir(".", include=[PYPROJECT_TOML_FILE_PATH]).file(PYPROJECT_TOML_FILE_PATH) - license_short_file = context.get_repo_dir(".", include=[LICENSE_SHORT_FILE_PATH]).file(LICENSE_SHORT_FILE_PATH) - - return ( - python_environment.with_exec(["pip", "install"] + CONNECTOR_TESTING_REQUIREMENTS) - .with_file(f"/{PYPROJECT_TOML_FILE_PATH}", pyproject_toml_file) - .with_file(f"/{LICENSE_SHORT_FILE_PATH}", license_short_file) - ) - - -def with_git(dagger_client, ci_git_user: str = "octavia") -> Container: - return ( - dagger_client.container() - .from_("alpine:latest") - .with_exec( - sh_dash_c( - [ - "apk update", - "apk add git tar wget", - f"git config --global user.email {ci_git_user}@users.noreply.github.com", - f"git config --global user.name {ci_git_user}", - "git config --global --add --bool push.autoSetupRemote true", - ] - ) - ) - .with_workdir("/ghcli") - .with_exec( - sh_dash_c( - [ - "wget https://github.com/cli/cli/releases/download/v2.30.0/gh_2.30.0_linux_amd64.tar.gz -O ghcli.tar.gz", - "tar --strip-components=1 -xf ghcli.tar.gz", - "rm ghcli.tar.gz", - "cp bin/gh /usr/local/bin/gh", - ] - ) - ) - ) - - -async def with_installed_pipx_package( - context: PipelineContext, - python_environment: Container, - package_source_code_path: str, - exclude: Optional[List] = None, -) -> Container: - """Install a python package in a python environment container using pipx. - - Args: - context (PipelineContext): The current test context, providing the repository directory from which the python sources will be pulled. - python_environment (Container): An existing python environment in which the package will be installed. - package_source_code_path (str): The local path to the package source code. - exclude (Optional[List]): A list of files or directories to exclude from the python package source code. - - Returns: - Container: A python environment container with the python package installed. - """ - pipx_python_environment = with_pipx(python_environment) - container = with_python_package(context, pipx_python_environment, package_source_code_path, exclude=exclude) - - local_dependencies = await find_local_dependencies_in_pyproject_toml(context, container, package_source_code_path, exclude=exclude) - for dependency_directory in local_dependencies: - container = container.with_mounted_directory("/" + dependency_directory, context.get_repo_dir(dependency_directory)) - - container = container.with_exec(["pipx", "install", f"/{package_source_code_path}"]) - - return container - - -def with_python_package( - context: PipelineContext, - python_environment: Container, - package_source_code_path: str, - exclude: Optional[List] = None, - include: Optional[List] = None, -) -> Container: - """Load a python package source code into a python environment container. - - Args: - context (PipelineContext): The current test context, providing the repository directory from which the python sources will be pulled. - python_environment (Container): An existing python environment in which the package source code will be loaded. - package_source_code_path (str): The local path to the package source code. - exclude (Optional[List]): A list of files or directories to exclude from the python package source code. - include (Optional[List]): A list of files or directories to include in the python package source code.
- - Returns: - Container: A python environment container with the python package source code. - """ - package_source_code_directory: Directory = context.get_repo_dir(package_source_code_path, exclude=exclude, include=include) - work_dir_path = f"/{package_source_code_path}" - container = python_environment.with_mounted_directory(work_dir_path, package_source_code_directory).with_workdir(work_dir_path) - return container - - -async def find_local_python_dependencies( - context: PipelineContext, - package_source_code_path: str, - search_dependencies_in_setup_py: bool = True, - search_dependencies_in_requirements_txt: bool = True, -) -> List[str]: - """Find local python dependencies of a python package. The dependencies are found in the setup.py and requirements.txt files. - - Args: - context (PipelineContext): The current pipeline context, providing a dagger client and a repository directory. - package_source_code_path (str): The local path to the python package source code. - search_dependencies_in_setup_py (bool, optional): Whether to search for local dependencies in the setup.py file. Defaults to True. - search_dependencies_in_requirements_txt (bool, optional): Whether to search for local dependencies in the requirements.txt file. Defaults to True. - - Returns: - List[str]: Paths to the local dependencies relative to the airbyte repo. - """ - python_environment = with_python_base(context) - container = with_python_package(context, python_environment, package_source_code_path) - - local_dependency_paths = [] - if search_dependencies_in_setup_py: - local_dependency_paths += await find_local_dependencies_in_setup_py(container) - if search_dependencies_in_requirements_txt: - local_dependency_paths += await find_local_dependencies_in_requirements_txt(container, package_source_code_path) - - transitive_dependency_paths = [] - for local_dependency_path in local_dependency_paths: - # Transitive local dependency installation is achieved by calling their setup.py file, not their requirements.txt file. - transitive_dependency_paths += await find_local_python_dependencies(context, local_dependency_path, True, False) - - all_dependency_paths = local_dependency_paths + transitive_dependency_paths - if all_dependency_paths: - context.logger.debug(f"Found local dependencies for {package_source_code_path}: {all_dependency_paths}") - return all_dependency_paths - - -async def find_local_dependencies_in_setup_py(python_package: Container) -> List[str]: - """Find local dependencies of a python package in its setup.py file. - - Args: - python_package (Container): A python package container. - - Returns: - List[str]: Paths to the local dependencies relative to the airbyte repo. - """ - setup_file_content = await get_file_contents(python_package, "setup.py") - if not setup_file_content: - return [] - - local_setup_dependency_paths = [] - with_egg_info = python_package.with_exec(["python", "setup.py", "egg_info"]) - egg_info_output = await with_egg_info.stdout() - dependency_in_requires_txt = [] - for line in egg_info_output.split("\n"): - if line.startswith("writing requirements to"): - # Find the path to the requirements.txt file that was generated by calling egg_info - requires_txt_path = line.replace("writing requirements to", "").strip() - requirements_txt_content = await with_egg_info.file(requires_txt_path).contents() - dependency_in_requires_txt = requirements_txt_content.split("\n") - - for dependency_line in dependency_in_requires_txt: - if "file://" in dependency_line: - match = re.search(r"file:///(.+)", dependency_line) - if match: - local_setup_dependency_paths.append(match.group(1)) - return local_setup_dependency_paths - - -async def find_local_dependencies_in_requirements_txt(python_package: Container, package_source_code_path: str) -> List[str]: - """Find local dependencies of a python package in a requirements.txt file. - - Args: - python_package (Container): A python environment container with the python package source code. - package_source_code_path (str): The local path to the python package source code. - - Returns: - List[str]: Paths to the local dependencies relative to the airbyte repo. - """ - requirements_txt_content = await get_file_contents(python_package, "requirements.txt") - if not requirements_txt_content: - return [] - - local_requirements_dependency_paths = [] - for line in requirements_txt_content.split("\n"): - # Some packages declare themselves as a requirement in requirements.txt. - # Without the line != "-e ." check, such a package would be considered a dependency of itself, which can cause an infinite loop. - if line.startswith("-e .") and line != "-e .": - local_dependency_path = Path(line[3:]) - package_source_code_path = Path(package_source_code_path) - local_dependency_path = str((package_source_code_path / local_dependency_path).resolve().relative_to(Path.cwd())) - local_requirements_dependency_paths.append(local_dependency_path) - return local_requirements_dependency_paths - - -async def find_local_dependencies_in_pyproject_toml( - context: PipelineContext, - base_container: Container, - pyproject_file_path: str, - exclude: Optional[List] = None, -) -> list: - """Find local dependencies of a python package in a pyproject.toml file. - - Args: - context (PipelineContext): The current pipeline context, providing a dagger client and a repository directory. - base_container (Container): The python environment container in which the package source code will be loaded. - pyproject_file_path (str): The path to the package directory containing the pyproject.toml file. - exclude (Optional[List]): A list of files or directories to exclude from the python package source code. - - Returns: - list: Paths to the local dependencies relative to the current directory.
- """ - python_package = with_python_package(context, base_container, pyproject_file_path) - pyproject_content_raw = await get_file_contents(python_package, "pyproject.toml") - if not pyproject_content_raw: - return [] - - pyproject_content = toml.loads(pyproject_content_raw) - local_dependency_paths = [] - for dep, value in pyproject_content["tool"]["poetry"]["dependencies"].items(): - if isinstance(value, dict) and "path" in value: - local_dependency_path = Path(value["path"]) - pyproject_file_path = Path(pyproject_file_path) - local_dependency_path = str((pyproject_file_path / local_dependency_path).resolve().relative_to(Path.cwd())) - local_dependency_paths.append(local_dependency_path) - - # Ensure we parse the child dependencies - # TODO handle more than pyproject.toml - child_local_dependencies = await find_local_dependencies_in_pyproject_toml( - context, base_container, local_dependency_path, exclude=exclude - ) - local_dependency_paths += child_local_dependencies - - return local_dependency_paths - - -def _install_python_dependencies_from_setup_py( - container: Container, - additional_dependency_groups: Optional[List] = None, -) -> Container: - install_connector_package_cmd = ["pip", "install", "."] - container = container.with_exec(install_connector_package_cmd) - - if additional_dependency_groups: - # e.g. .[dev,tests] - group_string = f".[{','.join(additional_dependency_groups)}]" - group_install_cmd = ["pip", "install", group_string] - - container = container.with_exec(group_install_cmd) - - return container - - -def _install_python_dependencies_from_requirements_txt(container: Container) -> Container: - install_requirements_cmd = ["pip", "install", "-r", "requirements.txt"] - return container.with_exec(install_requirements_cmd) - - -def _install_python_dependencies_from_poetry( - container: Container, - additional_dependency_groups: Optional[List] = None, -) -> Container: - pip_install_poetry_cmd = ["pip", "install", "poetry"] - poetry_disable_virtual_env_cmd = ["poetry", "config", "virtualenvs.create", "false"] - poetry_install_no_venv_cmd = ["poetry", "install"] - if additional_dependency_groups: - for group in additional_dependency_groups: - poetry_install_no_venv_cmd += ["--with", group] - - return container.with_exec(pip_install_poetry_cmd).with_exec(poetry_disable_virtual_env_cmd).with_exec(poetry_install_no_venv_cmd) - - -async def with_installed_python_package( - context: PipelineContext, - python_environment: Container, - package_source_code_path: str, - additional_dependency_groups: Optional[List] = None, - exclude: Optional[List] = None, - include: Optional[List] = None, -) -> Container: - """Install a python package in a python environment container. - - Args: - context (PipelineContext): The current test context, providing the repository directory from which the python sources will be pulled. - python_environment (Container): An existing python environment in which the package will be installed. - package_source_code_path (str): The local path to the package source code. - additional_dependency_groups (Optional[List]): extra_requires dependency of setup.py to install. Defaults to None. - exclude (Optional[List]): A list of file or directory to exclude from the python package source code. - - Returns: - Container: A python environment container with the python package installed. 
- """ - container = with_python_package(context, python_environment, package_source_code_path, exclude=exclude, include=include) - - local_dependencies = await find_local_python_dependencies(context, package_source_code_path) - - for dependency_directory in local_dependencies: - container = container.with_mounted_directory("/" + dependency_directory, context.get_repo_dir(dependency_directory)) - - has_setup_py = await check_path_in_workdir(container, "setup.py") - has_requirements_txt = await check_path_in_workdir(container, "requirements.txt") - has_pyproject_toml = await check_path_in_workdir(container, "pyproject.toml") - - if has_pyproject_toml: - container = _install_python_dependencies_from_poetry(container, additional_dependency_groups) - elif has_setup_py: - container = _install_python_dependencies_from_setup_py(container, additional_dependency_groups) - elif has_requirements_txt: - container = _install_python_dependencies_from_requirements_txt(container) - - return container - - -def with_python_connector_source(context: ConnectorContext) -> Container: - """Load an airbyte connector source code in a testing environment. - - Args: - context (ConnectorContext): The current test context, providing the repository directory from which the connector sources will be pulled. - Returns: - Container: A python environment container (with the connector source code). - """ - connector_source_path = str(context.connector.code_directory) - testing_environment: Container = with_testing_dependencies(context) - - return with_python_package(context, testing_environment, connector_source_path) - - -async def apply_python_development_overrides(context: ConnectorContext, connector_container: Container) -> Container: - # Run the connector using the local cdk if flag is set - if context.use_local_cdk: - context.logger.info("Using local CDK") - # mount the local cdk - path_to_cdk = "airbyte-cdk/python/" - directory_to_mount = context.get_repo_dir(path_to_cdk) - - context.logger.info(f"Mounting CDK from {directory_to_mount}") - - # Install the airbyte-cdk package from the local directory - # We use --no-deps to avoid conflicts with the airbyte-cdk version required by the connector - connector_container = connector_container.with_mounted_directory(f"/{path_to_cdk}", directory_to_mount).with_exec( - ["pip", "install", "--no-deps", f"/{path_to_cdk}"], skip_entrypoint=True - ) - - return connector_container - - -async def with_python_connector_installed( - context: PipelineContext, - python_container: Container, - connector_source_path: str, - additional_dependency_groups: Optional[List] = None, - exclude: Optional[List] = None, - include: Optional[List] = None, -) -> Container: - """Install an airbyte python connectors dependencies.""" - container = await with_installed_python_package( - context, - python_container, - connector_source_path, - additional_dependency_groups=additional_dependency_groups, - exclude=exclude, - include=include, - ) - - container = await apply_python_development_overrides(context, container) - - return container - - -async def with_ci_credentials(context: PipelineContext, gsm_secret: Secret) -> Container: - """Install the ci_credentials package in a python environment. - - Args: - context (PipelineContext): The current test context, providing the repository directory from which the ci_credentials sources will be pulled. - gsm_secret (Secret): The secret holding GCP_GSM_CREDENTIALS env variable value. 
- - Returns: - Container: A python environment with the ci_credentials package installed. - """ - python_base_environment: Container = with_python_base(context) - ci_credentials = await with_installed_pipx_package(context, python_base_environment, INTERNAL_TOOL_PATHS.CI_CREDENTIALS.value) - ci_credentials = ci_credentials.with_env_variable("VERSION", "dagger_ci") - return ci_credentials.with_secret_variable("GCP_GSM_CREDENTIALS", gsm_secret).with_workdir("/") - - -def with_alpine_packages(base_container: Container, packages_to_install: List[str]) -> Container: - """Installs packages using apk. - Args: - base_container (Container): An alpine based container. - - Returns: - Container: A container with the packages installed. - - """ - package_install_command = ["apk", "add"] - return base_container.with_exec(package_install_command + packages_to_install) - - -def with_debian_packages(base_container: Container, packages_to_install: List[str]) -> Container: - """Installs packages using apt-get. - Args: - base_container (Container): A debian based container. - - Returns: - Container: A container with the packages installed. - - """ - update_packages_command = ["apt-get", "update"] - package_install_command = ["apt-get", "install", "-y"] - return base_container.with_exec(update_packages_command).with_exec(package_install_command + packages_to_install) - - -def with_pip_packages(base_container: Container, packages_to_install: List[str]) -> Container: - """Installs packages using pip. - Args: - base_container (Container): A container with python installed. - - Returns: - Container: A container with the pip packages installed. - - """ - package_install_command = ["pip", "install"] - return base_container.with_exec(package_install_command + packages_to_install) - - -async def with_connector_ops(context: PipelineContext) -> Container: - """Installs the connector_ops package in a Container running Python > 3.10 with git. - - Args: - context (PipelineContext): The current test context, providing the repository directory from which the connector_ops sources will be pulled. - - Returns: - Container: A python environment container with connector_ops installed. - """ - python_base_environment: Container = with_python_base(context) - - return await with_installed_pipx_package(context, python_base_environment, INTERNAL_TOOL_PATHS.CONNECTOR_OPS.value) - - -def with_global_dockerd_service(dagger_client: Client) -> Container: - """Create a container with a docker daemon running. - We expose its 2375 port to use it as a docker host for docker-in-docker use cases. - Args: - dagger_client (Client): The dagger client used to create the container. - Returns: - Container: The container running dockerd as a service. - """ - return ( - dagger_client.container().from_(consts.DOCKER_DIND_IMAGE) - # We set this env var because we need to use a non-default zombie reaper setting. - # The reason for this is that by default it will want to set its parent process ID to 1 when reaping. - # This won't be possible because of container-ception: dind is running inside the dagger engine. - # See https://github.com/krallin/tini#subreaping for details. - .with_env_variable("TINI_SUBREAPER", "") - # Similarly, because of container-ception, we have to use the fuse-overlayfs storage engine. - .with_exec( - sh_dash_c( - [ - # Update package metadata. - "apk update", - # Install the storage driver package. - "apk add fuse-overlayfs", - # Update daemon config with storage driver.
- "mkdir /etc/docker", - '(echo {\\"storage-driver\\": \\"fuse-overlayfs\\"} > /etc/docker/daemon.json)', - ] - ) - ) - # Expose the docker host port. - .with_exposed_port(DOCKER_HOST_PORT) - # Mount the docker cache volumes. - .with_mounted_cache("/tmp", dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME)) - # Run the docker daemon and bind it to the exposed TCP port. - .with_exec( - ["dockerd", "--log-level=error", f"--host=tcp://0.0.0.0:{DOCKER_HOST_PORT}", "--tls=false"], insecure_root_capabilities=True - ) - ) - - -def with_bound_docker_host( - context: ConnectorContext, - container: Container, -) -> Container: - """Bind a container to a docker host. It will use the dockerd service as a docker host. - - Args: - context (ConnectorContext): The current connector context. - container (Container): The container to bind to the docker host. - Returns: - Container: The container bound to the docker host. - """ - return ( - container.with_env_variable("DOCKER_HOST", f"tcp://{DOCKER_HOST_NAME}:{DOCKER_HOST_PORT}") - .with_service_binding(DOCKER_HOST_NAME, context.dockerd_service) - .with_mounted_cache("/tmp", context.dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME)) - ) - - -def bound_docker_host(context: ConnectorContext) -> Callable[[Container], Container]: - def bound_docker_host_inner(container: Container) -> Container: - return with_bound_docker_host(context, container) - - return bound_docker_host_inner - - -def with_docker_cli(context: ConnectorContext) -> Container: - """Create a container with the docker CLI installed and bound to a persistent docker host. - - Args: - context (ConnectorContext): The current connector context. - - Returns: - Container: A docker cli container bound to a docker host. - """ - docker_cli = context.dagger_client.container().from_(consts.DOCKER_CLI_IMAGE) - return with_bound_docker_host(context, docker_cli) - - -async def load_image_to_docker_host(context: ConnectorContext, tar_file: File, image_tag: str): - """Load a docker image tar archive to the docker host. - - Args: - context (ConnectorContext): The current connector context. - tar_file (File): The file object holding the docker image tar archive. - image_tag (str): The tag to create on the image if it has no tag. - """ - # Hacky way to make sure the image is always loaded - tar_name = f"{str(uuid.uuid4())}.tar" - docker_cli = with_docker_cli(context).with_mounted_file(tar_name, tar_file) - - image_load_output = await docker_cli.with_exec(["docker", "load", "--input", tar_name]).stdout() - # Not tagged images only have a sha256 id the load output shares. - if "sha256:" in image_load_output: - image_id = image_load_output.replace("\n", "").replace("Loaded image ID: sha256:", "") - await docker_cli.with_exec(["docker", "tag", image_id, image_tag]) - image_sha = json.loads(await docker_cli.with_exec(["docker", "inspect", image_tag]).stdout())[0].get("Id") - return image_sha - - -def with_pipx(base_python_container: Container) -> Container: - """Installs pipx in a python container. - - Args: - base_python_container (Container): The container to install pipx on. - - Returns: - Container: A python environment with pipx installed. - """ - python_with_pipx = with_pip_packages(base_python_container, ["pipx"]).with_env_variable("PIPX_BIN_DIR", "/usr/local/bin") - - return python_with_pipx - - -def with_poetry(context: PipelineContext) -> Container: - """Install poetry in a python environment. 
- - Args: - context (PipelineContext): The current test context, providing the repository directory from which the ci_credentials sources will be pulled. - Returns: - Container: A python environment with poetry installed. - """ - python_base_environment: Container = with_python_base(context) - python_with_git = with_debian_packages(python_base_environment, ["git"]) - python_with_poetry = with_pip_packages(python_with_git, ["poetry"]) - - # poetry_cache: CacheVolume = context.dagger_client.cache_volume("poetry_cache") - # poetry_with_cache = python_with_poetry.with_mounted_cache("/root/.cache/pypoetry", poetry_cache, sharing=CacheSharingMode.SHARED) - - return python_with_poetry - - -def with_poetry_module(context: PipelineContext, parent_dir: Directory, module_path: str) -> Container: - """Sets up a Poetry module. - - Args: - context (PipelineContext): The current test context, providing the repository directory from which the ci_credentials sources will be pulled. - Returns: - Container: A python environment with dependencies installed using poetry. - """ - poetry_install_dependencies_cmd = ["poetry", "install"] - - python_with_poetry = with_poetry(context) - return ( - python_with_poetry.with_mounted_directory("/src", parent_dir) - .with_workdir(f"/src/{module_path}") - .with_exec(poetry_install_dependencies_cmd) - .with_env_variable("CACHEBUSTER", str(uuid.uuid4())) - ) - - -def with_integration_base(context: PipelineContext, build_platform: Platform) -> Container: - return ( - context.dagger_client.container(platform=build_platform) - .from_("amazonlinux:2022.0.20220831.1") - .with_workdir("/airbyte") - .with_file("base.sh", context.get_repo_dir("airbyte-integrations/bases/base", include=["base.sh"]).file("base.sh")) - .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/base.sh") - .with_label("io.airbyte.version", "0.1.0") - .with_label("io.airbyte.name", "airbyte/integration-base") - ) - - -def with_integration_base_java(context: PipelineContext, build_platform: Platform) -> Container: - integration_base = with_integration_base(context, build_platform) - yum_packages_to_install = [ - "tar", # required to untar java connector binary distributions. - "openssl", # required because we need to ssh and scp sometimes. - "findutils", # required for xargs, which is shipped as part of findutils. - ] - return ( - context.dagger_client.container(platform=build_platform) - # Use a linux+jdk base image with long-term support, such as amazoncorretto. - .from_(AMAZONCORRETTO_IMAGE) - # Install a bunch of packages as early as possible. - .with_exec( - sh_dash_c( - [ - # Update first, but in the same .with_exec step as the package installation. - # Otherwise, we risk caching stale package URLs. - "yum update -y", - # - f"yum install -y {' '.join(yum_packages_to_install)}", - # Remove any dangly bits. - "yum clean all", - ] - ) - ) - # Add what files we need to the /airbyte directory. - # Copy base.sh from the airbyte/integration-base image. - .with_directory("/airbyte", integration_base.directory("/airbyte")) - .with_workdir("/airbyte") - # Download a utility jar from the internet. - .with_file("dd-java-agent.jar", context.dagger_client.http("https://dtdg.co/latest-java-tracer")) - # Copy javabase.sh from the git repo. - .with_file("javabase.sh", context.get_repo_dir("airbyte-integrations/bases/base-java", include=["javabase.sh"]).file("javabase.sh")) - # Set a bunch of env variables used by base.sh. 
- .with_env_variable("AIRBYTE_SPEC_CMD", "/airbyte/javabase.sh --spec") - .with_env_variable("AIRBYTE_CHECK_CMD", "/airbyte/javabase.sh --check") - .with_env_variable("AIRBYTE_DISCOVER_CMD", "/airbyte/javabase.sh --discover") - .with_env_variable("AIRBYTE_READ_CMD", "/airbyte/javabase.sh --read") - .with_env_variable("AIRBYTE_WRITE_CMD", "/airbyte/javabase.sh --write") - .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/base.sh") - # Set image labels. - .with_label("io.airbyte.version", "0.1.2") - .with_label("io.airbyte.name", "airbyte/integration-base-java") - ) - - -BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = { - "destination-clickhouse": { - "dockerfile": "clickhouse.Dockerfile", - "dbt_adapter": "dbt-clickhouse>=1.4.0", - "integration_name": "clickhouse", - "normalization_image": "airbyte/normalization-clickhouse:0.4.3", - "supports_in_connector_normalization": False, - "yum_packages": [], - }, - "destination-duckdb": { - "dockerfile": "duckdb.Dockerfile", - "dbt_adapter": "dbt-duckdb==1.0.1", - "integration_name": "duckdb", - "normalization_image": "airbyte/normalization-duckdb:0.4.3", - "supports_in_connector_normalization": False, - "yum_packages": [], - }, - "destination-mssql": { - "dockerfile": "mssql.Dockerfile", - "dbt_adapter": "dbt-sqlserver==1.0.0", - "integration_name": "mssql", - "normalization_image": "airbyte/normalization-mssql:0.4.3", - "supports_in_connector_normalization": True, - "yum_packages": [], - }, - "destination-mysql": { - "dockerfile": "mysql.Dockerfile", - "dbt_adapter": "dbt-mysql==1.0.0", - "integration_name": "mysql", - "normalization_image": "airbyte/normalization-mysql:0.4.3", - "supports_in_connector_normalization": False, - "yum_packages": [], - }, - "destination-oracle": { - "dockerfile": "oracle.Dockerfile", - "dbt_adapter": "dbt-oracle==0.4.3", - "integration_name": "oracle", - "normalization_image": "airbyte/normalization-oracle:0.4.3", - "supports_in_connector_normalization": False, - "yum_packages": [], - }, - "destination-postgres": { - "dockerfile": "Dockerfile", - "dbt_adapter": "dbt-postgres==1.0.0", - "integration_name": "postgres", - "normalization_image": "airbyte/normalization:0.4.3", - "supports_in_connector_normalization": False, - "yum_packages": [], - }, - "destination-redshift": { - "dockerfile": "redshift.Dockerfile", - "dbt_adapter": "dbt-redshift==1.0.0", - "integration_name": "redshift", - "normalization_image": "airbyte/normalization-redshift:0.4.3", - "supports_in_connector_normalization": True, - "yum_packages": [], - }, - "destination-tidb": { - "dockerfile": "tidb.Dockerfile", - "dbt_adapter": "dbt-tidb==1.0.1", - "integration_name": "tidb", - "normalization_image": "airbyte/normalization-tidb:0.4.3", - "supports_in_connector_normalization": True, - "yum_packages": [], - }, -} - -DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = { - **BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION, - **{f"{k}-strict-encrypt": v for k, v in BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION.items()}, -} - - -def with_normalization(context: ConnectorContext, build_platform: Platform) -> Container: - return context.dagger_client.container(platform=build_platform).from_( - DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["normalization_image"] - ) - - -def with_integration_base_java_and_normalization(context: PipelineContext, build_platform: Platform) -> Container: - yum_packages_to_install = [ - "python3", - "python3-devel", - "jq", - "sshpass", - "git", - ] - - additional_yum_packages = 
DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["yum_packages"] - yum_packages_to_install += additional_yum_packages - - dbt_adapter_package = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["dbt_adapter"] - normalization_integration_name = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["integration_name"] - - pip_cache: CacheVolume = context.dagger_client.cache_volume("pip_cache") - - return ( - with_integration_base_java(context, build_platform) - .with_exec( - sh_dash_c( - [ - "yum update -y", - f"yum install -y {' '.join(yum_packages_to_install)}", - "yum clean all", - "alternatives --install /usr/bin/python python /usr/bin/python3 60", - ] - ) - ) - .with_mounted_cache("/root/.cache/pip", pip_cache) - .with_exec( - sh_dash_c( - [ - "python -m ensurepip --upgrade", - # Workaround for https://github.com/yaml/pyyaml/issues/601 - "pip3 install 'Cython<3.0' 'pyyaml~=5.4' --no-build-isolation", - # Required for dbt https://github.com/dbt-labs/dbt-core/issues/7075 - "pip3 install 'pytz~=2023.3'", - f"pip3 install {dbt_adapter_package}", - # amazon linux 2 isn't compatible with urllib3 2.x, so force 1.x - "pip3 install 'urllib3<2'", - ] - ) - ) - .with_directory("airbyte_normalization", with_normalization(context, build_platform).directory("/airbyte")) - .with_workdir("airbyte_normalization") - .with_exec(sh_dash_c(["mv * .."])) - .with_workdir("/airbyte") - .with_exec(["rm", "-rf", "airbyte_normalization"]) - .with_workdir("/airbyte/normalization_code") - .with_exec(["pip3", "install", "."]) - .with_workdir("/airbyte/normalization_code/dbt-template/") - .with_exec(["dbt", "deps"]) - .with_workdir("/airbyte") - .with_file( - "run_with_normalization.sh", - context.get_repo_dir("airbyte-integrations/bases/base-java", include=["run_with_normalization.sh"]).file( - "run_with_normalization.sh" - ), - ) - .with_env_variable("AIRBYTE_NORMALIZATION_INTEGRATION", normalization_integration_name) - .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/run_with_normalization.sh") - ) - - -async def with_airbyte_java_connector(context: ConnectorContext, connector_java_tar_file: File, build_platform: Platform) -> Container: - application = context.connector.technical_name - - build_stage = ( - with_integration_base_java(context, build_platform) - .with_workdir("/airbyte") - .with_env_variable("APPLICATION", context.connector.technical_name) - .with_file(f"{application}.tar", connector_java_tar_file) - .with_exec( - sh_dash_c( - [ - f"tar xf {application}.tar --strip-components=1", - f"rm -rf {application}.tar", - ] - ) - ) - ) - - if ( - context.connector.supports_normalization - and DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["supports_in_connector_normalization"] - ): - base = with_integration_base_java_and_normalization(context, build_platform) - entrypoint = ["/airbyte/run_with_normalization.sh"] - else: - base = with_integration_base_java(context, build_platform) - entrypoint = ["/airbyte/base.sh"] - - connector_container = ( - base.with_workdir("/airbyte") - .with_env_variable("APPLICATION", application) - .with_mounted_directory("built_artifacts", build_stage.directory("/airbyte")) - .with_exec(sh_dash_c(["mv built_artifacts/* ."])) - .with_label("io.airbyte.version", context.metadata["dockerImageTag"]) - .with_label("io.airbyte.name", context.metadata["dockerRepository"]) - .with_entrypoint(entrypoint) - ) - return await finalize_build(context, connector_container) - - 
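For context on how the helpers above are meant to compose: every dagger with_* call returns a new immutable Container, so environments are built by chaining pure functions. A minimal standalone sketch of that pattern, outside this patch, assuming only the dagger Python SDK and anyio (image tag and commands are illustrative):

import anyio
import dagger


async def main():
    # Connect to the dagger engine, as PipelineContext does for the helpers above.
    async with dagger.Connection(dagger.Config()) as client:
        base = (
            client.container()
            .from_("python:3.10-slim")  # same base image family as with_python_base
            .with_mounted_cache("/root/.cache/pip", client.cache_volume("pip_cache"))
            .with_exec(["pip", "install", "pipx"])  # mirrors with_pip_packages / with_pipx
        )
        # Chaining another with_exec yields a new container; `base` is left untouched.
        print(await base.with_exec(["pipx", "--version"]).stdout())


anyio.run(main)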
-async def finalize_build(context: ConnectorContext, connector_container: Container) -> Container: - """Finalize build by adding dagger engine version label and running finalize_build.sh or finalize_build.py if present in the connector directory.""" - connector_container = connector_container.with_label("io.dagger.engine_version", dagger_engine_version) - connector_dir_with_finalize_script = await context.get_connector_dir(include=["finalize_build.sh", "finalize_build.py"]) - finalize_scripts = await connector_dir_with_finalize_script.entries() - if not finalize_scripts: - return connector_container - - # We don't want finalize scripts to override the entrypoint so we keep it in memory to reset it after finalization - original_entrypoint = await connector_container.entrypoint() - - has_finalize_bash_script = "finalize_build.sh" in finalize_scripts - has_finalize_python_script = "finalize_build.py" in finalize_scripts - if has_finalize_python_script and has_finalize_bash_script: - raise Exception("Connector has both finalize_build.sh and finalize_build.py, please remove one of them") - - if has_finalize_python_script: - context.logger.info(f"{context.connector.technical_name} has a finalize_build.py script, running it to finalize build...") - module_path = context.connector.code_directory / "finalize_build.py" - connector_finalize_module_spec = importlib.util.spec_from_file_location( - f"{context.connector.code_directory.name}_finalize", module_path - ) - connector_finalize_module = importlib.util.module_from_spec(connector_finalize_module_spec) - connector_finalize_module_spec.loader.exec_module(connector_finalize_module) - try: - connector_container = await connector_finalize_module.finalize_build(context, connector_container) - except AttributeError: - raise Exception("Connector has a finalize_build.py script but it doesn't have a finalize_build function.") - - if has_finalize_bash_script: - context.logger.info(f"{context.connector.technical_name} has finalize_build.sh script, running it to finalize build...") - connector_container = ( - connector_container.with_file("/tmp/finalize_build.sh", connector_dir_with_finalize_script.file("finalize_build.sh")) - .with_entrypoint("sh") - .with_exec(["/tmp/finalize_build.sh"]) - ) - - return connector_container.with_entrypoint(original_entrypoint) - - -def with_crane( - context: PipelineContext, -) -> Container: - """Crane is a tool to analyze and manipulate container images. - We can use it to extract the image manifest and the list of layers or list the existing tags on an image repository. 
- https://github.com/google/go-containerregistry/tree/main/cmd/crane - """ - - # We use the debug image as it contains a shell which we need to properly use environment variables - # https://github.com/google/go-containerregistry/tree/main/cmd/crane#images - base_container = context.dagger_client.container().from_("gcr.io/go-containerregistry/crane/debug:v0.15.1") - - if context.docker_hub_username_secret and context.docker_hub_password_secret: - base_container = ( - base_container.with_secret_variable("DOCKER_HUB_USERNAME", context.docker_hub_username_secret).with_secret_variable( - "DOCKER_HUB_PASSWORD", context.docker_hub_password_secret - ) - # We need to use skip_entrypoint=True to avoid the entrypoint to be overridden by the crane command - # We use sh -c to be able to use environment variables in the command - # This is a workaround as the default crane entrypoint doesn't support environment variables - .with_exec( - sh_dash_c(["crane auth login index.docker.io -u $DOCKER_HUB_USERNAME -p $DOCKER_HUB_PASSWORD"]), skip_entrypoint=True - ) - ) - - return base_container - - -async def mounted_connector_secrets(context: PipelineContext, secret_directory_path: str) -> Callable[[Container], Container]: - # By default, mount the secrets properly as dagger secret files. - # - # This will cause the contents of these files to be scrubbed from the logs. This scrubbing comes at the cost of - # unavoidable latency in the log output, see next paragraph for details as to why. This is fine in a CI environment - # however this becomes a nuisance locally: the developer wants the logs to be displayed to them in an as timely - # manner as possible. Since the secrets aren't really secret in that case anyway, we mount them in the container as - # regular files instead. - # - # The buffering behavior that comes into play when logs are scrubbed is both unavoidable and not configurable. - # It's fundamentally unavoidable because dagger needs to match a bunch of regexes (one per secret) and therefore - # needs to buffer at least as many bytes as the longest of all possible matches. Still, this isn't that long in - # practice in our case. The real problem is that the buffering is not configurable: dagger relies on a golang - # library called transform [1] to perform the regexp matching on a stream and this library hard-codes a buffer - # size of 4096 bytes for each regex [2]. - # - # Remove the special local case whenever dagger implements scrubbing differently [3,4]. - # - # [1] https://golang.org/x/text/transform - # [2] https://cs.opensource.google/go/x/text/+/refs/tags/v0.13.0:transform/transform.go;l=130 - # [3] https://github.com/dagger/dagger/blob/v0.6.4/cmd/shim/main.go#L294 - # [4] https://github.com/airbytehq/airbyte/issues/30394 - # - if context.is_local: - # Special case for local development. - # Query dagger for the contents of the secrets and mount these strings as files in the container. 
- contents = {} - for secret_file_name, secret in context.connector_secrets.items(): - contents[secret_file_name] = await secret.plaintext() - - def with_secrets_mounted_as_regular_files(container: Container) -> Container: - container = container.with_exec(["mkdir", "-p", secret_directory_path], skip_entrypoint=True) - for secret_file_name, secret_content_str in contents.items(): - container = container.with_new_file(f"{secret_directory_path}/{secret_file_name}", secret_content_str, permissions=0o600) - return container - - return with_secrets_mounted_as_regular_files - - def with_secrets_mounted_as_dagger_secrets(container: Container) -> Container: - container = container.with_exec(["mkdir", "-p", secret_directory_path], skip_entrypoint=True) - for secret_file_name, secret in context.connector_secrets.items(): - container = container.with_mounted_secret(f"{secret_directory_path}/{secret_file_name}", secret) - return container - - return with_secrets_mounted_as_dagger_secrets diff --git a/airbyte-ci/connectors/pipelines/pipelines/bases.py b/airbyte-ci/connectors/pipelines/pipelines/bases.py deleted file mode 100644 index cb1cbd6314da9..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/bases.py +++ /dev/null @@ -1,624 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -"""This module declares base / abstract models to be reused in a pipeline lifecycle.""" - -from __future__ import annotations - -import json -import logging -import webbrowser -from abc import ABC, abstractmethod -from dataclasses import dataclass, field -from datetime import datetime, timedelta -from enum import Enum -from typing import TYPE_CHECKING, Any, ClassVar, List, Optional, Set - -import anyio -import asyncer -from anyio import Path -from connector_ops.utils import Connector, console -from dagger import Container, DaggerError -from jinja2 import Environment, PackageLoader, select_autoescape -from pipelines import sentry_utils -from pipelines.actions import remote_storage -from pipelines.consts import GCS_PUBLIC_DOMAIN, LOCAL_REPORTS_PATH_ROOT -from pipelines.utils import METADATA_FILE_NAME, format_duration, get_exec_result -from rich.console import Group -from rich.panel import Panel -from rich.style import Style -from rich.table import Table -from rich.text import Text -from tabulate import tabulate - -if TYPE_CHECKING: - from pipelines.contexts import PipelineContext - - -@dataclass(frozen=True) -class ConnectorWithModifiedFiles(Connector): - modified_files: Set[Path] = field(default_factory=frozenset) - - @property - def has_metadata_change(self) -> bool: - return any(path.name == METADATA_FILE_NAME for path in self.modified_files) - - -class CIContext(str, Enum): - """An enum for CI context values which can be ["manual", "pull_request", "nightly_builds", "master"].""" - - MANUAL = "manual" - PULL_REQUEST = "pull_request" - NIGHTLY_BUILDS = "nightly_builds" - MASTER = "master" - - def __str__(self) -> str: - return self.value - - -class StepStatus(Enum): - """An Enum to characterize the success, failure or skipping of a Step.""" - - SUCCESS = "Successful" - FAILURE = "Failed" - SKIPPED = "Skipped" - - def get_rich_style(self) -> Style: - """Match color used in the console output to the step status.""" - if self is StepStatus.SUCCESS: - return Style(color="green") - if self is StepStatus.FAILURE: - return Style(color="red", bold=True) - if self is StepStatus.SKIPPED: - return Style(color="yellow") - - def get_emoji(self) -> str: - """Match emoji used in the console output to the step
status.""" - if self is StepStatus.SUCCESS: - return "✅" - if self is StepStatus.FAILURE: - return "❌" - if self is StepStatus.SKIPPED: - return "🟡" - - def __str__(self) -> str: # noqa D105 - return self.value - - -class Step(ABC): - """An abstract class to declare and run pipeline step.""" - - title: ClassVar[str] - max_retries: ClassVar[int] = 0 - max_dagger_error_retries: ClassVar[int] = 3 - should_log: ClassVar[bool] = True - success_exit_code: ClassVar[int] = 0 - skipped_exit_code: ClassVar[int] = None - # The max duration of a step run. If the step run for more than this duration it will be considered as timed out. - # The default of 5 hours is arbitrary and can be changed if needed. - max_duration: ClassVar[timedelta] = timedelta(hours=5) - - retry_delay = timedelta(seconds=10) - - def __init__(self, context: PipelineContext) -> None: # noqa D107 - self.context = context - self.retry_count = 0 - self.started_at = None - self.stopped_at = None - - @property - def run_duration(self) -> timedelta: - if self.started_at and self.stopped_at: - return self.stopped_at - self.started_at - else: - return timedelta(seconds=0) - - @property - def logger(self) -> logging.Logger: - if self.should_log: - return logging.getLogger(f"{self.context.pipeline_name} - {self.title}") - else: - disabled_logger = logging.getLogger() - disabled_logger.disabled = True - return disabled_logger - - @property - def dagger_client(self) -> Container: - return self.context.dagger_client.pipeline(self.title) - - async def log_progress(self, completion_event: anyio.Event) -> None: - """Log the step progress every 30 seconds until the step is done.""" - while not completion_event.is_set(): - duration = datetime.utcnow() - self.started_at - elapsed_seconds = duration.total_seconds() - if elapsed_seconds > 30 and round(elapsed_seconds) % 30 == 0: - self.logger.info(f"⏳ Still running... (duration: {format_duration(duration)})") - await anyio.sleep(1) - - async def run_with_completion(self, completion_event: anyio.Event, *args, **kwargs) -> StepResult: - """Run the step with a timeout and set the completion event when the step is done.""" - try: - with anyio.fail_after(self.max_duration.total_seconds()): - result = await self._run(*args, **kwargs) - completion_event.set() - return result - except TimeoutError: - self.retry_count = self.max_retries + 1 - self.logger.error(f"🚨 {self.title} timed out after {self.max_duration}. No additional retry will happen.") - completion_event.set() - return self._get_timed_out_step_result() - - @sentry_utils.with_step_context - async def run(self, *args, **kwargs) -> StepResult: - """Public method to run the step. It output a step result. - - If an unexpected dagger error happens it outputs a failed step result with the exception payload. - - Returns: - StepResult: The step result following the step run. 
- """ - self.logger.info(f"🚀 Start {self.title}") - self.started_at = datetime.utcnow() - completion_event = anyio.Event() - try: - async with asyncer.create_task_group() as task_group: - soon_result = task_group.soonify(self.run_with_completion)(completion_event, *args, **kwargs) - task_group.soonify(self.log_progress)(completion_event) - step_result = soon_result.value - except DaggerError as e: - self.logger.error("Step failed with an unexpected dagger error", exc_info=e) - step_result = StepResult(self, StepStatus.FAILURE, stderr=str(e), exc_info=e) - - self.stopped_at = datetime.utcnow() - self.log_step_result(step_result) - - lets_retry = self.should_retry(step_result) - step_result = await self.retry(step_result, *args, **kwargs) if lets_retry else step_result - return step_result - - def should_retry(self, step_result: StepResult) -> bool: - """Return True if the step should be retried.""" - if step_result.status is not StepStatus.FAILURE: - return False - max_retries = self.max_dagger_error_retries if step_result.exc_info else self.max_retries - return self.retry_count < max_retries and max_retries > 0 - - async def retry(self, step_result, *args, **kwargs) -> StepResult: - self.retry_count += 1 - self.logger.warn( - f"Failed with error: {step_result.stderr}.\nRetry #{self.retry_count} in {self.retry_delay.total_seconds()} seconds..." - ) - await anyio.sleep(self.retry_delay.total_seconds()) - return await self.run(*args, **kwargs) - - def log_step_result(self, result: StepResult) -> None: - """Log the step result. - - Args: - result (StepResult): The step result to log. - """ - duration = format_duration(self.run_duration) - if result.status is StepStatus.FAILURE: - self.logger.info(f"{result.status.get_emoji()} failed (duration: {duration})") - if result.status is StepStatus.SKIPPED: - self.logger.info(f"{result.status.get_emoji()} was skipped (duration: {duration})") - if result.status is StepStatus.SUCCESS: - self.logger.info(f"{result.status.get_emoji()} was successful (duration: {duration})") - - @abstractmethod - async def _run(self, *args, **kwargs) -> StepResult: - """Implement the execution of the step and return a step result. - - Returns: - StepResult: The result of the step run. - """ - raise NotImplementedError("Steps must define a '_run' attribute.") - - def skip(self, reason: str = None) -> StepResult: - """Declare a step as skipped. - - Args: - reason (str, optional): Reason why the step was skipped. - - Returns: - StepResult: A skipped step result. - """ - return StepResult(self, StepStatus.SKIPPED, stdout=reason) - - def get_step_status_from_exit_code( - self, - exit_code: int, - ) -> StepStatus: - """Map an exit code to a step status. - - Args: - exit_code (int): A process exit code. - - Raises: - ValueError: Raised if the exit code is not mapped to a step status. - - Returns: - StepStatus: The step status inferred from the exit code. - """ - if exit_code == self.success_exit_code: - return StepStatus.SUCCESS - elif self.skipped_exit_code is not None and exit_code == self.skipped_exit_code: - return StepStatus.SKIPPED - else: - return StepStatus.FAILURE - - async def get_step_result(self, container: Container) -> StepResult: - """Concurrent retrieval of exit code, stdout and stdout of a container. - - Create a StepResult object from these objects. - - Args: - container (Container): The container from which we want to infer a step result/ - - Returns: - StepResult: Failure or success with stdout and stderr. 
- """ - exit_code, stdout, stderr = await get_exec_result(container) - return StepResult( - self, - self.get_step_status_from_exit_code(exit_code), - stderr=stderr, - stdout=stdout, - output_artifact=container, - ) - - def _get_timed_out_step_result(self) -> StepResult: - return StepResult( - self, - StepStatus.FAILURE, - stdout=f"Timed out after the max duration of {format_duration(self.max_duration)}. Please checkout the Dagger logs to see what happened.", - ) - - -class NoOpStep(Step): - """A step that does nothing.""" - - title = "No Op" - should_log = False - - def __init__(self, context: PipelineContext, step_status: StepStatus) -> None: - super().__init__(context) - self.step_status = step_status - - async def _run(self, *args, **kwargs) -> StepResult: - return StepResult(self, self.step_status) - - -@dataclass(frozen=True) -class StepResult: - """A dataclass to capture the result of a step.""" - - step: Step - status: StepStatus - created_at: datetime = field(default_factory=datetime.utcnow) - stderr: Optional[str] = None - stdout: Optional[str] = None - output_artifact: Any = None - exc_info: Optional[Exception] = None - - def __repr__(self) -> str: # noqa D105 - return f"{self.step.title}: {self.status.value}" - - def __str__(self) -> str: # noqa D105 - return f"{self.step.title}: {self.status.value}\n\nSTDOUT:\n{self.stdout}\n\nSTDERR:\n{self.stderr}" - - def __post_init__(self): - if self.stderr: - super().__setattr__("stderr", self.redact_secrets_from_string(self.stderr)) - if self.stdout: - super().__setattr__("stdout", self.redact_secrets_from_string(self.stdout)) - - def redact_secrets_from_string(self, value: str) -> str: - for secret in self.step.context.secrets_to_mask: - value = value.replace(secret, "********") - return value - - -@dataclass(frozen=True) -class Report: - """A dataclass to build reports to share pipelines executions results with the user.""" - - pipeline_context: PipelineContext - steps_results: List[StepResult] - created_at: datetime = field(default_factory=datetime.utcnow) - name: str = "REPORT" - filename: str = "output" - - @property - def report_output_prefix(self) -> str: # noqa D102 - return self.pipeline_context.report_output_prefix - - @property - def json_report_file_name(self) -> str: # noqa D102 - return self.filename + ".json" - - @property - def json_report_remote_storage_key(self) -> str: # noqa D102 - return f"{self.report_output_prefix}/{self.json_report_file_name}" - - @property - def failed_steps(self) -> List[StepResult]: # noqa D102 - return [step_result for step_result in self.steps_results if step_result.status is StepStatus.FAILURE] - - @property - def successful_steps(self) -> List[StepResult]: # noqa D102 - return [step_result for step_result in self.steps_results if step_result.status is StepStatus.SUCCESS] - - @property - def skipped_steps(self) -> List[StepResult]: # noqa D102 - return [step_result for step_result in self.steps_results if step_result.status is StepStatus.SKIPPED] - - @property - def success(self) -> bool: # noqa D102 - return len(self.failed_steps) == 0 and (len(self.skipped_steps) > 0 or len(self.successful_steps) > 0) - - @property - def run_duration(self) -> timedelta: # noqa D102 - return self.pipeline_context.stopped_at - self.pipeline_context.started_at - - @property - def lead_duration(self) -> timedelta: # noqa D102 - return self.pipeline_context.stopped_at - self.pipeline_context.created_at - - @property - def remote_storage_enabled(self) -> bool: # noqa D102 - return self.pipeline_context.is_ci - - 
async def save_local(self, filename: str, content: str) -> Path: - """Save the report files locally.""" - local_path = anyio.Path(f"{LOCAL_REPORTS_PATH_ROOT}/{self.report_output_prefix}/{filename}") - await local_path.parents[0].mkdir(parents=True, exist_ok=True) - await local_path.write_text(content) - return local_path - - async def save_remote(self, local_path: Path, remote_key: str, content_type: str = None) -> int: - gcs_cp_flags = None if content_type is None else [f"--content-type={content_type}"] - local_file = self.pipeline_context.dagger_client.host().directory(".", include=[str(local_path)]).file(str(local_path)) - report_upload_exit_code, _, _ = await remote_storage.upload_to_gcs( - dagger_client=self.pipeline_context.dagger_client, - file_to_upload=local_file, - key=remote_key, - bucket=self.pipeline_context.ci_report_bucket, - gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, - flags=gcs_cp_flags, - ) - gcs_uri = "gs://" + self.pipeline_context.ci_report_bucket + "/" + remote_key - public_url = f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{remote_key}" - if report_upload_exit_code != 0: - self.pipeline_context.logger.error(f"Uploading {local_path} to {gcs_uri} failed.") - else: - self.pipeline_context.logger.info(f"Uploading {local_path} to {gcs_uri} succeeded. Public URL: {public_url}") - return report_upload_exit_code - - async def save(self) -> None: - """Save the report files.""" - local_json_path = await self.save_local(self.json_report_file_name, self.to_json()) - absolute_path = await local_json_path.absolute() - self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}") - if self.remote_storage_enabled: - await self.save_remote(local_json_path, self.json_report_remote_storage_key, "application/json") - - def to_json(self) -> str: - """Create a JSON representation of the report. - - Returns: - str: The JSON representation of the report. 
- """ - return json.dumps( - { - "pipeline_name": self.pipeline_context.pipeline_name, - "run_timestamp": self.pipeline_context.started_at.isoformat(), - "run_duration": self.run_duration.total_seconds(), - "success": self.success, - "failed_steps": [s.step.__class__.__name__ for s in self.failed_steps], - "successful_steps": [s.step.__class__.__name__ for s in self.successful_steps], - "skipped_steps": [s.step.__class__.__name__ for s in self.skipped_steps], - "gha_workflow_run_url": self.pipeline_context.gha_workflow_run_url, - "pipeline_start_timestamp": self.pipeline_context.pipeline_start_timestamp, - "pipeline_end_timestamp": round(self.pipeline_context.stopped_at.timestamp()), - "pipeline_duration": round(self.pipeline_context.stopped_at.timestamp()) - self.pipeline_context.pipeline_start_timestamp, - "git_branch": self.pipeline_context.git_branch, - "git_revision": self.pipeline_context.git_revision, - "ci_context": self.pipeline_context.ci_context, - "pull_request_url": self.pipeline_context.pull_request.html_url if self.pipeline_context.pull_request else None, - "dagger_cloud_url": self.pipeline_context.dagger_cloud_url, - } - ) - - def print(self): - """Print the test report to the console in a nice way.""" - pipeline_name = self.pipeline_context.pipeline_name - main_panel_title = Text(f"{pipeline_name.upper()} - {self.name}") - main_panel_title.stylize(Style(color="blue", bold=True)) - duration_subtitle = Text(f"⏲️ Total pipeline duration for {pipeline_name}: {format_duration(self.run_duration)}") - step_results_table = Table(title="Steps results") - step_results_table.add_column("Step") - step_results_table.add_column("Result") - step_results_table.add_column("Finished after") - - for step_result in self.steps_results: - step = Text(step_result.step.title) - step.stylize(step_result.status.get_rich_style()) - result = Text(step_result.status.value) - result.stylize(step_result.status.get_rich_style()) - - if step_result.status is StepStatus.SKIPPED: - step_results_table.add_row(step, result, "N/A") - else: - run_time = format_duration((step_result.created_at - step_result.step.started_at)) - step_results_table.add_row(step, result, run_time) - - to_render = [step_results_table] - if self.failed_steps: - sub_panels = [] - for failed_step in self.failed_steps: - errors = Text(failed_step.stderr) - panel_title = Text(f"{pipeline_name} {failed_step.step.title.lower()} failures") - panel_title.stylize(Style(color="red", bold=True)) - sub_panel = Panel(errors, title=panel_title) - sub_panels.append(sub_panel) - failures_group = Group(*sub_panels) - to_render.append(failures_group) - - if self.pipeline_context.dagger_cloud_url: - self.pipeline_context.logger.info(f"🔗 View runs for commit in Dagger Cloud: {self.pipeline_context.dagger_cloud_url}") - - main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle) - console.print(main_panel) - - -@dataclass(frozen=True) -class ConnectorReport(Report): - """A dataclass to build connector test reports to share pipelines executions results with the user.""" - - @property - def report_output_prefix(self) -> str: # noqa D102 - return f"{self.pipeline_context.report_output_prefix}/{self.pipeline_context.connector.technical_name}/{self.pipeline_context.connector.version}" - - @property - def html_report_file_name(self) -> str: # noqa D102 - return self.filename + ".html" - - @property - def html_report_remote_storage_key(self) -> str: # noqa D102 - return f"{self.report_output_prefix}/{self.html_report_file_name}" - 
- @property
- def html_report_url(self) -> str: # noqa D102
- return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.html_report_remote_storage_key}"
-
- @property
- def should_be_commented_on_pr(self) -> bool: # noqa D102
- return (
- self.pipeline_context.should_save_report
- and self.pipeline_context.is_ci
- and self.pipeline_context.pull_request
- and self.pipeline_context.PRODUCTION
- )
-
- def to_json(self) -> str:
- """Create a JSON representation of the connector test report.
-
- Returns:
- str: The JSON representation of the report.
- """
- return json.dumps(
- {
- "connector_technical_name": self.pipeline_context.connector.technical_name,
- "connector_version": self.pipeline_context.connector.version,
- "run_timestamp": self.created_at.isoformat(),
- "run_duration": self.run_duration.total_seconds(),
- "success": self.success,
- "failed_steps": [s.step.__class__.__name__ for s in self.failed_steps],
- "successful_steps": [s.step.__class__.__name__ for s in self.successful_steps],
- "skipped_steps": [s.step.__class__.__name__ for s in self.skipped_steps],
- "gha_workflow_run_url": self.pipeline_context.gha_workflow_run_url,
- "pipeline_start_timestamp": self.pipeline_context.pipeline_start_timestamp,
- "pipeline_end_timestamp": round(self.created_at.timestamp()),
- "pipeline_duration": round(self.created_at.timestamp()) - self.pipeline_context.pipeline_start_timestamp,
- "git_branch": self.pipeline_context.git_branch,
- "git_revision": self.pipeline_context.git_revision,
- "ci_context": self.pipeline_context.ci_context,
- "cdk_version": self.pipeline_context.cdk_version,
- "html_report_url": self.html_report_url,
- "dagger_cloud_url": self.pipeline_context.dagger_cloud_url,
- }
- )
-
- def post_comment_on_pr(self) -> None:
- icon_url = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg"
- global_status_emoji = "✅" if self.success else "❌"
- commit_url = f"{self.pipeline_context.pull_request.html_url}/commits/{self.pipeline_context.git_revision}"
- markdown_comment = f'## {self.pipeline_context.connector.technical_name} test report (commit [`{self.pipeline_context.git_revision[:10]}`]({commit_url})) - {global_status_emoji}\n\n'
- markdown_comment += f"⏲️ Total pipeline duration: {format_duration(self.run_duration)} \n\n"
- report_data = [
- [step_result.step.title, step_result.status.get_emoji()]
- for step_result in self.steps_results
- if step_result.status is not StepStatus.SKIPPED
- ]
- markdown_comment += tabulate(report_data, headers=["Step", "Result"], tablefmt="pipe") + "\n\n"
- markdown_comment += f"🔗 [View the logs here]({self.html_report_url})\n\n"
-
- if self.pipeline_context.dagger_cloud_url:
- markdown_comment += f"☁️ [View runs for commit in Dagger Cloud]({self.pipeline_context.dagger_cloud_url})\n\n"
-
- markdown_comment += "*Please note that tests are only run on PRs ready for review. Please set your PR to draft mode to avoid flooding the CI engine and upstream services on subsequent commits.*\n"
- markdown_comment += "**You can run the same pipeline locally on this branch with the [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) tool with the following command**\n"
- markdown_comment += f"```bash\nairbyte-ci connectors --name={self.pipeline_context.connector.technical_name} test\n```\n\n"
- self.pipeline_context.pull_request.create_issue_comment(markdown_comment)
-
- async def to_html(self) -> str:
- env = Environment(loader=PackageLoader("pipelines.tests"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True)
- template = env.get_template("test_report.html.j2")
- template.globals["StepStatus"] = StepStatus
- template.globals["format_duration"] = format_duration
- local_icon_path = await Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
- template_context = {
- "connector_name": self.pipeline_context.connector.technical_name,
- "step_results": self.steps_results,
- "run_duration": self.run_duration,
- "created_at": self.created_at.isoformat(),
- "connector_version": self.pipeline_context.connector.version,
- "gha_workflow_run_url": None,
- "dagger_logs_url": None,
- "git_branch": self.pipeline_context.git_branch,
- "git_revision": self.pipeline_context.git_revision,
- "commit_url": None,
- "icon_url": local_icon_path.as_uri(),
- }
-
- if self.pipeline_context.is_ci:
- template_context["commit_url"] = f"https://github.com/airbytehq/airbyte/commit/{self.pipeline_context.git_revision}"
- template_context["gha_workflow_run_url"] = self.pipeline_context.gha_workflow_run_url
- template_context["dagger_logs_url"] = self.pipeline_context.dagger_logs_url
- template_context["dagger_cloud_url"] = self.pipeline_context.dagger_cloud_url
- template_context[
- "icon_url"
- ] = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg"
- return template.render(template_context)
-
- async def save(self) -> None:
- local_html_path = await self.save_local(self.html_report_file_name, await self.to_html())
- absolute_path = await local_html_path.resolve()
- self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}")
- if self.pipeline_context.open_report_in_browser:
- self.pipeline_context.logger.info("Opening HTML report in browser.")
- webbrowser.open(absolute_path.as_uri())
- if self.remote_storage_enabled:
- await self.save_remote(local_html_path, self.html_report_remote_storage_key, "text/html")
- self.pipeline_context.logger.info(f"HTML report uploaded to {self.html_report_url}")
- await super().save()
-
- def print(self):
- """Print the test report to the console in a nice way."""
- connector_name = self.pipeline_context.connector.technical_name
- main_panel_title = Text(f"{connector_name.upper()} - {self.name}")
- main_panel_title.stylize(Style(color="blue", bold=True))
- duration_subtitle = Text(f"⏲️ Total pipeline duration for {connector_name}: {format_duration(self.run_duration)}")
- step_results_table = Table(title="Steps results")
- step_results_table.add_column("Step")
- step_results_table.add_column("Result")
- step_results_table.add_column("Duration")
-
- for step_result in self.steps_results:
- step = Text(step_result.step.title)
- step.stylize(step_result.status.get_rich_style())
- result = Text(step_result.status.value)
- result.stylize(step_result.status.get_rich_style())
- step_results_table.add_row(step, result, format_duration(step_result.step.run_duration))
-
- details_instructions = Text("ℹ️ You can find more details, including step execution logs, in the saved HTML report.")
- to_render = [step_results_table, details_instructions]
-
- if self.pipeline_context.dagger_cloud_url:
- self.pipeline_context.logger.info(f"🔗 View runs for commit in Dagger Cloud: {self.pipeline_context.dagger_cloud_url}")
-
- main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle)
- console.print(main_panel)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/airbyte_ci.py b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
similarity index 93%
rename from airbyte-ci/connectors/pipelines/pipelines/commands/airbyte_ci.py
rename to airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
index d7ec796b8f2e2..44c88148314cf 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/commands/airbyte_ci.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
@@ -9,23 +9,20 @@
import click
from github import PullRequest
-from pipelines import github, main_logger
-from pipelines.bases import CIContext
+from pipelines import main_logger
+from pipelines.helpers import github
+from pipelines.helpers.git import get_current_git_branch, get_current_git_revision, get_modified_files_in_branch, get_modified_files_in_commit, get_modified_files_in_pull_request
+from pipelines.models.bases import CIContext
from pipelines.consts import LOCAL_PIPELINE_PACKAGE_PATH
-from pipelines.telemetry import track_command
-from pipelines.utils import (
+from pipelines.cli.telemetry import track_command
+from pipelines.helpers.utils import (
get_current_epoch_time,
- get_current_git_branch,
- get_current_git_revision,
- get_modified_files_in_branch,
- get_modified_files_in_commit,
- get_modified_files_in_pull_request,
transform_strs_to_paths,
)
-from .groups.connectors import connectors
-from .groups.metadata import metadata
-from .groups.tests import test
+from pipelines.pipeline.connectors.commands import connectors
+from pipelines.pipeline.metadata.commands import metadata
+from pipelines.pipeline.test.commands import test
# HELPERS
diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py b/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py
new file mode 100644
index 0000000000000..0b989d5a3945c
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py
@@ -0,0 +1,107 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+"""This module groups util functions used in pipelines."""
+from __future__ import annotations
+
+import sys
+from glob import glob
+from pathlib import Path
+from typing import Any
+
+import click
+from dagger import DaggerError
+from pipelines import consts, main_logger
+from pipelines.consts import GCS_PUBLIC_DOMAIN
+from pipelines.helpers import sentry_utils
+from pipelines.helpers.utils import slugify, upload_to_gcs
+from pipelines.consts import STATIC_REPORT_PREFIX
+
+class DaggerPipelineCommand(click.Command):
+ @sentry_utils.with_command_context
+ def invoke(self, ctx: click.Context) -> Any:
+ """Wrap the parent invoke in a try/except suited to handling pipeline failures.
+ Args:
+ ctx (click.Context): The invocation context.
+ Raises:
+ e: Raise whatever exception was caught.
+ Returns:
+ Any: The invocation return value.
+ """ + command_name = self.name + main_logger.info(f"Running Dagger Command {command_name}...") + main_logger.info( + "If you're running this command for the first time the Dagger engine image will be pulled, it can take a short minute..." + ) + ctx.obj["report_output_prefix"] = self.render_report_output_prefix(ctx) + dagger_logs_gcs_key = f"{ctx.obj['report_output_prefix']}/dagger-logs.txt" + try: + if not ctx.obj["show_dagger_logs"]: + dagger_log_dir = Path(f"{consts.LOCAL_REPORTS_PATH_ROOT}/{ctx.obj['report_output_prefix']}") + dagger_log_path = Path(f"{dagger_log_dir}/dagger.log").resolve() + ctx.obj["dagger_logs_path"] = dagger_log_path + main_logger.info(f"Saving dagger logs to: {dagger_log_path}") + if ctx.obj["is_ci"]: + ctx.obj["dagger_logs_url"] = f"{GCS_PUBLIC_DOMAIN}/{ctx.obj['ci_report_bucket_name']}/{dagger_logs_gcs_key}" + else: + ctx.obj["dagger_logs_url"] = None + else: + ctx.obj["dagger_logs_path"] = None + pipeline_success = super().invoke(ctx) + if not pipeline_success: + raise DaggerError(f"Dagger Command {command_name} failed.") + except DaggerError as e: + main_logger.error(f"Dagger Command {command_name} failed", exc_info=e) + sys.exit(1) + finally: + if ctx.obj.get("dagger_logs_path"): + if ctx.obj["is_local"]: + main_logger.info(f"Dagger logs saved to {ctx.obj['dagger_logs_path']}") + if ctx.obj["is_ci"]: + gcs_uri, public_url = upload_to_gcs( + ctx.obj["dagger_logs_path"], ctx.obj["ci_report_bucket_name"], dagger_logs_gcs_key, ctx.obj["ci_gcs_credentials"] + ) + main_logger.info(f"Dagger logs saved to {gcs_uri}. Public URL: {public_url}") + + @staticmethod + def render_report_output_prefix(ctx: click.Context) -> str: + """Render the report output prefix for any command in the Connector CLI. + + The goal is to standardize the output of all logs and reports generated by the CLI + related to a specific command, and to a specific CI context. + + Note: We cannot hoist this higher in the command hierarchy because only one level of + subcommands are available at the time the context is created. 
+ """ + + git_branch = ctx.obj["git_branch"] + git_revision = ctx.obj["git_revision"] + pipeline_start_timestamp = ctx.obj["pipeline_start_timestamp"] + ci_context = ctx.obj["ci_context"] + ci_job_key = ctx.obj["ci_job_key"] if ctx.obj.get("ci_job_key") else ci_context + + sanitized_branch = slugify(git_branch.replace("/", "_")) + + # get the command name for the current context, if a group then prepend the parent command name + if ctx.command_path: + cmd_components = ctx.command_path.split(" ") + cmd_components[0] = STATIC_REPORT_PREFIX + cmd = "/".join(cmd_components) + else: + cmd = None + + path_values = [ + cmd, + ci_job_key, + sanitized_branch, + pipeline_start_timestamp, + git_revision, + ] + + # check all values are defined + if None in path_values: + raise ValueError(f"Missing value required to render the report output prefix: {path_values}") + + # join all values with a slash, and convert all values to string + return "/".join(map(str, path_values)) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger_run.py b/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_run.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/dagger_run.py rename to airbyte-ci/connectors/pipelines/pipelines/cli/dagger_run.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/telemetry.py b/airbyte-ci/connectors/pipelines/pipelines/cli/telemetry.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/telemetry.py rename to airbyte-ci/connectors/pipelines/pipelines/cli/telemetry.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py b/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py deleted file mode 100644 index f1521ba00af7c..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/connectors.py +++ /dev/null @@ -1,697 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-#
-
-"""This module declares the CLI commands to run the connectors CI pipelines."""
-
-import os
-import sys
-from pathlib import Path
-from typing import List, Set, Tuple
-
-import anyio
-import click
-from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, console, get_all_connectors_in_repo
-from pipelines import main_logger
-from pipelines.bases import ConnectorWithModifiedFiles
-from pipelines.builds import run_connector_build_pipeline
-from pipelines.connector_changes.base_image_version_migration import (
- run_connector_base_image_upgrade_pipeline,
- run_connector_migration_to_base_image_pipeline,
-)
-from pipelines.connector_changes.version_bump import run_connector_version_bump_pipeline
-from pipelines.contexts import ConnectorContext, ContextState, PublishConnectorContext
-from pipelines.github import update_global_commit_status_check_for_tests
-from pipelines.pipelines.connectors import run_connectors_pipelines
-from pipelines.publish import reorder_contexts, run_connector_publish_pipeline
-from pipelines.tests import run_connector_test_pipeline
-from pipelines.utils import DaggerPipelineCommand, get_connector_modified_files, get_modified_connectors
-from rich.table import Table
-from rich.text import Text
-
-# HELPERS
-
-ALL_CONNECTORS = get_all_connectors_in_repo()
-
-
-def validate_environment(is_local: bool, use_remote_secrets: bool):
- """Check if the required environment variables exist."""
- if is_local:
- if not Path(".git").is_dir():
- raise click.UsageError("You need to run this command from the repository root.")
- else:
- required_env_vars_for_ci = [
- "GCP_GSM_CREDENTIALS",
- "CI_REPORT_BUCKET_NAME",
- "CI_GITHUB_ACCESS_TOKEN",
- ]
- for required_env_var in required_env_vars_for_ci:
- if os.getenv(required_env_var) is None:
- raise click.UsageError(f"When running in a CI context, the {required_env_var} environment variable must be set.")
- if use_remote_secrets and os.getenv("GCP_GSM_CREDENTIALS") is None:
- raise click.UsageError(
- "You have to set GCP_GSM_CREDENTIALS if you want to download secrets from GSM. Otherwise, set the --use-remote-secrets option to false."
- )
-
-
-def get_selected_connectors_with_modified_files(
- selected_names: Tuple[str],
- selected_support_levels: Tuple[str],
- selected_languages: Tuple[str],
- modified: bool,
- metadata_changes_only: bool,
- metadata_query: str,
- modified_files: Set[Path],
- enable_dependency_scanning: bool = False,
-) -> List[ConnectorWithModifiedFiles]:
- """Get the connectors that match the selected criteria.
-
- Args:
- selected_names (Tuple[str]): Selected connector names.
- selected_support_levels (Tuple[str]): Selected connector support levels.
- selected_languages (Tuple[str]): Selected connector languages.
- modified (bool): Whether to select the modified connectors.
- metadata_changes_only (bool): Whether to select only the connectors with metadata changes.
- metadata_query (str): `simpleeval` query used to filter connectors by metadata.
- modified_files (Set[Path]): The modified files.
- enable_dependency_scanning (bool): Whether to enable the dependency scanning.
- Returns:
- List[ConnectorWithModifiedFiles]: The connectors that match the selected criteria.
- """ - - if metadata_changes_only and not modified: - main_logger.info("--metadata-changes-only overrides --modified") - modified = True - - selected_modified_connectors = ( - get_modified_connectors(modified_files, ALL_CONNECTORS, enable_dependency_scanning) if modified else set() - ) - selected_connectors_by_name = {c for c in ALL_CONNECTORS if c.technical_name in selected_names} - selected_connectors_by_support_level = {connector for connector in ALL_CONNECTORS if connector.support_level in selected_support_levels} - selected_connectors_by_language = {connector for connector in ALL_CONNECTORS if connector.language in selected_languages} - selected_connectors_by_query = ( - {connector for connector in ALL_CONNECTORS if connector.metadata_query_match(metadata_query)} if metadata_query else set() - ) - - non_empty_connector_sets = [ - connector_set - for connector_set in [ - selected_connectors_by_name, - selected_connectors_by_support_level, - selected_connectors_by_language, - selected_connectors_by_query, - selected_modified_connectors, - ] - if connector_set - ] - # The selected connectors are the intersection of the selected connectors by name, support_level, language, simpleeval query and modified. - selected_connectors = set.intersection(*non_empty_connector_sets) if non_empty_connector_sets else set() - - selected_connectors_with_modified_files = [] - for connector in selected_connectors: - connector_with_modified_files = ConnectorWithModifiedFiles( - technical_name=connector.technical_name, modified_files=get_connector_modified_files(connector, modified_files) - ) - if not metadata_changes_only: - selected_connectors_with_modified_files.append(connector_with_modified_files) - else: - if connector_with_modified_files.has_metadata_change: - selected_connectors_with_modified_files.append(connector_with_modified_files) - return selected_connectors_with_modified_files - - -# COMMANDS - - -@click.group(help="Commands related to connectors and connector acceptance tests.") -@click.option("--use-remote-secrets", default=True) # specific to connectors -@click.option( - "--name", - "names", - multiple=True, - help="Only test a specific connector. Use its technical name. e.g source-pokeapi.", - type=click.Choice([c.technical_name for c in ALL_CONNECTORS]), -) -@click.option("--language", "languages", multiple=True, help="Filter connectors to test by language.", type=click.Choice(ConnectorLanguage)) -@click.option( - "--support-level", - "support_levels", - multiple=True, - help="Filter connectors to test by support_level.", - type=click.Choice(SupportLevelEnum), -) -@click.option("--modified/--not-modified", help="Only test modified connectors in the current branch.", default=False, type=bool) -@click.option( - "--metadata-changes-only/--not-metadata-changes-only", - help="Only test connectors with modified metadata files in the current branch.", - default=False, - type=bool, -) -@click.option( - "--metadata-query", - help="Filter connectors by metadata query using `simpleeval`. e.g. 'data.ab_internal.ql == 200'", - type=str, -) -@click.option("--concurrency", help="Number of connector tests pipeline to run in parallel.", default=5, type=int) -@click.option( - "--execute-timeout", - help="The maximum time in seconds for the execution of a Dagger request before an ExecuteTimeoutError is raised. 
- default=None,
- type=int,
-)
-@click.option(
- "--enable-dependency-scanning/--disable-dependency-scanning",
- help="When enabled, the dependency scanning will be performed to detect the connectors to test according to a dependency change.",
- default=False,
- type=bool,
-)
-@click.option(
- "--use-local-cdk",
- is_flag=True,
- help=("Build with the airbyte-cdk from the local repository. " "This is useful for testing changes to the CDK."),
- default=False,
- type=bool,
-)
-@click.option(
- "--enable-report-auto-open/--disable-report-auto-open",
- is_flag=True,
- help=("When enabled, finishes by opening a browser window to display an HTML report."),
- default=True,
- type=bool,
-)
-@click.pass_context
-def connectors(
- ctx: click.Context,
- use_remote_secrets: bool,
- names: Tuple[str],
- languages: Tuple[ConnectorLanguage],
- support_levels: Tuple[str],
- modified: bool,
- metadata_changes_only: bool,
- metadata_query: str,
- concurrency: int,
- execute_timeout: int,
- enable_dependency_scanning: bool,
- use_local_cdk: bool,
- enable_report_auto_open: bool,
-):
- """Group all the connectors-ci commands."""
- validate_environment(ctx.obj["is_local"], use_remote_secrets)
-
- ctx.ensure_object(dict)
- ctx.obj["use_remote_secrets"] = use_remote_secrets
- ctx.obj["concurrency"] = concurrency
- ctx.obj["execute_timeout"] = execute_timeout
- ctx.obj["use_local_cdk"] = use_local_cdk
- ctx.obj["open_report_in_browser"] = enable_report_auto_open
- ctx.obj["selected_connectors_with_modified_files"] = get_selected_connectors_with_modified_files(
- names,
- support_levels,
- languages,
- modified,
- metadata_changes_only,
- metadata_query,
- ctx.obj["modified_files"],
- enable_dependency_scanning,
- )
- log_selected_connectors(ctx.obj["selected_connectors_with_modified_files"])
-
-
-@connectors.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.")
-@click.option(
- "--code-tests-only",
- is_flag=True,
- help=("Only execute code tests. " "Metadata checks, QA, and acceptance tests will be skipped."),
- default=False,
- type=bool,
-)
-@click.option(
- "--fail-fast",
- help="When enabled, tests will fail fast.",
- default=False,
- type=bool,
- is_flag=True,
-)
-@click.option(
- "--fast-tests-only",
- help="When enabled, slow tests are skipped.",
- default=False,
- type=bool,
- is_flag=True,
-)
-@click.pass_context
-def test(
- ctx: click.Context,
- code_tests_only: bool,
- fail_fast: bool,
- fast_tests_only: bool,
-) -> bool:
- """Runs a test pipeline for the selected connectors.
-
- Args:
- ctx (click.Context): The click context.
- """ - if ctx.obj["is_ci"] and ctx.obj["pull_request"] and ctx.obj["pull_request"].draft: - main_logger.info("Skipping connectors tests for draft pull request.") - sys.exit(0) - - if ctx.obj["selected_connectors_with_modified_files"]: - update_global_commit_status_check_for_tests(ctx.obj, "pending") - else: - main_logger.warn("No connector were selected for testing.") - update_global_commit_status_check_for_tests(ctx.obj, "success") - return True - - connectors_tests_contexts = [ - ConnectorContext( - pipeline_name=f"Testing connector {connector.technical_name}", - connector=connector, - is_local=ctx.obj["is_local"], - git_branch=ctx.obj["git_branch"], - git_revision=ctx.obj["git_revision"], - ci_report_bucket=ctx.obj["ci_report_bucket_name"], - report_output_prefix=ctx.obj["report_output_prefix"], - use_remote_secrets=ctx.obj["use_remote_secrets"], - gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), - dagger_logs_url=ctx.obj.get("dagger_logs_url"), - pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), - ci_context=ctx.obj.get("ci_context"), - pull_request=ctx.obj.get("pull_request"), - ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], - fail_fast=fail_fast, - fast_tests_only=fast_tests_only, - code_tests_only=code_tests_only, - use_local_cdk=ctx.obj.get("use_local_cdk"), - ) - for connector in ctx.obj["selected_connectors_with_modified_files"] - ] - try: - anyio.run( - run_connectors_pipelines, - [connector_context for connector_context in connectors_tests_contexts], - run_connector_test_pipeline, - "Test Pipeline", - ctx.obj["concurrency"], - ctx.obj["dagger_logs_path"], - ctx.obj["execute_timeout"], - ) - except Exception as e: - main_logger.error("An error occurred while running the test pipeline", exc_info=e) - update_global_commit_status_check_for_tests(ctx.obj, "failure") - return False - - @ctx.call_on_close - def send_commit_status_check() -> None: - if ctx.obj["is_ci"]: - global_success = all(connector_context.state is ContextState.SUCCESSFUL for connector_context in connectors_tests_contexts) - update_global_commit_status_check_for_tests(ctx.obj, "success" if global_success else "failure") - - # If we reach this point, it means that all the connectors have been tested so the pipeline did its job and can exit with success. 
- return True
-
-
-@connectors.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.")
-@click.option(
- "--use-host-gradle-dist-tar",
- is_flag=True,
- help="Use gradle distTar output from host for java connectors.",
- default=False,
- type=bool,
-)
-@click.pass_context
-def build(ctx: click.Context, use_host_gradle_dist_tar: bool) -> bool:
- """Runs a build pipeline for the selected connectors."""
-
- connectors_contexts = [
- ConnectorContext(
- pipeline_name=f"Build connector {connector.technical_name}",
- connector=connector,
- is_local=ctx.obj["is_local"],
- git_branch=ctx.obj["git_branch"],
- git_revision=ctx.obj["git_revision"],
- ci_report_bucket=ctx.obj["ci_report_bucket_name"],
- report_output_prefix=ctx.obj["report_output_prefix"],
- use_remote_secrets=ctx.obj["use_remote_secrets"],
- gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
- dagger_logs_url=ctx.obj.get("dagger_logs_url"),
- pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
- ci_context=ctx.obj.get("ci_context"),
- ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
- use_local_cdk=ctx.obj.get("use_local_cdk"),
- open_report_in_browser=ctx.obj.get("open_report_in_browser"),
- use_host_gradle_dist_tar=use_host_gradle_dist_tar,
- )
- for connector in ctx.obj["selected_connectors_with_modified_files"]
- ]
- if use_host_gradle_dist_tar and not ctx.obj["is_local"]:
- raise Exception("flag --use-host-gradle-dist-tar requires --is-local")
- anyio.run(
- run_connectors_pipelines,
- connectors_contexts,
- run_connector_build_pipeline,
- "Build Pipeline",
- ctx.obj["concurrency"],
- ctx.obj["dagger_logs_path"],
- ctx.obj["execute_timeout"],
- )
-
- return True
-
-
-@connectors.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.")
-@click.option("--pre-release/--main-release", help="Use this flag if you want to publish pre-release images.", default=True, type=bool)
-@click.option(
- "--spec-cache-gcs-credentials",
- help="The service account key to upload files to the GCS bucket hosting spec cache.",
- type=click.STRING,
- required=True,
- envvar="SPEC_CACHE_GCS_CREDENTIALS",
-)
-@click.option(
- "--spec-cache-bucket-name",
- help="The name of the GCS bucket where specs will be cached.",
- type=click.STRING,
- required=True,
- envvar="SPEC_CACHE_BUCKET_NAME",
-)
-@click.option(
- "--metadata-service-gcs-credentials",
- help="The service account key to upload files to the GCS bucket hosting the metadata files.",
- type=click.STRING,
- required=True,
- envvar="METADATA_SERVICE_GCS_CREDENTIALS",
-)
-@click.option(
- "--metadata-service-bucket-name",
- help="The name of the GCS bucket where metadata files will be uploaded.",
- type=click.STRING,
- required=True,
- envvar="METADATA_SERVICE_BUCKET_NAME",
-)
-@click.option(
- "--docker-hub-username",
- help="Your username to connect to DockerHub.",
- type=click.STRING,
- required=True,
- envvar="DOCKER_HUB_USERNAME",
-)
-@click.option(
- "--docker-hub-password",
- help="Your password to connect to DockerHub.",
- type=click.STRING,
- required=True,
- envvar="DOCKER_HUB_PASSWORD",
-)
-@click.option(
- "--slack-webhook",
- help="The Slack webhook URL to send notifications to.",
- type=click.STRING,
- envvar="SLACK_WEBHOOK",
-)
-@click.option(
- "--slack-channel",
- help="The Slack channel to send notifications to.",
- type=click.STRING,
- envvar="SLACK_CHANNEL",
- default="#connector-publish-updates",
-)
-@click.pass_context
-def publish(
- ctx: click.Context,
- pre_release: bool,
- spec_cache_gcs_credentials: str,
- spec_cache_bucket_name: str,
- metadata_service_bucket_name: str,
- metadata_service_gcs_credentials: str,
- docker_hub_username: str,
- docker_hub_password: str,
- slack_webhook: str,
- slack_channel: str,
-):
- ctx.obj["spec_cache_gcs_credentials"] = spec_cache_gcs_credentials
- ctx.obj["spec_cache_bucket_name"] = spec_cache_bucket_name
- ctx.obj["metadata_service_bucket_name"] = metadata_service_bucket_name
- ctx.obj["metadata_service_gcs_credentials"] = metadata_service_gcs_credentials
- if ctx.obj["is_local"]:
- click.confirm(
- "Publishing from a local environment is not recommended and requires being logged into Airbyte's DockerHub registry. Do you want to continue?",
- abort=True,
- )
-
- publish_connector_contexts = reorder_contexts(
- [
- PublishConnectorContext(
- connector=connector,
- pre_release=pre_release,
- spec_cache_gcs_credentials=spec_cache_gcs_credentials,
- spec_cache_bucket_name=spec_cache_bucket_name,
- metadata_service_gcs_credentials=metadata_service_gcs_credentials,
- metadata_bucket_name=metadata_service_bucket_name,
- docker_hub_username=docker_hub_username,
- docker_hub_password=docker_hub_password,
- slack_webhook=slack_webhook,
- reporting_slack_channel=slack_channel,
- ci_report_bucket=ctx.obj["ci_report_bucket_name"],
- report_output_prefix=ctx.obj["report_output_prefix"],
- is_local=ctx.obj["is_local"],
- git_branch=ctx.obj["git_branch"],
- git_revision=ctx.obj["git_revision"],
- gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
- dagger_logs_url=ctx.obj.get("dagger_logs_url"),
- pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
- ci_context=ctx.obj.get("ci_context"),
- ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
- pull_request=ctx.obj.get("pull_request"),
- )
- for connector in ctx.obj["selected_connectors_with_modified_files"]
- ]
- )
-
- main_logger.warn("Concurrency is forced to 1. For stability reasons, we disable parallel publish pipelines.")
- ctx.obj["concurrency"] = 1
-
- publish_connector_contexts = anyio.run(
- run_connectors_pipelines,
- publish_connector_contexts,
- run_connector_publish_pipeline,
- "Publishing connectors",
- ctx.obj["concurrency"],
- ctx.obj["dagger_logs_path"],
- ctx.obj["execute_timeout"],
- )
- return all(context.state is ContextState.SUCCESSFUL for context in publish_connector_contexts)
-
-
-@connectors.command(cls=DaggerPipelineCommand, help="List all selected connectors.")
-@click.pass_context
-def list(
- ctx: click.Context,
-):
- selected_connectors = sorted(ctx.obj["selected_connectors_with_modified_files"], key=lambda x: x.technical_name)
- table = Table(title=f"{len(selected_connectors)} selected connectors")
- table.add_column("Modified")
- table.add_column("Connector")
- table.add_column("Language")
- table.add_column("Release stage")
- table.add_column("Version")
- table.add_column("Folder")
-
- for connector in selected_connectors:
- modified = "X" if connector.modified_files else ""
- connector_name = Text(connector.technical_name)
- language = Text(connector.language.value) if connector.language else "N/A"
- try:
- support_level = Text(connector.support_level)
- except Exception:
- support_level = "N/A"
- try:
- version = Text(connector.version)
- except Exception:
- version = "N/A"
- folder = Text(str(connector.code_directory))
- table.add_row(modified, connector_name, language, support_level, version, folder)
-
- console.print(table)
- return True
-
-
-@connectors.command(cls=DaggerPipelineCommand, help="Upgrades the base image version used by the selected connectors.")
-@click.option("--set-if-not-exists", default=True)
-@click.option(
- "--docker-hub-username",
- help="Your username to connect to DockerHub to read the registries.",
- type=click.STRING,
- required=True,
- envvar="DOCKER_HUB_USERNAME",
-)
-@click.option(
- "--docker-hub-password",
- help="Your password to connect to DockerHub to read the registries.",
- type=click.STRING,
- required=True,
- envvar="DOCKER_HUB_PASSWORD",
-)
-@click.pass_context
-def upgrade_base_image(ctx: click.Context, set_if_not_exists: bool, docker_hub_username: str, docker_hub_password: str) -> bool:
- """Upgrades the base image version used by the selected connectors."""
-
- connectors_contexts = [
- ConnectorContext(
- pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}",
- connector=connector,
- is_local=ctx.obj["is_local"],
- git_branch=ctx.obj["git_branch"],
- git_revision=ctx.obj["git_revision"],
- ci_report_bucket=ctx.obj["ci_report_bucket_name"],
- report_output_prefix=ctx.obj["report_output_prefix"],
- use_remote_secrets=ctx.obj["use_remote_secrets"],
- gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
- dagger_logs_url=ctx.obj.get("dagger_logs_url"),
- pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
- ci_context=ctx.obj.get("ci_context"),
- ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
- ci_git_user=ctx.obj["ci_git_user"],
- ci_github_access_token=ctx.obj["ci_github_access_token"],
- open_report_in_browser=False,
- docker_hub_username=docker_hub_username,
- docker_hub_password=docker_hub_password,
- )
- for connector in ctx.obj["selected_connectors_with_modified_files"]
- ]
-
- anyio.run(
- run_connectors_pipelines,
- connectors_contexts,
- run_connector_base_image_upgrade_pipeline,
- "Upgrade base image pipeline",
- ctx.obj["concurrency"],
- ctx.obj["dagger_logs_path"],
- ctx.obj["execute_timeout"],
- set_if_not_exists,
- )
-
- return True
-
-
-@connectors.command(cls=DaggerPipelineCommand, help="Bump a connector version: update metadata.yaml and changelog.")
-@click.argument("bump-type", type=click.Choice(["patch", "minor", "major"]))
-@click.argument("pull-request-number", type=str)
-@click.argument("changelog-entry", type=str)
-@click.pass_context
-def bump_version(
- ctx: click.Context,
- bump_type: str,
- pull_request_number: str,
- changelog_entry: str,
-) -> bool:
- """Bump a connector version: update metadata.yaml and changelog."""
-
- connectors_contexts = [
- ConnectorContext(
- pipeline_name=f"Bump version of connector {connector.technical_name}",
- connector=connector,
- is_local=ctx.obj["is_local"],
- git_branch=ctx.obj["git_branch"],
- git_revision=ctx.obj["git_revision"],
- ci_report_bucket=ctx.obj["ci_report_bucket_name"],
- report_output_prefix=ctx.obj["report_output_prefix"],
- use_remote_secrets=ctx.obj["use_remote_secrets"],
- gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
- dagger_logs_url=ctx.obj.get("dagger_logs_url"),
- pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
- ci_context=ctx.obj.get("ci_context"),
- ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
- ci_git_user=ctx.obj["ci_git_user"],
- ci_github_access_token=ctx.obj["ci_github_access_token"],
- open_report_in_browser=False,
- )
- for connector in ctx.obj["selected_connectors_with_modified_files"]
- ]
-
- anyio.run(
- run_connectors_pipelines,
- connectors_contexts,
- run_connector_version_bump_pipeline,
- "Version bump pipeline",
- ctx.obj["concurrency"],
- ctx.obj["dagger_logs_path"],
- ctx.obj["execute_timeout"],
- bump_type,
- changelog_entry,
- pull_request_number,
- )
-
- return True
-
-
-@connectors.command(
- cls=DaggerPipelineCommand,
- help="Make the selected connectors use our base image: remove dockerfile, update metadata.yaml and update documentation.",
-)
-@click.argument("pull-request-number", type=str)
-@click.option(
- "--docker-hub-username",
- help="Your username to connect to DockerHub to read the registries.",
- type=click.STRING,
- required=True,
- envvar="DOCKER_HUB_USERNAME",
-)
-@click.option(
- "--docker-hub-password",
- help="Your password to connect to DockerHub to read the registries.",
- type=click.STRING,
- required=True,
- envvar="DOCKER_HUB_PASSWORD",
-)
-@click.pass_context
-def migrate_to_base_image(
- ctx: click.Context,
- pull_request_number: str,
- docker_hub_username: str,
- docker_hub_password: str,
-) -> bool:
- """Migrate the selected connectors to our base image: update metadata.yaml, changelog and delete legacy files."""
-
- connectors_contexts = [
- ConnectorContext(
- pipeline_name=f"Migrate connector {connector.technical_name} to our base image",
- connector=connector,
- is_local=ctx.obj["is_local"],
- git_branch=ctx.obj["git_branch"],
- git_revision=ctx.obj["git_revision"],
- ci_report_bucket=ctx.obj["ci_report_bucket_name"],
- report_output_prefix=ctx.obj["report_output_prefix"],
- use_remote_secrets=ctx.obj["use_remote_secrets"],
- gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
- dagger_logs_url=ctx.obj.get("dagger_logs_url"),
- pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
- ci_context=ctx.obj.get("ci_context"),
- ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
- ci_git_user=ctx.obj["ci_git_user"],
- ci_github_access_token=ctx.obj["ci_github_access_token"],
- open_report_in_browser=False,
- docker_hub_username=docker_hub_username,
- docker_hub_password=docker_hub_password,
- )
- for
connector in ctx.obj["selected_connectors_with_modified_files"] - ] - - anyio.run( - run_connectors_pipelines, - connectors_contexts, - run_connector_migration_to_base_image_pipeline, - "Migration to base image pipeline", - ctx.obj["concurrency"], - ctx.obj["dagger_logs_path"], - ctx.obj["execute_timeout"], - pull_request_number, - ) - - return True - - -def log_selected_connectors(selected_connectors_with_modified_files: List[ConnectorWithModifiedFiles]) -> None: - if selected_connectors_with_modified_files: - selected_connectors_names = [c.technical_name for c in selected_connectors_with_modified_files] - main_logger.info(f"Will run on the following {len(selected_connectors_names)} connectors: {', '.join(selected_connectors_names)}.") - else: - main_logger.info("No connectors to run.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py b/airbyte-ci/connectors/pipelines/pipelines/consts.py index 2785eb228e499..0b71859085f8d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/consts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py @@ -40,3 +40,4 @@ DOCKER_TMP_VOLUME_NAME = "shared-tmp" REPO = git.Repo(search_parent_directories=True) REPO_PATH = REPO.working_tree_dir +STATIC_REPORT_PREFIX = "airbyte-ci" diff --git a/airbyte-ci/connectors/pipelines/pipelines/contexts.py b/airbyte-ci/connectors/pipelines/pipelines/contexts.py deleted file mode 100644 index 08f0366cb2ab9..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/contexts.py +++ /dev/null @@ -1,622 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -"""Module declaring context related classes.""" - -import logging -import os -from datetime import datetime -from enum import Enum -from glob import glob -from types import TracebackType -from typing import List, Optional - -import yaml -from anyio import Path -from asyncer import asyncify -from dagger import Client, Directory, File, Secret -from github import PullRequest -from pipelines import hacks -from pipelines.actions import secrets -from pipelines.bases import CIContext, ConnectorReport, ConnectorWithModifiedFiles, Report -from pipelines.github import update_commit_status_check -from pipelines.slack import send_message_to_webhook -from pipelines.utils import AIRBYTE_REPO_URL, METADATA_FILE_NAME, format_duration, sanitize_gcs_credentials - - -class ContextState(Enum): - """Enum to characterize the current context state, values are used for external representation on GitHub commit checks.""" - - INITIALIZED = {"github_state": "pending", "description": "Pipelines are being initialized..."} - RUNNING = {"github_state": "pending", "description": "Pipelines are running..."} - ERROR = {"github_state": "error", "description": "Something went wrong while running the Pipelines."} - SUCCESSFUL = {"github_state": "success", "description": "All Pipelines ran successfully."} - FAILURE = {"github_state": "failure", "description": "Pipeline failed."} - - -class PipelineContext: - """The pipeline context is used to store configuration for a specific pipeline run.""" - - PRODUCTION = bool(os.environ.get("PRODUCTION", False)) # Set this to True to enable production mode (e.g. 
to send PR comments) - - DEFAULT_EXCLUDED_FILES = ( - [".git", "airbyte-ci/connectors/pipelines/*"] - + glob("**/build", recursive=True) - + glob("**/.venv", recursive=True) - + glob("**/secrets", recursive=True) - + glob("**/__pycache__", recursive=True) - + glob("**/*.egg-info", recursive=True) - + glob("**/.vscode", recursive=True) - + glob("**/.pytest_cache", recursive=True) - + glob("**/.eggs", recursive=True) - + glob("**/.mypy_cache", recursive=True) - + glob("**/.DS_Store", recursive=True) - + glob("**/airbyte_ci_logs", recursive=True) - + glob("**/.gradle", recursive=True) - ) - - def __init__( - self, - pipeline_name: str, - is_local: bool, - git_branch: str, - git_revision: str, - gha_workflow_run_url: Optional[str] = None, - dagger_logs_url: Optional[str] = None, - pipeline_start_timestamp: Optional[int] = None, - ci_context: Optional[str] = None, - is_ci_optional: bool = False, - slack_webhook: Optional[str] = None, - reporting_slack_channel: Optional[str] = None, - pull_request: PullRequest = None, - ci_report_bucket: Optional[str] = None, - ci_gcs_credentials: Optional[str] = None, - ci_git_user: Optional[str] = None, - ci_github_access_token: Optional[str] = None, - open_report_in_browser: bool = True, - ): - """Initialize a pipeline context. - - Args: - pipeline_name (str): The pipeline name. - is_local (bool): Whether the context is for a local run or a CI run. - git_branch (str): The current git branch name. - git_revision (str): The current git revision, commit hash. - gha_workflow_run_url (Optional[str], optional): URL to the github action workflow run. Only valid for CI run. Defaults to None. - dagger_logs_url (Optional[str], optional): URL to the dagger logs. Only valid for CI run. Defaults to None. - pipeline_start_timestamp (Optional[int], optional): Timestamp at which the pipeline started. Defaults to None. - ci_context (Optional[str], optional): Pull requests, workflow dispatch or nightly build. Defaults to None. - is_ci_optional (bool, optional): Whether the CI is optional. Defaults to False. - slack_webhook (Optional[str], optional): Slack webhook to send messages to. Defaults to None. - reporting_slack_channel (Optional[str], optional): Slack channel to send messages to. Defaults to None. - pull_request (PullRequest, optional): The pull request object if the pipeline was triggered by a pull request. Defaults to None. 
- """ - self.pipeline_name = pipeline_name - self.is_local = is_local - self.git_branch = git_branch - self.git_revision = git_revision - self.gha_workflow_run_url = gha_workflow_run_url - self.dagger_logs_url = dagger_logs_url - self.pipeline_start_timestamp = pipeline_start_timestamp - self.created_at = datetime.utcnow() - self.ci_context = ci_context - self.state = ContextState.INITIALIZED - self.is_ci_optional = is_ci_optional - self.slack_webhook = slack_webhook - self.reporting_slack_channel = reporting_slack_channel - self.pull_request = pull_request - self.logger = logging.getLogger(self.pipeline_name) - self.dagger_client = None - self._report = None - self.dockerd_service = None - self.ci_gcs_credentials = sanitize_gcs_credentials(ci_gcs_credentials) if ci_gcs_credentials else None - self.ci_report_bucket = ci_report_bucket - self.ci_git_user = ci_git_user - self.ci_github_access_token = ci_github_access_token - self.started_at = None - self.stopped_at = None - self.secrets_to_mask = [] - self.open_report_in_browser = open_report_in_browser - update_commit_status_check(**self.github_commit_status) - - @property - def dagger_client(self) -> Client: # noqa D102 - return self._dagger_client - - @dagger_client.setter - def dagger_client(self, dagger_client: Client): # noqa D102 - self._dagger_client = dagger_client - - @property - def is_ci(self): # noqa D102 - return self.is_local is False - - @property - def is_pr(self): # noqa D102 - return self.ci_context == CIContext.PULL_REQUEST - - @property - def repo(self): # noqa D102 - return self.dagger_client.git(AIRBYTE_REPO_URL, keep_git_dir=True) - - @property - def report(self) -> Report: # noqa D102 - return self._report - - @report.setter - def report(self, report: Report): # noqa D102 - self._report = report - - @property - def ci_gcs_credentials_secret(self) -> Secret: - return self.dagger_client.set_secret("ci_gcs_credentials", self.ci_gcs_credentials) - - @property - def ci_github_access_token_secret(self) -> Secret: - return self.dagger_client.set_secret("ci_github_access_token", self.ci_github_access_token) - - @property - def github_commit_status(self) -> dict: - """Build a dictionary used as kwargs to the update_commit_status_check function.""" - return { - "sha": self.git_revision, - "state": self.state.value["github_state"], - "target_url": self.gha_workflow_run_url, - "description": self.state.value["description"], - "context": self.pipeline_name, - "should_send": self.is_pr, - "logger": self.logger, - "is_optional": self.is_ci_optional, - } - - @property - def should_send_slack_message(self) -> bool: - return self.slack_webhook is not None and self.reporting_slack_channel is not None - - @property - def has_dagger_cloud_token(self) -> bool: - return "_EXPERIMENTAL_DAGGER_CLOUD_TOKEN" in os.environ - - @property - def dagger_cloud_url(self) -> str: - """Gets the link to the Dagger Cloud runs page for the current commit.""" - if self.is_local or not self.has_dagger_cloud_token: - return None - - return f"https://alpha.dagger.cloud/changeByPipelines?filter=dagger.io/git.ref:{self.git_revision}" - - def get_repo_file(self, file_path: str) -> File: - """Get a file from the current repository. - - The file is extracted from the host file system. - - Args: - file_path (str): Path to the file to get. - - Returns: - Path: The selected repo file. 
- """ - return self.dagger_client.host().file(file_path) - - def get_repo_dir(self, subdir: str = ".", exclude: Optional[List[str]] = None, include: Optional[List[str]] = None) -> Directory: - """Get a directory from the current repository. - - The directory is extracted from the host file system. - A couple of files or directories that could corrupt builds are exclude by default (check DEFAULT_EXCLUDED_FILES). - - Args: - subdir (str, optional): Path to the subdirectory to get. Defaults to "." to get the full repository. - exclude ([List[str], optional): List of files or directories to exclude from the directory. Defaults to None. - include ([List[str], optional): List of files or directories to include in the directory. Defaults to None. - - Returns: - Directory: The selected repo directory. - """ - if exclude is None: - exclude = self.DEFAULT_EXCLUDED_FILES - else: - exclude += self.DEFAULT_EXCLUDED_FILES - exclude = list(set(exclude)) - exclude.sort() # sort to make sure the order is always the same to not burst the cache. Casting exclude to set can change the order - if subdir != ".": - subdir = f"{subdir}/" if not subdir.endswith("/") else subdir - exclude = [f.replace(subdir, "") for f in exclude if subdir in f] - return self.dagger_client.host().directory(subdir, exclude=exclude, include=include) - - def create_slack_message(self) -> str: - raise NotImplementedError() - - async def __aenter__(self): - """Perform setup operation for the PipelineContext. - - Updates the current commit status on Github. - - Raises: - Exception: An error is raised when the context was not initialized with a Dagger client - Returns: - PipelineContext: A running instance of the PipelineContext. - """ - if self.dagger_client is None: - raise Exception("A Pipeline can't be entered with an undefined dagger_client") - self.state = ContextState.RUNNING - self.started_at = datetime.utcnow() - self.logger.info("Caching the latest CDK version...") - await hacks.cache_latest_cdk(self.dagger_client) - await asyncify(update_commit_status_check)(**self.github_commit_status) - if self.should_send_slack_message: - await asyncify(send_message_to_webhook)(self.create_slack_message(), self.reporting_slack_channel, self.slack_webhook) - return self - - @staticmethod - def determine_final_state(report: Optional[Report], exception_value: Optional[BaseException]) -> ContextState: - """Determine the final state of the context from the report or the exception value. - - Args: - report (Optional[Report]): The pipeline report if any. - exception_value (Optional[BaseException]): The exception value if an exception was raised in the context execution, None otherwise. - Returns: - ContextState: The final state of the context. - """ - if exception_value is not None or report is None: - return ContextState.ERROR - if report is not None and report.failed_steps: - return ContextState.FAILURE - if report is not None and report.success: - return ContextState.SUCCESSFUL - raise Exception( - f"The final state of the context could not be determined for the report and exception value provided. Report: {report}, Exception: {exception_value}" - ) - - async def __aexit__( - self, exception_type: Optional[type[BaseException]], exception_value: Optional[BaseException], traceback: Optional[TracebackType] - ) -> bool: - """Perform teardown operation for the PipelineContext. - - On the context exit the following operations will happen: - - Log the error value if an error was handled. - - Log the test report. 
- - Update the commit status check on GitHub if running in a CI environment. - - It should gracefully handle all the execution errors that happened and always upload a test report and update commit status check. - - Args: - exception_type (Optional[type[BaseException]]): The exception type if an exception was raised in the context execution, None otherwise. - exception_value (Optional[BaseException]): The exception value if an exception was raised in the context execution, None otherwise. - traceback (Optional[TracebackType]): The traceback if an exception was raised in the context execution, None otherwise. - Returns: - bool: Whether the teardown operation ran successfully. - """ - self.state = self.determine_final_state(self.report, exception_value) - self.stopped_at = datetime.utcnow() - - if exception_value: - self.logger.error("An error was handled by the Pipeline", exc_info=True) - if self.report is None: - self.logger.error("No test report was provided. This is probably due to an upstream error") - self.report = Report(self, steps_results=[]) - - self.report.print() - - await asyncify(update_commit_status_check)(**self.github_commit_status) - if self.should_send_slack_message: - await asyncify(send_message_to_webhook)(self.create_slack_message(), self.reporting_slack_channel, self.slack_webhook) - # supress the exception if it was handled - return True - - -class ConnectorContext(PipelineContext): - """The connector context is used to store configuration for a specific connector pipeline run.""" - - DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE = "airbyte/connector-acceptance-test:dev" - - def __init__( - self, - pipeline_name: str, - connector: ConnectorWithModifiedFiles, - is_local: bool, - git_branch: bool, - git_revision: bool, - report_output_prefix: str, - use_remote_secrets: bool = True, - ci_report_bucket: Optional[str] = None, - ci_gcs_credentials: Optional[str] = None, - ci_git_user: Optional[str] = None, - ci_github_access_token: Optional[str] = None, - connector_acceptance_test_image: Optional[str] = DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE, - gha_workflow_run_url: Optional[str] = None, - dagger_logs_url: Optional[str] = None, - pipeline_start_timestamp: Optional[int] = None, - ci_context: Optional[str] = None, - slack_webhook: Optional[str] = None, - reporting_slack_channel: Optional[str] = None, - pull_request: PullRequest = None, - should_save_report: bool = True, - fail_fast: bool = False, - fast_tests_only: bool = False, - code_tests_only: bool = False, - use_local_cdk: bool = False, - use_host_gradle_dist_tar: bool = False, - open_report_in_browser: bool = True, - docker_hub_username: Optional[str] = None, - docker_hub_password: Optional[str] = None, - ): - """Initialize a connector context. - - Args: - connector (Connector): The connector under test. - is_local (bool): Whether the context is for a local run or a CI run. - git_branch (str): The current git branch name. - git_revision (str): The current git revision, commit hash. - report_output_prefix (str): The S3 key to upload the test report to. - use_remote_secrets (bool, optional): Whether to download secrets for GSM or use the local secrets. Defaults to True. - connector_acceptance_test_image (Optional[str], optional): The image to use to run connector acceptance tests. Defaults to DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE. - gha_workflow_run_url (Optional[str], optional): URL to the github action workflow run. Only valid for CI run. Defaults to None. 
- dagger_logs_url (Optional[str], optional): URL to the dagger logs. Only valid for CI run. Defaults to None. - pipeline_start_timestamp (Optional[int], optional): Timestamp at which the pipeline started. Defaults to None. - ci_context (Optional[str], optional): Pull requests, workflow dispatch or nightly build. Defaults to None. - slack_webhook (Optional[str], optional): The slack webhook to send messages to. Defaults to None. - reporting_slack_channel (Optional[str], optional): The slack channel to send messages to. Defaults to None. - pull_request (PullRequest, optional): The pull request object if the pipeline was triggered by a pull request. Defaults to None. - fail_fast (bool, optional): Whether to fail fast. Defaults to False. - fast_tests_only (bool, optional): Whether to run only fast tests. Defaults to False. - code_tests_only (bool, optional): Whether to ignore non-code tests like QA and metadata checks. Defaults to False. - use_host_gradle_dist_tar (bool, optional): Used when developing java connectors with gradle. Defaults to False. - open_report_in_browser (bool, optional): Open HTML report in browser window. Defaults to True. - docker_hub_username (Optional[str], optional): Docker Hub username to use to read registries. Defaults to None. - docker_hub_password (Optional[str], optional): Docker Hub password to use to read registries. Defaults to None. - """ - - self.pipeline_name = pipeline_name - self.connector = connector - self.use_remote_secrets = use_remote_secrets - self.connector_acceptance_test_image = connector_acceptance_test_image - self.report_output_prefix = report_output_prefix - self._secrets_dir = None - self._updated_secrets_dir = None - self.cdk_version = None - self.should_save_report = should_save_report - self.fail_fast = fail_fast - self.fast_tests_only = fast_tests_only - self.code_tests_only = code_tests_only - self.use_local_cdk = use_local_cdk - self.use_host_gradle_dist_tar = use_host_gradle_dist_tar - self.open_report_in_browser = open_report_in_browser - self.docker_hub_username = docker_hub_username - self.docker_hub_password = docker_hub_password - - super().__init__( - pipeline_name=pipeline_name, - is_local=is_local, - git_branch=git_branch, - git_revision=git_revision, - gha_workflow_run_url=gha_workflow_run_url, - dagger_logs_url=dagger_logs_url, - pipeline_start_timestamp=pipeline_start_timestamp, - ci_context=ci_context, - slack_webhook=slack_webhook, - reporting_slack_channel=reporting_slack_channel, - pull_request=pull_request, - ci_report_bucket=ci_report_bucket, - ci_gcs_credentials=ci_gcs_credentials, - ci_git_user=ci_git_user, - ci_github_access_token=ci_github_access_token, - open_report_in_browser=open_report_in_browser, - ) - - @property - def modified_files(self): - return self.connector.modified_files - - @property - def secrets_dir(self) -> Directory: # noqa D102 - return self._secrets_dir - - @secrets_dir.setter - def secrets_dir(self, secrets_dir: Directory): # noqa D102 - self._secrets_dir = secrets_dir - - @property - def updated_secrets_dir(self) -> Directory: # noqa D102 - return self._updated_secrets_dir - - @updated_secrets_dir.setter - def updated_secrets_dir(self, updated_secrets_dir: Directory): # noqa D102 - self._updated_secrets_dir = updated_secrets_dir - - @property - def connector_acceptance_test_source_dir(self) -> Directory: # noqa D102 - return self.get_repo_dir("airbyte-integrations/bases/connector-acceptance-test") - - @property - def should_save_updated_secrets(self) -> bool: # noqa D102 - return 
self.use_remote_secrets and self.updated_secrets_dir is not None - - @property - def host_image_export_dir_path(self) -> str: - return "." if self.is_ci else "/tmp" - - @property - def metadata_path(self) -> Path: - return self.connector.code_directory / METADATA_FILE_NAME - - @property - def metadata(self) -> dict: - return yaml.safe_load(self.metadata_path.read_text())["data"] - - @property - def docker_repository(self) -> str: - return self.metadata["dockerRepository"] - - @property - def docker_image_tag(self) -> str: - return self.metadata["dockerImageTag"] - - @property - def docker_image(self) -> str: - return f"{self.docker_repository}:{self.docker_image_tag}" - - async def get_connector_dir(self, exclude=None, include=None) -> Directory: - """Get the connector under test source code directory. - - Args: - exclude ([List[str], optional): List of files or directories to exclude from the directory. Defaults to None. - include ([List[str], optional): List of files or directories to include in the directory. Defaults to None. - - Returns: - Directory: The connector under test source code directory. - """ - vanilla_connector_dir = self.get_repo_dir(str(self.connector.code_directory), exclude=exclude, include=include) - return await vanilla_connector_dir.with_timestamps(1) - - async def __aexit__( - self, exception_type: Optional[type[BaseException]], exception_value: Optional[BaseException], traceback: Optional[TracebackType] - ) -> bool: - """Perform teardown operation for the ConnectorContext. - - On the context exit the following operations will happen: - - Upload updated connector secrets back to Google Secret Manager - - Write a test report in JSON format locally and to S3 if running in a CI environment - - Update the commit status check on GitHub if running in a CI environment. - It should gracefully handle the execution error that happens and always upload a test report and update commit status check. - Args: - exception_type (Optional[type[BaseException]]): The exception type if an exception was raised in the context execution, None otherwise. - exception_value (Optional[BaseException]): The exception value if an exception was raised in the context execution, None otherwise. - traceback (Optional[TracebackType]): The traceback if an exception was raised in the context execution, None otherwise. - Returns: - bool: Whether the teardown operation ran successfully. - """ - self.stopped_at = datetime.utcnow() - self.state = self.determine_final_state(self.report, exception_value) - if exception_value: - self.logger.error("An error got handled by the ConnectorContext", exc_info=True) - if self.report is None: - self.logger.error("No test report was provided. 
This is probably due to an upstream error") - self.report = ConnectorReport(self, []) - - if self.should_save_updated_secrets: - await secrets.upload(self) - - self.report.print() - - if self.should_save_report: - await self.report.save() - - if self.report.should_be_commented_on_pr: - self.report.post_comment_on_pr() - - await asyncify(update_commit_status_check)(**self.github_commit_status) - - if self.should_send_slack_message: - await asyncify(send_message_to_webhook)(self.create_slack_message(), self.reporting_slack_channel, self.slack_webhook) - - # Supress the exception if any - return True - - def create_slack_message(self) -> str: - raise NotImplementedError - - -class PublishConnectorContext(ConnectorContext): - def __init__( - self, - connector: ConnectorWithModifiedFiles, - pre_release: bool, - spec_cache_gcs_credentials: str, - spec_cache_bucket_name: str, - metadata_service_gcs_credentials: str, - metadata_bucket_name: str, - docker_hub_username: str, - docker_hub_password: str, - slack_webhook: str, - reporting_slack_channel: str, - ci_report_bucket: str, - report_output_prefix: str, - is_local: bool, - git_branch: bool, - git_revision: bool, - gha_workflow_run_url: Optional[str] = None, - dagger_logs_url: Optional[str] = None, - pipeline_start_timestamp: Optional[int] = None, - ci_context: Optional[str] = None, - ci_gcs_credentials: str = None, - pull_request: PullRequest = None, - ): - self.pre_release = pre_release - self.spec_cache_bucket_name = spec_cache_bucket_name - self.metadata_bucket_name = metadata_bucket_name - self.spec_cache_gcs_credentials = sanitize_gcs_credentials(spec_cache_gcs_credentials) - self.metadata_service_gcs_credentials = sanitize_gcs_credentials(metadata_service_gcs_credentials) - pipeline_name = f"Publish {connector.technical_name}" - pipeline_name = pipeline_name + " (pre-release)" if pre_release else pipeline_name - - super().__init__( - pipeline_name=pipeline_name, - connector=connector, - report_output_prefix=report_output_prefix, - ci_report_bucket=ci_report_bucket, - is_local=is_local, - git_branch=git_branch, - git_revision=git_revision, - gha_workflow_run_url=gha_workflow_run_url, - dagger_logs_url=dagger_logs_url, - pipeline_start_timestamp=pipeline_start_timestamp, - ci_context=ci_context, - slack_webhook=slack_webhook, - reporting_slack_channel=reporting_slack_channel, - ci_gcs_credentials=ci_gcs_credentials, - should_save_report=True, - docker_hub_username=docker_hub_username, - docker_hub_password=docker_hub_password, - ) - - @property - def docker_hub_username_secret(self) -> Secret: - return self.dagger_client.set_secret("docker_hub_username", self.docker_hub_username) - - @property - def docker_hub_password_secret(self) -> Secret: - return self.dagger_client.set_secret("docker_hub_password", self.docker_hub_password) - - @property - def metadata_service_gcs_credentials_secret(self) -> Secret: - return self.dagger_client.set_secret("metadata_service_gcs_credentials", self.metadata_service_gcs_credentials) - - @property - def spec_cache_gcs_credentials_secret(self) -> Secret: - return self.dagger_client.set_secret("spec_cache_gcs_credentials", self.spec_cache_gcs_credentials) - - @property - def docker_image_tag(self): - # get the docker image tag from the parent class - metadata_tag = super().docker_image_tag - if self.pre_release: - return f"{metadata_tag}-dev.{self.git_revision[:10]}" - else: - return metadata_tag - - def create_slack_message(self) -> str: - docker_hub_url = 
f"https://hub.docker.com/r/{self.connector.metadata['dockerRepository']}/tags" - message = f"*Publish <{docker_hub_url}|{self.docker_image}>*\n" - if self.is_ci: - message += f"🤖 <{self.gha_workflow_run_url}|GitHub Action workflow>\n" - else: - message += "🧑‍💻 Local run\n" - message += f"*Connector:* {self.connector.technical_name}\n" - message += f"*Version:* {self.connector.version}\n" - branch_url = f"https://github.com/airbytehq/airbyte/tree/{self.git_branch}" - message += f"*Branch:* <{branch_url}|{self.git_branch}>\n" - commit_url = f"https://github.com/airbytehq/airbyte/commit/{self.git_revision}" - message += f"*Commit:* <{commit_url}|{self.git_revision[:10]}>\n" - if self.state in [ContextState.INITIALIZED, ContextState.RUNNING]: - message += "🟠" - if self.state is ContextState.SUCCESSFUL: - message += "🟢" - if self.state in [ContextState.FAILURE, ContextState.ERROR]: - message += "🔴" - message += f" {self.state.value['description']}\n" - if self.state is ContextState.SUCCESSFUL: - message += f"⏲️ Run duration: {format_duration(self.report.run_duration)}\n" - if self.state is ContextState.FAILURE: - message += "\ncc. " # @dev-connector-ops - return message diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/commands/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/dagger/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/commands/groups/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/dagger/actions/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py new file mode 100644 index 0000000000000..756d5f5c518a5 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py @@ -0,0 +1,48 @@ +from pipelines.pipeline.connectors.context import ConnectorContext + + +from dagger import Container +from dagger.engine._version import CLI_VERSION as dagger_engine_version + + +import importlib.util + + +async def finalize_build(context: ConnectorContext, connector_container: Container) -> Container: + """Finalize build by adding dagger engine version label and running finalize_build.sh or finalize_build.py if present in the connector directory.""" + connector_container = connector_container.with_label("io.dagger.engine_version", dagger_engine_version) + connector_dir_with_finalize_script = await context.get_connector_dir(include=["finalize_build.sh", "finalize_build.py"]) + finalize_scripts = await connector_dir_with_finalize_script.entries() + if not finalize_scripts: + return connector_container + + # We don't want finalize scripts to override the entrypoint so we keep it in memory to reset it after finalization + original_entrypoint = await connector_container.entrypoint() + + has_finalize_bash_script = "finalize_build.sh" in finalize_scripts + has_finalize_python_script = "finalize_build.py" in finalize_scripts + if has_finalize_python_script and has_finalize_bash_script: + raise Exception("Connector has both finalize_build.sh and finalize_build.py, please remove one of them") + + if has_finalize_python_script: + 
context.logger.info(f"{context.connector.technical_name} has a finalize_build.py script, running it to finalize build...") + module_path = context.connector.code_directory / "finalize_build.py" + connector_finalize_module_spec = importlib.util.spec_from_file_location( + f"{context.connector.code_directory.name}_finalize", module_path + ) + connector_finalize_module = importlib.util.module_from_spec(connector_finalize_module_spec) + connector_finalize_module_spec.loader.exec_module(connector_finalize_module) + try: + connector_container = await connector_finalize_module.finalize_build(context, connector_container) + except AttributeError: + raise Exception("Connector has a finalize_build.py script but it doesn't have a finalize_build function.") + + if has_finalize_bash_script: + context.logger.info(f"{context.connector.technical_name} has finalize_build.sh script, running it to finalize build...") + connector_container = ( + connector_container.with_file("/tmp/finalize_build.sh", connector_dir_with_finalize_script.file("finalize_build.sh")) + .with_entrypoint("sh") + .with_exec(["/tmp/finalize_build.sh"]) + ) + + return connector_container.with_entrypoint(original_entrypoint) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py new file mode 100644 index 0000000000000..ccc79acfb96d6 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py @@ -0,0 +1,80 @@ +from pipelines.pipeline.connectors.context import ConnectorContext + + +from dagger import Container, Platform +BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = { + "destination-clickhouse": { + "dockerfile": "clickhouse.Dockerfile", + "dbt_adapter": "dbt-clickhouse>=1.4.0", + "integration_name": "clickhouse", + "normalization_image": "airbyte/normalization-clickhouse:0.4.3", + "supports_in_connector_normalization": False, + "yum_packages": [], + }, + "destination-duckdb": { + "dockerfile": "duckdb.Dockerfile", + "dbt_adapter": "dbt-duckdb==1.0.1", + "integration_name": "duckdb", + "normalization_image": "airbyte/normalization-duckdb:0.4.3", + "supports_in_connector_normalization": False, + "yum_packages": [], + }, + "destination-mssql": { + "dockerfile": "mssql.Dockerfile", + "dbt_adapter": "dbt-sqlserver==1.0.0", + "integration_name": "mssql", + "normalization_image": "airbyte/normalization-mssql:0.4.3", + "supports_in_connector_normalization": True, + "yum_packages": [], + }, + "destination-mysql": { + "dockerfile": "mysql.Dockerfile", + "dbt_adapter": "dbt-mysql==1.0.0", + "integration_name": "mysql", + "normalization_image": "airbyte/normalization-mysql:0.4.3", + "supports_in_connector_normalization": False, + "yum_packages": [], + }, + "destination-oracle": { + "dockerfile": "oracle.Dockerfile", + "dbt_adapter": "dbt-oracle==0.4.3", + "integration_name": "oracle", + "normalization_image": "airbyte/normalization-oracle:0.4.3", + "supports_in_connector_normalization": False, + "yum_packages": [], + }, + "destination-postgres": { + "dockerfile": "Dockerfile", + "dbt_adapter": "dbt-postgres==1.0.0", + "integration_name": "postgres", + "normalization_image": "airbyte/normalization:0.4.3", + "supports_in_connector_normalization": False, + "yum_packages": [], + }, + "destination-redshift": { + "dockerfile": "redshift.Dockerfile", + "dbt_adapter": "dbt-redshift==1.0.0", + "integration_name": "redshift", + "normalization_image": 
"airbyte/normalization-redshift:0.4.3", + "supports_in_connector_normalization": True, + "yum_packages": [], + }, + "destination-tidb": { + "dockerfile": "tidb.Dockerfile", + "dbt_adapter": "dbt-tidb==1.0.1", + "integration_name": "tidb", + "normalization_image": "airbyte/normalization-tidb:0.4.3", + "supports_in_connector_normalization": True, + "yum_packages": [], + }, +} +DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = { + **BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION, + **{f"{k}-strict-encrypt": v for k, v in BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION.items()}, +} + + +def with_normalization(context: ConnectorContext, build_platform: Platform) -> Container: + return context.dagger_client.container(platform=build_platform).from_( + DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["normalization_image"] + ) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/internal_tools.py new file mode 100644 index 0000000000000..3aa661f20bbe2 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/internal_tools.py @@ -0,0 +1,37 @@ +from pipelines.dagger.actions.python.pipx import with_installed_pipx_package +from pipelines.dagger.containers.python import with_python_base +from pipelines.internal_tools.internal import INTERNAL_TOOL_PATHS +from pipelines.pipeline.connectors.context import PipelineContext + + +from dagger import Container, Secret + + +async def with_ci_credentials(context: PipelineContext, gsm_secret: Secret) -> Container: + """Install the ci_credentials package in a python environment. + + Args: + context (PipelineContext): The current test context, providing the repository directory from which the ci_credentials sources will be pulled. + gsm_secret (Secret): The secret holding GCP_GSM_CREDENTIALS env variable value. + + Returns: + Container: A python environment with the ci_credentials package installed. + """ + python_base_environment: Container = with_python_base(context) + ci_credentials = await with_installed_pipx_package(context, python_base_environment, INTERNAL_TOOL_PATHS.CI_CREDENTIALS.value) + ci_credentials = ci_credentials.with_env_variable("VERSION", "dagger_ci") + return ci_credentials.with_secret_variable("GCP_GSM_CREDENTIALS", gsm_secret).with_workdir("/") + + +async def with_connector_ops(context: PipelineContext) -> Container: + """Installs the connector_ops package in a Container running Python > 3.10 with git.. + + Args: + context (PipelineContext): The current test context, providing the repository directory from which the ci_connector_sources sources will be pulled. + + Returns: + Container: A python environment container with connector_ops installed. 
+    """
+    python_base_environment: Container = with_python_base(context)
+
+    return await with_installed_pipx_package(context, python_base_environment, INTERNAL_TOOL_PATHS.CONNECTOR_OPS.value)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/__init__.py
similarity index 100%
rename from airbyte-ci/connectors/pipelines/pipelines/connector_changes/__init__.py
rename to airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/__init__.py
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py
new file mode 100644
index 0000000000000..bf870f4d3275e
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py
@@ -0,0 +1,267 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+from pathlib import Path
+import re
+from pipelines.dagger.actions.python.poetry import _install_python_dependencies_from_poetry
+from pipelines.dagger.containers.python import with_python_base, with_testing_dependencies
+from pipelines.helpers.utils import check_path_in_workdir, get_file_contents
+from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext
+
+
+from dagger import Container, Directory
+
+
+from typing import List, Optional
+
+
+def with_python_package(
+    context: PipelineContext,
+    python_environment: Container,
+    package_source_code_path: str,
+    exclude: Optional[List] = None,
+    include: Optional[List] = None,
+) -> Container:
+    """Load a python package's source code into a python environment container.
+
+    Args:
+        context (PipelineContext): The current test context, providing the repository directory from which the python sources will be pulled.
+        python_environment (Container): An existing python environment in which the package will be installed.
+        package_source_code_path (str): The local path to the package source code.
+        exclude (Optional[List]): A list of files or directories to exclude from the python package source code.
+        include (Optional[List]): A list of files or directories to only include in the python package source code.
+
+    Returns:
+        Container: A python environment container with the python package source code.
+    """
+    package_source_code_directory: Directory = context.get_repo_dir(package_source_code_path, exclude=exclude, include=include)
+    work_dir_path = f"/{package_source_code_path}"
+    container = python_environment.with_mounted_directory(work_dir_path, package_source_code_directory).with_workdir(work_dir_path)
+    return container
+
+
+async def find_local_dependencies_in_setup_py(python_package: Container) -> List[str]:
+    """Find local dependencies of a python package in its setup.py file.
+
+    Args:
+        python_package (Container): A python package container.
+
+    Returns:
+        List[str]: Paths to the local dependencies relative to the airbyte repo.
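+
+    Example (illustrative): a requires.txt entry generated by egg_info such as
+        `airbyte-cdk @ file:///airbyte-cdk/python` yields "airbyte-cdk/python".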
+    """
+    setup_file_content = await get_file_contents(python_package, "setup.py")
+    if not setup_file_content:
+        return []
+
+    local_setup_dependency_paths = []
+    with_egg_info = python_package.with_exec(["python", "setup.py", "egg_info"])
+    egg_info_output = await with_egg_info.stdout()
+    dependency_in_requires_txt = []
+    for line in egg_info_output.split("\n"):
+        if line.startswith("writing requirements to"):
+            # Find the path to the requirements.txt file that was generated by calling egg_info
+            requires_txt_path = line.replace("writing requirements to", "").strip()
+            requirements_txt_content = await with_egg_info.file(requires_txt_path).contents()
+            dependency_in_requires_txt = requirements_txt_content.split("\n")
+
+    for dependency_line in dependency_in_requires_txt:
+        if "file://" in dependency_line:
+            match = re.search(r"file:///(.+)", dependency_line)
+            if match:
+                local_setup_dependency_paths.append(match.group(1))
+    return local_setup_dependency_paths
+
+
+async def find_local_dependencies_in_requirements_txt(python_package: Container, package_source_code_path: str) -> List[str]:
+    """Find local dependencies of a python package in a requirements.txt file.
+
+    Args:
+        python_package (Container): A python environment container with the python package source code.
+        package_source_code_path (str): The local path to the python package source code.
+
+    Returns:
+        List[str]: Paths to the local dependencies relative to the airbyte repo.
+    """
+    requirements_txt_content = await get_file_contents(python_package, "requirements.txt")
+    if not requirements_txt_content:
+        return []
+
+    local_requirements_dependency_paths = []
+    for line in requirements_txt_content.split("\n"):
+        # Some packages declare themselves as a requirement in requirements.txt.
+        # Without the `line != "-e ."` check, a package would be considered a dependency of itself, which can cause an infinite loop.
+        if line.startswith("-e .") and line != "-e .":
+            local_dependency_path = Path(line[3:])
+            package_source_code_path = Path(package_source_code_path)
+            local_dependency_path = str((package_source_code_path / local_dependency_path).resolve().relative_to(Path.cwd()))
+            local_requirements_dependency_paths.append(local_dependency_path)
+    return local_requirements_dependency_paths
+
+
+async def find_local_python_dependencies(
+    context: PipelineContext,
+    package_source_code_path: str,
+    search_dependencies_in_setup_py: bool = True,
+    search_dependencies_in_requirements_txt: bool = True,
+) -> List[str]:
+    """Find local python dependencies of a python package. The dependencies are found in the setup.py and requirements.txt files.
+
+    Args:
+        context (PipelineContext): The current pipeline context, providing a dagger client and a repository directory.
+        package_source_code_path (str): The local path to the python package source code.
+        search_dependencies_in_setup_py (bool, optional): Whether to search for local dependencies in the setup.py file. Defaults to True.
+        search_dependencies_in_requirements_txt (bool, optional): Whether to search for local dependencies in the requirements.txt file. Defaults to True.
+
+    Returns:
+        List[str]: Paths to the local dependencies relative to the airbyte repo.
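+
+    Example (illustrative sketch, the connector path is arbitrary):
+        >>> deps = await find_local_python_dependencies(context, "airbyte-integrations/connectors/source-pokeapi")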
+    """
+    python_environment = with_python_base(context)
+    container = with_python_package(context, python_environment, package_source_code_path)
+
+    local_dependency_paths = []
+    if search_dependencies_in_setup_py:
+        local_dependency_paths += await find_local_dependencies_in_setup_py(container)
+    if search_dependencies_in_requirements_txt:
+        local_dependency_paths += await find_local_dependencies_in_requirements_txt(container, package_source_code_path)
+
+    transitive_dependency_paths = []
+    for local_dependency_path in local_dependency_paths:
+        # Transitive local dependencies are installed through their setup.py files, not their requirements.txt files.
+        transitive_dependency_paths += await find_local_python_dependencies(context, local_dependency_path, True, False)
+
+    all_dependency_paths = local_dependency_paths + transitive_dependency_paths
+    if all_dependency_paths:
+        context.logger.debug(f"Found local dependencies for {package_source_code_path}: {all_dependency_paths}")
+    return all_dependency_paths
+
+
+def _install_python_dependencies_from_setup_py(
+    container: Container,
+    additional_dependency_groups: Optional[List] = None,
+) -> Container:
+    install_connector_package_cmd = ["pip", "install", "."]
+    container = container.with_exec(install_connector_package_cmd)
+
+    if additional_dependency_groups:
+        # e.g. .[dev,tests]
+        group_string = f".[{','.join(additional_dependency_groups)}]"
+        group_install_cmd = ["pip", "install", group_string]
+
+        container = container.with_exec(group_install_cmd)
+
+    return container
+
+
+def _install_python_dependencies_from_requirements_txt(container: Container) -> Container:
+    install_requirements_cmd = ["pip", "install", "-r", "requirements.txt"]
+    return container.with_exec(install_requirements_cmd)
+
+
+async def with_installed_python_package(
+    context: PipelineContext,
+    python_environment: Container,
+    package_source_code_path: str,
+    additional_dependency_groups: Optional[List] = None,
+    exclude: Optional[List] = None,
+    include: Optional[List] = None,
+) -> Container:
+    """Install a python package in a python environment container.
+
+    Args:
+        context (PipelineContext): The current test context, providing the repository directory from which the python sources will be pulled.
+        python_environment (Container): An existing python environment in which the package will be installed.
+        package_source_code_path (str): The local path to the package source code.
+        additional_dependency_groups (Optional[List]): extras_require dependency groups of setup.py to install. Defaults to None.
+        exclude (Optional[List]): A list of files or directories to exclude from the python package source code.
+        include (Optional[List]): A list of files or directories to only include in the python package source code.
+
+    Returns:
+        Container: A python environment container with the python package installed.
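+
+    Example (illustrative sketch, the package path is arbitrary):
+        >>> base = with_python_base(context)
+        >>> connector_env = await with_installed_python_package(context, base, "airbyte-integrations/connectors/source-pokeapi")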
+    """
+    container = with_python_package(context, python_environment, package_source_code_path, exclude=exclude, include=include)
+
+    local_dependencies = await find_local_python_dependencies(context, package_source_code_path)
+
+    for dependency_directory in local_dependencies:
+        container = container.with_mounted_directory("/" + dependency_directory, context.get_repo_dir(dependency_directory))
+
+    has_setup_py = await check_path_in_workdir(container, "setup.py")
+    has_requirements_txt = await check_path_in_workdir(container, "requirements.txt")
+    has_pyproject_toml = await check_path_in_workdir(container, "pyproject.toml")
+
+    if has_pyproject_toml:
+        container = _install_python_dependencies_from_poetry(container, additional_dependency_groups)
+    elif has_setup_py:
+        container = _install_python_dependencies_from_setup_py(container, additional_dependency_groups)
+    elif has_requirements_txt:
+        container = _install_python_dependencies_from_requirements_txt(container)
+
+    return container
+
+
+def with_python_connector_source(context: ConnectorContext) -> Container:
+    """Load an airbyte connector's source code in a testing environment.
+
+    Args:
+        context (ConnectorContext): The current test context, providing the repository directory from which the connector sources will be pulled.
+    Returns:
+        Container: A python environment container (with the connector source code).
+    """
+    connector_source_path = str(context.connector.code_directory)
+    testing_environment: Container = with_testing_dependencies(context)
+
+    return with_python_package(context, testing_environment, connector_source_path)
+
+
+async def apply_python_development_overrides(context: ConnectorContext, connector_container: Container) -> Container:
+    # Run the connector using the local cdk if flag is set
+    if context.use_local_cdk:
+        context.logger.info("Using local CDK")
+        # mount the local cdk
+        path_to_cdk = "airbyte-cdk/python/"
+        directory_to_mount = context.get_repo_dir(path_to_cdk)
+
+        context.logger.info(f"Mounting CDK from {directory_to_mount}")
+
+        # Install the airbyte-cdk package from the local directory
+        # We use --no-deps to avoid conflicts with the airbyte-cdk version required by the connector
+        connector_container = connector_container.with_mounted_directory(f"/{path_to_cdk}", directory_to_mount).with_exec(
+            ["pip", "install", "--no-deps", f"/{path_to_cdk}"], skip_entrypoint=True
+        )
+
+    return connector_container
+
+
+async def with_python_connector_installed(
+    context: PipelineContext,
+    python_container: Container,
+    connector_source_path: str,
+    additional_dependency_groups: Optional[List] = None,
+    exclude: Optional[List] = None,
+    include: Optional[List] = None,
+) -> Container:
+    """Install an airbyte python connector's dependencies."""
+    container = await with_installed_python_package(
+        context,
+        python_container,
+        connector_source_path,
+        additional_dependency_groups=additional_dependency_groups,
+        exclude=exclude,
+        include=include,
+    )
+
+    container = await apply_python_development_overrides(context, container)
+
+    return container
+
+
+def with_pip_packages(base_container: Container, packages_to_install: List[str]) -> Container:
+    """Installs packages using pip.
+    Args:
+        base_container (Container): A container with python installed.
+        packages_to_install (List[str]): The list of pip packages to install.
+
+    Returns:
+        Container: A container with the pip packages installed.
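+
+    Example (illustrative):
+        >>> container = with_pip_packages(with_python_base(context), ["requests"])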
+
+    """
+    package_install_command = ["pip", "install"]
+    return base_container.with_exec(package_install_command + packages_to_install)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py
new file mode 100644
index 0000000000000..21cbf0751a62d
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py
@@ -0,0 +1,52 @@
+from pipelines.dagger.actions.python.common import with_pip_packages, with_python_package
+from pipelines.dagger.actions.python.poetry import find_local_dependencies_in_pyproject_toml
+from pipelines.pipeline.connectors.context import PipelineContext
+
+
+from dagger import Container
+
+
+from typing import List, Optional
+
+
+def with_pipx(base_python_container: Container) -> Container:
+    """Installs pipx in a python container.
+
+    Args:
+        base_python_container (Container): The container to install pipx on.
+
+    Returns:
+        Container: A python environment with pipx installed.
+    """
+    python_with_pipx = with_pip_packages(base_python_container, ["pipx"]).with_env_variable("PIPX_BIN_DIR", "/usr/local/bin")
+
+    return python_with_pipx
+
+
+async def with_installed_pipx_package(
+    context: PipelineContext,
+    python_environment: Container,
+    package_source_code_path: str,
+    exclude: Optional[List] = None,
+) -> Container:
+    """Install a python package in a python environment container using pipx.
+
+    Args:
+        context (PipelineContext): The current test context, providing the repository directory from which the python sources will be pulled.
+        python_environment (Container): An existing python environment in which the package will be installed.
+        package_source_code_path (str): The local path to the package source code.
+        exclude (Optional[List]): A list of files or directories to exclude from the python package source code.
+
+    Returns:
+        Container: A python environment container with the python package installed.
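+
+    Example (illustrative sketch, the package path is arbitrary):
+        >>> tool_container = await with_installed_pipx_package(context, with_python_base(context), "airbyte-ci/connectors/connector_ops")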
+    """
+    pipx_python_environment = with_pipx(python_environment)
+    container = with_python_package(context, pipx_python_environment, package_source_code_path, exclude=exclude)
+
+    local_dependencies = await find_local_dependencies_in_pyproject_toml(context, container, package_source_code_path, exclude=exclude)
+    for dependency_directory in local_dependencies:
+        container = container.with_mounted_directory("/" + dependency_directory, context.get_repo_dir(dependency_directory))
+
+    container = container.with_exec(["pipx", "install", f"/{package_source_code_path}"])
+
+    return container
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py
new file mode 100644
index 0000000000000..35838b1937d71
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py
@@ -0,0 +1,104 @@
+import uuid
+from pipelines.dagger.actions.python.common import with_pip_packages, with_python_package
+from pipelines.dagger.actions.system.common import with_debian_packages
+from pipelines.dagger.containers.python import with_python_base
+from pipelines.helpers.utils import get_file_contents
+from pipelines.pipeline.connectors.context import PipelineContext
+
+
+import toml
+from dagger import Container, Directory
+
+
+from pathlib import Path
+from typing import List, Optional
+
+
+async def find_local_dependencies_in_pyproject_toml(
+    context: PipelineContext,
+    base_container: Container,
+    pyproject_file_path: str,
+    exclude: Optional[List] = None,
+) -> list:
+    """Find local dependencies of a python package in a pyproject.toml file.
+
+    Args:
+        context (PipelineContext): The current pipeline context, providing the repository directory.
+        base_container (Container): A python environment container in which the package source code will be mounted.
+        pyproject_file_path (str): The path to the directory holding the pyproject.toml file.
+        exclude (Optional[List]): A list of files or directories to exclude from the package source code. Defaults to None.
+
+    Returns:
+        list: Paths to the local dependencies relative to the current directory.
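+
+    Example (illustrative): a pyproject.toml dependency declared as
+        `connector_ops = {path = "../connector_ops"}` is resolved against pyproject_file_path
+        and returned as a repo-relative path.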
+    """
+    python_package = with_python_package(context, base_container, pyproject_file_path)
+    pyproject_content_raw = await get_file_contents(python_package, "pyproject.toml")
+    if not pyproject_content_raw:
+        return []
+
+    pyproject_content = toml.loads(pyproject_content_raw)
+    local_dependency_paths = []
+    for dep, value in pyproject_content["tool"]["poetry"]["dependencies"].items():
+        if isinstance(value, dict) and "path" in value:
+            local_dependency_path = Path(value["path"])
+            pyproject_file_path = Path(pyproject_file_path)
+            local_dependency_path = str((pyproject_file_path / local_dependency_path).resolve().relative_to(Path.cwd()))
+            local_dependency_paths.append(local_dependency_path)
+
+            # Ensure we parse the child dependencies
+            # TODO handle more than pyproject.toml
+            child_local_dependencies = await find_local_dependencies_in_pyproject_toml(
+                context, base_container, local_dependency_path, exclude=exclude
+            )
+            local_dependency_paths += child_local_dependencies
+
+    return local_dependency_paths
+
+
+def _install_python_dependencies_from_poetry(
+    container: Container,
+    additional_dependency_groups: Optional[List] = None,
+) -> Container:
+    pip_install_poetry_cmd = ["pip", "install", "poetry"]
+    poetry_disable_virtual_env_cmd = ["poetry", "config", "virtualenvs.create", "false"]
+    poetry_install_no_venv_cmd = ["poetry", "install"]
+    if additional_dependency_groups:
+        for group in additional_dependency_groups:
+            poetry_install_no_venv_cmd += ["--with", group]
+
+    return container.with_exec(pip_install_poetry_cmd).with_exec(poetry_disable_virtual_env_cmd).with_exec(poetry_install_no_venv_cmd)
+
+
+def with_poetry(context: PipelineContext) -> Container:
+    """Install poetry in a python environment.
+
+    Args:
+        context (PipelineContext): The current pipeline context, used to build the python base environment.
+    Returns:
+        Container: A python environment with poetry installed.
+    """
+    python_base_environment: Container = with_python_base(context)
+    python_with_git = with_debian_packages(python_base_environment, ["git"])
+    python_with_poetry = with_pip_packages(python_with_git, ["poetry"])
+
+    # poetry_cache: CacheVolume = context.dagger_client.cache_volume("poetry_cache")
+    # poetry_with_cache = python_with_poetry.with_mounted_cache("/root/.cache/pypoetry", poetry_cache, sharing=CacheSharingMode.SHARED)
+
+    return python_with_poetry
+
+
+def with_poetry_module(context: PipelineContext, parent_dir: Directory, module_path: str) -> Container:
+    """Sets up a Poetry module.
+
+    Args:
+        context (PipelineContext): The current pipeline context.
+        parent_dir (Directory): The directory mounted at /src, containing the poetry module.
+        module_path (str): The path to the poetry module, relative to parent_dir.
+    Returns:
+        Container: A python environment with dependencies installed using poetry.
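+
+    Example (illustrative sketch, paths are arbitrary):
+        >>> module = with_poetry_module(context, context.get_repo_dir("airbyte-ci/connectors"), "pipelines")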
+ """ + poetry_install_dependencies_cmd = ["poetry", "install"] + + python_with_poetry = with_poetry(context) + return ( + python_with_poetry.with_mounted_directory("/src", parent_dir) + .with_workdir(f"/src/{module_path}") + .with_exec(poetry_install_dependencies_cmd) + .with_env_variable("CACHEBUSTER", str(uuid.uuid4())) + ) diff --git a/airbyte-ci/connectors/pipelines/pipelines/actions/remote_storage.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/remote_storage.py similarity index 97% rename from airbyte-ci/connectors/pipelines/pipelines/actions/remote_storage.py rename to airbyte-ci/connectors/pipelines/pipelines/dagger/actions/remote_storage.py index 3024cf8378c0e..7995ccb2f1b10 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/actions/remote_storage.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/remote_storage.py @@ -9,7 +9,7 @@ from typing import List, Optional, Tuple from dagger import Client, File, Secret -from pipelines.utils import get_exec_result, secret_host_variable, with_exit_code +from pipelines.helpers.utils import get_exec_result, secret_host_variable, with_exit_code GOOGLE_CLOUD_SDK_TAG = "425.0.0-slim" diff --git a/airbyte-ci/connectors/pipelines/pipelines/actions/secrets.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py similarity index 57% rename from airbyte-ci/connectors/pipelines/pipelines/actions/secrets.py rename to airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py index 985ca064b5b96..a97ad7bf4c637 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/actions/secrets.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py @@ -6,15 +6,16 @@ from __future__ import annotations import datetime -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Callable -from dagger import Secret -from pipelines.actions import environments -from pipelines.utils import get_file_contents, get_secret_host_variable +from dagger import Container, Secret +from pipelines.dagger.actions import internal_tools +from pipelines.helpers.utils import get_file_contents, get_secret_host_variable +from pipelines.pipeline.connectors.context import PipelineContext if TYPE_CHECKING: from dagger import Container - from pipelines.contexts import ConnectorContext + from pipelines.pipeline.connectors.context import ConnectorContext async def get_secrets_to_mask(ci_credentials_with_downloaded_secrets: Container) -> list[str]: @@ -44,7 +45,7 @@ async def download(context: ConnectorContext, gcp_gsm_env_variable_name: str = " """ gsm_secret = get_secret_host_variable(context.dagger_client, gcp_gsm_env_variable_name) secrets_path = f"/{context.connector.code_directory}/secrets" - ci_credentials = await environments.with_ci_credentials(context, gsm_secret) + ci_credentials = await internal_tools.with_ci_credentials(context, gsm_secret) with_downloaded_secrets = ( ci_credentials.with_exec(["mkdir", "-p", secrets_path]) .with_env_variable( @@ -81,7 +82,7 @@ async def upload(context: ConnectorContext, gcp_gsm_env_variable_name: str = "GC gsm_secret = get_secret_host_variable(context.dagger_client, gcp_gsm_env_variable_name) secrets_path = f"/{context.connector.code_directory}/secrets" - ci_credentials = await environments.with_ci_credentials(context, gsm_secret) + ci_credentials = await internal_tools.with_ci_credentials(context, gsm_secret) return await ci_credentials.with_directory(secrets_path, context.updated_secrets_dir).with_exec( ["ci_credentials", context.connector.technical_name, 
"update-secrets"] @@ -102,3 +103,50 @@ async def get_connector_secrets(context: ConnectorContext) -> dict[str, Secret]: else: raise NotImplementedError("Local secrets are not implemented yet. See https://github.com/airbytehq/airbyte/issues/25621") return connector_secrets + + +async def mounted_connector_secrets(context: PipelineContext, secret_directory_path: str) -> Callable[[Container], Container]: + # By default, mount the secrets properly as dagger secret files. + # + # This will cause the contents of these files to be scrubbed from the logs. This scrubbing comes at the cost of + # unavoidable latency in the log output, see next paragraph for details as to why. This is fine in a CI environment + # however this becomes a nuisance locally: the developer wants the logs to be displayed to them in an as timely + # manner as possible. Since the secrets aren't really secret in that case anyway, we mount them in the container as + # regular files instead. + # + # The buffering behavior that comes into play when logs are scrubbed is both unavoidable and not configurable. + # It's fundamentally unavoidable because dagger needs to match a bunch of regexes (one per secret) and therefore + # needs to buffer at least as many bytes as the longest of all possible matches. Still, this isn't that long in + # practice in our case. The real problem is that the buffering is not configurable: dagger relies on a golang + # library called transform [1] to perform the regexp matching on a stream and this library hard-codes a buffer + # size of 4096 bytes for each regex [2]. + # + # Remove the special local case whenever dagger implements scrubbing differently [3,4]. + # + # [1] https://golang.org/x/text/transform + # [2] https://cs.opensource.google/go/x/text/+/refs/tags/v0.13.0:transform/transform.go;l=130 + # [3] https://github.com/dagger/dagger/blob/v0.6.4/cmd/shim/main.go#L294 + # [4] https://github.com/airbytehq/airbyte/issues/30394 + # + if context.is_local: + # Special case for local development. + # Query dagger for the contents of the secrets and mount these strings as files in the container. 
+        contents = {}
+        for secret_file_name, secret in context.connector_secrets.items():
+            contents[secret_file_name] = await secret.plaintext()
+
+        def with_secrets_mounted_as_regular_files(container: Container) -> Container:
+            container = container.with_exec(["mkdir", "-p", secret_directory_path], skip_entrypoint=True)
+            for secret_file_name, secret_content_str in contents.items():
+                container = container.with_new_file(f"{secret_directory_path}/{secret_file_name}", secret_content_str, permissions=0o600)
+            return container
+
+        return with_secrets_mounted_as_regular_files
+
+    def with_secrets_mounted_as_dagger_secrets(container: Container) -> Container:
+        container = container.with_exec(["mkdir", "-p", secret_directory_path], skip_entrypoint=True)
+        for secret_file_name, secret in context.connector_secrets.items():
+            container = container.with_mounted_secret(f"{secret_directory_path}/{secret_file_name}", secret)
+        return container
+
+    return with_secrets_mounted_as_dagger_secrets
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipelines/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/__init__.py
similarity index 100%
rename from airbyte-ci/connectors/pipelines/pipelines/pipelines/__init__.py
rename to airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/__init__.py
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py
new file mode 100644
index 0000000000000..7d3b3835cb602
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py
@@ -0,0 +1,21 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+from dagger import Container
+
+
+from typing import List
+
+
+def with_debian_packages(base_container: Container, packages_to_install: List[str]) -> Container:
+    """Installs packages using apt-get.
+    Args:
+        base_container (Container): A Debian-based container.
+
+    Returns:
+        Container: A container with the packages installed.
+
+    """
+    update_packages_command = ["apt-get", "update"]
+    package_install_command = ["apt-get", "install", "-y"]
+    return base_container.with_exec(update_packages_command).with_exec(package_install_command + packages_to_install)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py
new file mode 100644
index 0000000000000..073c789063854
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py
@@ -0,0 +1,139 @@
+import json
+from typing import Callable
+import uuid
+from pipelines import consts
+from pipelines.consts import DOCKER_HOST_NAME, DOCKER_HOST_PORT, DOCKER_TMP_VOLUME_NAME
+from pipelines.helpers.utils import sh_dash_c
+
+
+from dagger import Client, Container, File
+
+from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext
+
+
+def with_global_dockerd_service(dagger_client: Client) -> Container:
+    """Create a container with a docker daemon running.
+    We expose its 2375 port to use it as a docker host for docker-in-docker use cases.
+    Args:
+        dagger_client (Client): The dagger client used to create the container.
+    Returns:
+        Container: The container running dockerd as a service.
+    """
+    return (
+        dagger_client.container().from_(consts.DOCKER_DIND_IMAGE)
+        # We set this env var because we need to use a non-default zombie reaper setting.
+        # The reason for this is that by default it will want to set its parent process ID to 1 when reaping.
+        # This won't be possible because of container-ception: dind is running inside the dagger engine.
+        # See https://github.com/krallin/tini#subreaping for details.
+        .with_env_variable("TINI_SUBREAPER", "")
+        # Similarly, because of container-ception, we have to use the fuse-overlayfs storage engine.
+        .with_exec(
+            sh_dash_c(
+                [
+                    # Update package metadata.
+                    "apk update",
+                    # Install the storage driver package.
+                    "apk add fuse-overlayfs",
+                    # Update daemon config with storage driver.
+                    "mkdir /etc/docker",
+                    '(echo {\\"storage-driver\\": \\"fuse-overlayfs\\"} > /etc/docker/daemon.json)',
+                ]
+            )
+        )
+        # Expose the docker host port.
+        .with_exposed_port(DOCKER_HOST_PORT)
+        # Mount the docker cache volumes.
+        .with_mounted_cache("/tmp", dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME))
+        # Run the docker daemon and bind it to the exposed TCP port.
+        .with_exec(
+            ["dockerd", "--log-level=error", f"--host=tcp://0.0.0.0:{DOCKER_HOST_PORT}", "--tls=false"], insecure_root_capabilities=True
+        )
+    )
+
+
+def with_bound_docker_host(
+    context: ConnectorContext,
+    container: Container,
+) -> Container:
+    """Bind a container to a docker host. It will use the dockerd service as a docker host.
+
+    Args:
+        context (ConnectorContext): The current connector context.
+        container (Container): The container to bind to the docker host.
+    Returns:
+        Container: The container bound to the docker host.
+    """
+    return (
+        container.with_env_variable("DOCKER_HOST", f"tcp://{DOCKER_HOST_NAME}:{DOCKER_HOST_PORT}")
+        .with_service_binding(DOCKER_HOST_NAME, context.dockerd_service)
+        .with_mounted_cache("/tmp", context.dagger_client.cache_volume(DOCKER_TMP_VOLUME_NAME))
+    )
+
+
+def bound_docker_host(context: ConnectorContext) -> Callable[[Container], Container]:
+    def bound_docker_host_inner(container: Container) -> Container:
+        return with_bound_docker_host(context, container)

+    return bound_docker_host_inner
+
+
+def with_docker_cli(context: ConnectorContext) -> Container:
+    """Create a container with the docker CLI installed and bound to a persistent docker host.
+
+    Args:
+        context (ConnectorContext): The current connector context.
+
+    Returns:
+        Container: A docker cli container bound to a docker host.
+    """
+    docker_cli = context.dagger_client.container().from_(consts.DOCKER_CLI_IMAGE)
+    return with_bound_docker_host(context, docker_cli)
+
+
+async def load_image_to_docker_host(context: ConnectorContext, tar_file: File, image_tag: str) -> str:
+    """Load a docker image tar archive to the docker host.
+
+    Args:
+        context (ConnectorContext): The current connector context.
+        tar_file (File): The file object holding the docker image tar archive.
+        image_tag (str): The tag to create on the image if it has no tag.
+
+    Returns:
+        str: The ID (sha256) of the loaded image.
+    """
+    # Hacky way to make sure the image is always loaded.
+    tar_name = f"{str(uuid.uuid4())}.tar"
+    docker_cli = with_docker_cli(context).with_mounted_file(tar_name, tar_file)
+
+    image_load_output = await docker_cli.with_exec(["docker", "load", "--input", tar_name]).stdout()
+    # Untagged images only have a sha256 ID, which the load output shares.
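+    # e.g. "Loaded image ID: sha256:<digest>" for an untagged archive, vs "Loaded image: repo/name:tag" otherwise.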
+ if "sha256:" in image_load_output: + image_id = image_load_output.replace("\n", "").replace("Loaded image ID: sha256:", "") + await docker_cli.with_exec(["docker", "tag", image_id, image_tag]) + image_sha = json.loads(await docker_cli.with_exec(["docker", "inspect", image_tag]).stdout())[0].get("Id") + return image_sha + + +def with_crane( + context: PipelineContext, +) -> Container: + """Crane is a tool to analyze and manipulate container images. + We can use it to extract the image manifest and the list of layers or list the existing tags on an image repository. + https://github.com/google/go-containerregistry/tree/main/cmd/crane + """ + + # We use the debug image as it contains a shell which we need to properly use environment variables + # https://github.com/google/go-containerregistry/tree/main/cmd/crane#images + base_container = context.dagger_client.container().from_("gcr.io/go-containerregistry/crane/debug:v0.15.1") + + if context.docker_hub_username_secret and context.docker_hub_password_secret: + base_container = ( + base_container.with_secret_variable("DOCKER_HUB_USERNAME", context.docker_hub_username_secret).with_secret_variable( + "DOCKER_HUB_PASSWORD", context.docker_hub_password_secret + ) + # We need to use skip_entrypoint=True to avoid the entrypoint to be overridden by the crane command + # We use sh -c to be able to use environment variables in the command + # This is a workaround as the default crane entrypoint doesn't support environment variables + .with_exec( + sh_dash_c(["crane auth login index.docker.io -u $DOCKER_HUB_USERNAME -p $DOCKER_HUB_PASSWORD"]), skip_entrypoint=True + ) + ) + + return base_container diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py new file mode 100644 index 0000000000000..9fb064232f4cc --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py @@ -0,0 +1,174 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# +from pipelines.dagger.actions.connector.hooks import finalize_build +from pipelines.consts import AMAZONCORRETTO_IMAGE +from pipelines.dagger.actions.connector.normalization import DESTINATION_NORMALIZATION_BUILD_CONFIGURATION, with_normalization +from pipelines.helpers.utils import sh_dash_c +from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext + + +from dagger import CacheVolume, Container, File, Platform + + +def with_integration_base(context: PipelineContext, build_platform: Platform) -> Container: + return ( + context.dagger_client.container(platform=build_platform) + .from_("amazonlinux:2022.0.20220831.1") + .with_workdir("/airbyte") + .with_file("base.sh", context.get_repo_dir("airbyte-integrations/bases/base", include=["base.sh"]).file("base.sh")) + .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/base.sh") + .with_label("io.airbyte.version", "0.1.0") + .with_label("io.airbyte.name", "airbyte/integration-base") + ) + + +def with_integration_base_java(context: PipelineContext, build_platform: Platform) -> Container: + integration_base = with_integration_base(context, build_platform) + yum_packages_to_install = [ + "tar", # required to untar java connector binary distributions. + "openssl", # required because we need to ssh and scp sometimes. + "findutils", # required for xargs, which is shipped as part of findutils. + ] + return ( + context.dagger_client.container(platform=build_platform) + # Use a linux+jdk base image with long-term support, such as amazoncorretto. + .from_(AMAZONCORRETTO_IMAGE) + # Install a bunch of packages as early as possible. + .with_exec( + sh_dash_c( + [ + # Update first, but in the same .with_exec step as the package installation. + # Otherwise, we risk caching stale package URLs. + "yum update -y", + # + f"yum install -y {' '.join(yum_packages_to_install)}", + # Remove any dangly bits. + "yum clean all", + ] + ) + ) + # Add what files we need to the /airbyte directory. + # Copy base.sh from the airbyte/integration-base image. + .with_directory("/airbyte", integration_base.directory("/airbyte")) + .with_workdir("/airbyte") + # Download a utility jar from the internet. + .with_file("dd-java-agent.jar", context.dagger_client.http("https://dtdg.co/latest-java-tracer")) + # Copy javabase.sh from the git repo. + .with_file("javabase.sh", context.get_repo_dir("airbyte-integrations/bases/base-java", include=["javabase.sh"]).file("javabase.sh")) + # Set a bunch of env variables used by base.sh. + .with_env_variable("AIRBYTE_SPEC_CMD", "/airbyte/javabase.sh --spec") + .with_env_variable("AIRBYTE_CHECK_CMD", "/airbyte/javabase.sh --check") + .with_env_variable("AIRBYTE_DISCOVER_CMD", "/airbyte/javabase.sh --discover") + .with_env_variable("AIRBYTE_READ_CMD", "/airbyte/javabase.sh --read") + .with_env_variable("AIRBYTE_WRITE_CMD", "/airbyte/javabase.sh --write") + .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/base.sh") + # Set image labels. 
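+    # (These are the same io.airbyte.* label keys that with_airbyte_java_connector below sets from the connector metadata.)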
+ .with_label("io.airbyte.version", "0.1.2") + .with_label("io.airbyte.name", "airbyte/integration-base-java") + ) + + +def with_integration_base_java_and_normalization(context: PipelineContext, build_platform: Platform) -> Container: + yum_packages_to_install = [ + "python3", + "python3-devel", + "jq", + "sshpass", + "git", + ] + + additional_yum_packages = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["yum_packages"] + yum_packages_to_install += additional_yum_packages + + dbt_adapter_package = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["dbt_adapter"] + normalization_integration_name = DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["integration_name"] + + pip_cache: CacheVolume = context.dagger_client.cache_volume("pip_cache") + + return ( + with_integration_base_java(context, build_platform) + .with_exec( + sh_dash_c( + [ + "yum update -y", + f"yum install -y {' '.join(yum_packages_to_install)}", + "yum clean all", + "alternatives --install /usr/bin/python python /usr/bin/python3 60", + ] + ) + ) + .with_mounted_cache("/root/.cache/pip", pip_cache) + .with_exec( + sh_dash_c( + [ + "python -m ensurepip --upgrade", + # Workaround for https://github.com/yaml/pyyaml/issues/601 + "pip3 install 'Cython<3.0' 'pyyaml~=5.4' --no-build-isolation", + # Required for dbt https://github.com/dbt-labs/dbt-core/issues/7075 + "pip3 install 'pytz~=2023.3'", + f"pip3 install {dbt_adapter_package}", + # amazon linux 2 isn't compatible with urllib3 2.x, so force 1.x + "pip3 install 'urllib3<2'", + ] + ) + ) + .with_directory("airbyte_normalization", with_normalization(context, build_platform).directory("/airbyte")) + .with_workdir("airbyte_normalization") + .with_exec(sh_dash_c(["mv * .."])) + .with_workdir("/airbyte") + .with_exec(["rm", "-rf", "airbyte_normalization"]) + .with_workdir("/airbyte/normalization_code") + .with_exec(["pip3", "install", "."]) + .with_workdir("/airbyte/normalization_code/dbt-template/") + .with_exec(["dbt", "deps"]) + .with_workdir("/airbyte") + .with_file( + "run_with_normalization.sh", + context.get_repo_dir("airbyte-integrations/bases/base-java", include=["run_with_normalization.sh"]).file( + "run_with_normalization.sh" + ), + ) + .with_env_variable("AIRBYTE_NORMALIZATION_INTEGRATION", normalization_integration_name) + .with_env_variable("AIRBYTE_ENTRYPOINT", "/airbyte/run_with_normalization.sh") + ) + + +async def with_airbyte_java_connector(context: ConnectorContext, connector_java_tar_file: File, build_platform: Platform) -> Container: + application = context.connector.technical_name + + build_stage = ( + with_integration_base_java(context, build_platform) + .with_workdir("/airbyte") + .with_env_variable("APPLICATION", context.connector.technical_name) + .with_file(f"{application}.tar", connector_java_tar_file) + .with_exec( + sh_dash_c( + [ + f"tar xf {application}.tar --strip-components=1", + f"rm -rf {application}.tar", + ] + ) + ) + ) + + if ( + context.connector.supports_normalization + and DESTINATION_NORMALIZATION_BUILD_CONFIGURATION[context.connector.technical_name]["supports_in_connector_normalization"] + ): + base = with_integration_base_java_and_normalization(context, build_platform) + entrypoint = ["/airbyte/run_with_normalization.sh"] + else: + base = with_integration_base_java(context, build_platform) + entrypoint = ["/airbyte/base.sh"] + + connector_container = ( + base.with_workdir("/airbyte") + .with_env_variable("APPLICATION", application) + 
.with_mounted_directory("built_artifacts", build_stage.directory("/airbyte")) + .with_exec(sh_dash_c(["mv built_artifacts/* ."])) + .with_label("io.airbyte.version", context.metadata["dockerImageTag"]) + .with_label("io.airbyte.name", context.metadata["dockerRepository"]) + .with_entrypoint(entrypoint) + ) + return await finalize_build(context, connector_container) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py new file mode 100644 index 0000000000000..288f361a39b6a --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py @@ -0,0 +1,60 @@ +from pipelines.consts import CONNECTOR_TESTING_REQUIREMENTS, LICENSE_SHORT_FILE_PATH, PYPROJECT_TOML_FILE_PATH +from pipelines.helpers.utils import sh_dash_c +from pipelines.pipeline.connectors.context import PipelineContext + + +from dagger import CacheVolume, Container + + +def with_python_base(context: PipelineContext, python_version: str = "3.10") -> Container: + """Build a Python container with a cache volume for pip cache. + + Args: + context (PipelineContext): The current test context, providing a dagger client and a repository directory. + python_image_name (str, optional): The python image to use to build the python base environment. Defaults to "python:3.9-slim". + + Raises: + ValueError: Raised if the python_image_name is not a python image. + + Returns: + Container: The python base environment container. + """ + + pip_cache: CacheVolume = context.dagger_client.cache_volume("pip_cache") + + base_container = ( + context.dagger_client.container() + .from_(f"python:{python_version}-slim") + .with_mounted_cache("/root/.cache/pip", pip_cache) + .with_exec( + sh_dash_c( + [ + "apt-get update", + "apt-get install -y build-essential cmake g++ libffi-dev libstdc++6 git", + "pip install pip==23.1.2", + ] + ) + ) + ) + + return base_container + + +def with_testing_dependencies(context: PipelineContext) -> Container: + """Build a testing environment by installing testing dependencies on top of a python base environment. + + Args: + context (PipelineContext): The current test context, providing a dagger client and a repository directory. + + Returns: + Container: The testing environment container. + """ + python_environment: Container = with_python_base(context) + pyproject_toml_file = context.get_repo_dir(".", include=[PYPROJECT_TOML_FILE_PATH]).file(PYPROJECT_TOML_FILE_PATH) + license_short_file = context.get_repo_dir(".", include=[LICENSE_SHORT_FILE_PATH]).file(LICENSE_SHORT_FILE_PATH) + + return ( + python_environment.with_exec(["pip", "install"] + CONNECTOR_TESTING_REQUIREMENTS) + .with_file(f"/{PYPROJECT_TOML_FILE_PATH}", pyproject_toml_file) + .with_file(f"/{LICENSE_SHORT_FILE_PATH}", license_short_file) + ) diff --git a/airbyte-ci/connectors/pipelines/pipelines/gradle.py b/airbyte-ci/connectors/pipelines/pipelines/gradle.py deleted file mode 100644 index 637ae18c7551d..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/gradle.py +++ /dev/null @@ -1,163 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from __future__ import annotations - -from abc import ABC -from typing import ClassVar, List - -from dagger import CacheSharingMode, CacheVolume -from pipelines import hacks -from pipelines.actions import environments -from pipelines.bases import Step, StepResult -from pipelines.consts import AMAZONCORRETTO_IMAGE -from pipelines.contexts import PipelineContext -from pipelines.utils import sh_dash_c - - -class GradleTask(Step, ABC): - """ - A step to run a Gradle task. - - Attributes: - title (str): The step title. - gradle_task_name (str): The Gradle task name to run. - bind_to_docker_host (bool): Whether to install the docker client and bind it to the host. - mount_connector_secrets (bool): Whether to mount connector secrets. - """ - - DEFAULT_GRADLE_TASK_OPTIONS = ("--no-daemon", "--scan", "--build-cache", "--console=plain") - - gradle_task_name: ClassVar[str] - bind_to_docker_host: ClassVar[bool] = False - mount_connector_secrets: ClassVar[bool] = False - - def __init__(self, context: PipelineContext) -> None: - super().__init__(context) - - @property - def connector_java_build_cache(self) -> CacheVolume: - # TODO: remove this once we finish the project to boost source-postgres CI performance. - # We should use a static gradle-cache volume name. - cache_volume_name = hacks.get_gradle_cache_volume_name(self.context, self.logger) - return self.context.dagger_client.cache_volume(cache_volume_name) - - @property - def build_include(self) -> List[str]: - """Retrieve the list of source code directory required to run a Java connector Gradle task. - - The list is different according to the connector type. - - Returns: - List[str]: List of directories or files to be mounted to the container to run a Java connector Gradle task. - """ - return [ - str(dependency_directory) - for dependency_directory in self.context.connector.get_local_dependency_paths(with_test_dependencies=True) - ] - - def _get_gradle_command(self, task: str) -> List[str]: - return sh_dash_c( - [ - # The gradle command is chained in between a couple of rsyncs which load from- and store to the cache volume. - "(rsync -a --stats /root/gradle-cache/ /root/.gradle || true)", - f"./gradlew {' '.join(self.DEFAULT_GRADLE_TASK_OPTIONS)} {task}", - "(rsync -a --stats /root/.gradle/ /root/gradle-cache || true)", - ] - ) - - async def _run(self) -> StepResult: - include = [ - ".root", - ".env", - "build.gradle", - "deps.toml", - "gradle.properties", - "gradle", - "gradlew", - "LICENSE_SHORT", - "settings.gradle", - "build.gradle", - "tools/gradle", - "spotbugs-exclude-filter-file.xml", - "buildSrc", - "tools/bin/build_image.sh", - "tools/lib/lib.sh", - "tools/gradle/codestyle", - "pyproject.toml", - "airbyte-cdk/java/airbyte-cdk/**", - ] + self.build_include - - yum_packages_to_install = [ - "docker", # required by :integrationTestJava. - "findutils", # gradle requires xargs, which is shipped in findutils. - "jq", # required by :airbyte-connector-test-harnesses:acceptance-test-harness to inspect docker images. - "npm", # required by :format. - "python3.11-pip", # required by :format. - "rsync", # required for gradle cache synchronization. - ] - - # Define a gradle container which will be cached and re-used for all tasks. - # We should do our best to cram any generic & expensive layers in here. - gradle_container = ( - self.dagger_client.container() - # Use a linux+jdk base image with long-term support, such as amazoncorretto. - .from_(AMAZONCORRETTO_IMAGE) - # Install a bunch of packages as early as possible. 
- .with_exec( - sh_dash_c( - [ - # Update first, but in the same .with_exec step as the package installation. - # Otherwise, we risk caching stale package URLs. - "yum update -y", - f"yum install -y {' '.join(yum_packages_to_install)}", - # Remove any dangly bits. - "yum clean all", - # Deliberately soft-remove docker, so that the `docker` CLI is unavailable by default. - # This is a defensive choice to enforce the expectation that, as a general rule, gradle tasks do not rely on docker. - "yum remove -y --noautoremove docker", # remove docker package but not its dependencies - "yum install -y --downloadonly docker", # have docker package in place for quick install - ] - ) - ) - # Set GRADLE_HOME and GRADLE_USER_HOME to the directory which will be rsync-ed with the gradle cache volume. - .with_env_variable("GRADLE_HOME", "/root/.gradle") - .with_env_variable("GRADLE_USER_HOME", "/root/.gradle") - # Set RUN_IN_AIRBYTE_CI to tell gradle how to configure its build cache. - # This is consumed by settings.gradle in the repo root. - .with_env_variable("RUN_IN_AIRBYTE_CI", "1") - # TODO: remove this once we finish the project to boost source-postgres CI performance. - .with_env_variable("CACHEBUSTER", hacks.get_cachebuster(self.context, self.logger)) - # Mount the gradle cache volume. - # We deliberately don't mount it at $GRADLE_HOME, instead we load it there and store it from there using rsync. - # This is because the volume is accessed concurrently by all GradleTask instances. - # Hence, why we synchronize the writes by setting the `sharing` parameter to LOCKED. - .with_mounted_cache("/root/gradle-cache", self.connector_java_build_cache, sharing=CacheSharingMode.LOCKED) - # Mount the parts of the repo which interest us in /airbyte. - .with_workdir("/airbyte") - .with_mounted_directory("/airbyte", self.context.get_repo_dir(".", include=include)) - .with_mounted_directory(str(self.context.connector.code_directory), await self.context.get_connector_dir()) - # Disable the Ryuk container because it needs privileged docker access that does not work: - .with_env_variable("TESTCONTAINERS_RYUK_DISABLED", "true") - # Run gradle once to populate the container's local maven repository. - # This step is useful also to serve as a basic sanity check and to warm the gradle cache. - # This will download gradle itself, a bunch of poms and jars, compile the gradle plugins, configure tasks, etc. - .with_exec(self._get_gradle_command(":airbyte-cdk:java:airbyte-cdk:publishSnapshotIfNeeded")) - ) - - # From this point on, we add layers which are task-dependent. - if self.mount_connector_secrets: - gradle_container = gradle_container.with_( - await environments.mounted_connector_secrets(self.context, f"{self.context.connector.code_directory}/secrets") - ) - if self.bind_to_docker_host: - # If this GradleTask subclass needs docker, then install it and bind it to the existing global docker host container. - gradle_container = environments.with_bound_docker_host(self.context, gradle_container) - # This installation should be cheap, as the package has already been downloaded, and its dependencies are already installed. - gradle_container = gradle_container.with_exec(["yum", "install", "-y", "docker"]) - - # Run the gradle task that we actually care about. 
- connector_task = f":airbyte-integrations:connectors:{self.context.connector.technical_name}:{self.gradle_task_name}" - gradle_container = gradle_container.with_exec(self._get_gradle_command(connector_task)) - return await self.get_step_result(gradle_container) diff --git a/airbyte-ci/connectors/pipelines/pipelines/hacks.py b/airbyte-ci/connectors/pipelines/pipelines/hacks.py index 61af0ed050454..1053b3ad8d858 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/hacks.py +++ b/airbyte-ci/connectors/pipelines/pipelines/hacks.py @@ -13,7 +13,7 @@ if TYPE_CHECKING: from dagger import Client, Container - from pipelines.contexts import ConnectorContext + from pipelines.pipeline.connectors.context import ConnectorContext async def cache_latest_cdk(dagger_client: Client, pip_cache_volume_name: str = "pip_cache") -> None: diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/metadata_change_helpers.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/metadata_change_helpers.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/connector_changes/metadata_change_helpers.py rename to airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/metadata_change_helpers.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py new file mode 100644 index 0000000000000..3ecdf3d4fa2cc --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py @@ -0,0 +1,58 @@ +from connector_ops.utils import Connector + + +from pathlib import Path +from typing import FrozenSet, Set, Union +from pipelines import main_logger, Union +from pipelines.helpers.utils import IGNORED_FILE_EXTENSIONS + + + +def get_connector_modified_files(connector: Connector, all_modified_files: Set[Path]) -> FrozenSet[Path]: + connector_modified_files = set() + for modified_file in all_modified_files: + modified_file_path = Path(modified_file) + if modified_file_path.is_relative_to(connector.code_directory): + connector_modified_files.add(modified_file) + return frozenset(connector_modified_files) + + +def _find_modified_connectors( + file_path: Union[str, Path], all_connectors: Set[Connector], dependency_scanning: bool = True +) -> Set[Connector]: + """Find all connectors impacted by the file change.""" + modified_connectors = set() + + for connector in all_connectors: + if Path(file_path).is_relative_to(Path(connector.code_directory)): + main_logger.info(f"Adding connector '{connector}' due to connector file modification: {file_path}.") + modified_connectors.add(connector) + + if dependency_scanning: + for connector_dependency in connector.get_local_dependency_paths(): + if Path(file_path).is_relative_to(Path(connector_dependency)): + # Add the connector to the modified connectors + modified_connectors.add(connector) + main_logger.info(f"Adding connector '{connector}' due to dependency modification: '{file_path}'.") + return modified_connectors + + +def _is_ignored_file(file_path: Union[str, Path]) -> bool: + """Check if the provided file has an ignored extension.""" + 
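+    # (IGNORED_FILE_EXTENSIONS is defined in pipelines.helpers.utils; at the time of writing it only contains ".md".)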
+    return Path(file_path).suffix in IGNORED_FILE_EXTENSIONS
+
+
+def get_modified_connectors(modified_files: Set[Path], all_connectors: Set[Connector], dependency_scanning: bool) -> Set[Connector]:
+    """Return the set of connectors impacted by the modified files.
+
+    If dependency scanning is enabled, any modification to a dependency will trigger the connector pipeline for all connectors that depend on it.
+    It currently works only for Java connectors.
+    It's especially useful to trigger tests of the strict-encrypt variant when a change is made to the base connector.
+    Or to test all JDBC connectors when a change is made to source-jdbc or base-java.
+    We'll consider extending the dependency resolution to Python connectors once we confirm that it's needed and feasible in terms of scale.
+    """
+    # Ignore files with certain extensions.
+    modified_connectors = set()
+    for modified_file in modified_files:
+        if not _is_ignored_file(modified_file):
+            modified_connectors.update(_find_modified_connectors(modified_file, all_connectors, dependency_scanning))
+    return modified_connectors
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py
new file mode 100644
index 0000000000000..510cdbcec28e0
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py
@@ -0,0 +1,50 @@
+import json
+from pathlib import Path
+from typing import Optional, Tuple
+
+from google.cloud import storage
+from google.oauth2 import service_account
+
+from pipelines import main_logger
+from pipelines.consts import GCS_PUBLIC_DOMAIN
+
+
+def upload_to_gcs(file_path: Path, bucket_name: str, object_name: str, credentials: str) -> Tuple[str, str]:
+    """Upload a file to a GCS bucket.
+
+    Args:
+        file_path (Path): The path to the file to upload.
+        bucket_name (str): The name of the GCS bucket.
+        object_name (str): The name of the object in the GCS bucket.
+        credentials (str): The GCS credentials as a JSON string.
+
+    Returns:
+        Tuple[str, str]: The GCS URI and the public URL of the uploaded object.
+    """
+    # Exit early if the file does not exist.
+    if not file_path.exists():
+        main_logger.warning(f"File {file_path} does not exist. Skipping upload to GCS.")
+        return "", ""
+
+    credentials = service_account.Credentials.from_service_account_info(json.loads(credentials))
+    client = storage.Client(credentials=credentials)
+    bucket = client.get_bucket(bucket_name)
+    blob = bucket.blob(object_name)
+    blob.upload_from_filename(str(file_path))
+    gcs_uri = f"gs://{bucket_name}/{object_name}"
+    public_url = f"{GCS_PUBLIC_DOMAIN}/{bucket_name}/{object_name}"
+    return gcs_uri, public_url
+
+
+def sanitize_gcs_credentials(raw_value: Optional[str]) -> Optional[str]:
+    """Try to parse the raw string input that should contain a json object with the GCS credentials.
+
+    It will raise an exception if the parsing fails, helping us fail fast on invalid credentials input.
+
+    Args:
+        raw_value (str): A string representing a json object with the GCS credentials.
+
+    Returns:
+        str: The raw value string if it was successfully parsed.
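+
+    Example (illustrative):
+        >>> sanitize_gcs_credentials('{"type": "service_account"}')
+        '{"type": "service_account"}'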
+ """ + if raw_value is None: + return None + return json.dumps(json.loads(raw_value)) diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py new file mode 100644 index 0000000000000..e82244932b15f --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py @@ -0,0 +1,120 @@ +import anyio +from dagger import Connection +from typing import List, Set +import git +from github import PullRequest + +from pipelines.helpers.utils import AIRBYTE_REPO_URL, DAGGER_CONFIG, DIFF_FILTER + + +def get_current_git_revision() -> str: # noqa D103 + return git.Repo().head.object.hexsha + + +def get_current_git_branch() -> str: # noqa D103 + return git.Repo().active_branch.name + + +async def get_modified_files_in_branch_remote( + current_git_branch: str, current_git_revision: str, diffed_branch: str = "origin/master" +) -> Set[str]: + """Use git diff to spot the modified files on the remote branch.""" + async with Connection(DAGGER_CONFIG) as dagger_client: + modified_files = await ( + dagger_client.container() + .from_("alpine/git:latest") + .with_workdir("/repo") + .with_exec(["init"]) + .with_env_variable("CACHEBUSTER", current_git_revision) + .with_exec( + [ + "remote", + "add", + "--fetch", + "--track", + diffed_branch.split("/")[-1], + "--track", + current_git_branch, + "origin", + AIRBYTE_REPO_URL, + ] + ) + .with_exec(["checkout", "-t", f"origin/{current_git_branch}"]) + .with_exec(["diff", f"--diff-filter={DIFF_FILTER}", "--name-only", f"{diffed_branch}...{current_git_revision}"]) + .stdout() + ) + return set(modified_files.split("\n")) + + +def get_modified_files_in_branch_local(current_git_revision: str, diffed_branch: str = "master") -> Set[str]: + """Use git diff and git status to spot the modified files on the local branch.""" + airbyte_repo = git.Repo() + modified_files = airbyte_repo.git.diff( + f"--diff-filter={DIFF_FILTER}", "--name-only", f"{diffed_branch}...{current_git_revision}" + ).split("\n") + status_output = airbyte_repo.git.status("--porcelain") + for not_committed_change in status_output.split("\n"): + file_path = not_committed_change.strip().split(" ")[-1] + if file_path: + modified_files.append(file_path) + return set(modified_files) + + +def get_modified_files_in_branch(current_git_branch: str, current_git_revision: str, diffed_branch: str, is_local: bool = True) -> Set[str]: + """Retrieve the list of modified files on the branch.""" + if is_local: + return get_modified_files_in_branch_local(current_git_revision, diffed_branch) + else: + return anyio.run(get_modified_files_in_branch_remote, current_git_branch, current_git_revision, diffed_branch) + + +async def get_modified_files_in_commit_remote(current_git_branch: str, current_git_revision: str) -> Set[str]: + async with Connection(DAGGER_CONFIG) as dagger_client: + modified_files = await ( + dagger_client.container() + .from_("alpine/git:latest") + .with_workdir("/repo") + .with_exec(["init"]) + .with_env_variable("CACHEBUSTER", current_git_revision) + .with_exec( + [ + "remote", + "add", + "--fetch", + "--track", + current_git_branch, + "origin", + AIRBYTE_REPO_URL, + ] + ) + .with_exec(["checkout", "-t", f"origin/{current_git_branch}"]) + .with_exec(["diff-tree", "--no-commit-id", "--name-only", current_git_revision, "-r"]) + .stdout() + ) + return set(modified_files.split("\n")) + + +def get_modified_files_in_commit_local(current_git_revision: str) -> Set[str]: + airbyte_repo = git.Repo() + modified_files = 
airbyte_repo.git.diff_tree("--no-commit-id", "--name-only", current_git_revision, "-r").split("\n") + return set(modified_files) + + +def get_modified_files_in_commit(current_git_branch: str, current_git_revision: str, is_local: bool = True) -> Set[str]: + if is_local: + return get_modified_files_in_commit_local(current_git_revision) + else: + return anyio.run(get_modified_files_in_commit_remote, current_git_branch, current_git_revision) + + +def get_modified_files_in_pull_request(pull_request: PullRequest) -> List[str]: + """Retrieve the list of modified files in a pull request.""" + return [f.filename for f in pull_request.get_files()] + + + + + + + + diff --git a/airbyte-ci/connectors/pipelines/pipelines/github.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py similarity index 98% rename from airbyte-ci/connectors/pipelines/pipelines/github.py rename to airbyte-ci/connectors/pipelines/pipelines/helpers/github.py index fd6bb7e47530f..d04996da8d2bd 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/github.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py @@ -11,7 +11,7 @@ from connector_ops.utils import console from pipelines import main_logger -from pipelines.bases import CIContext +from pipelines.models.bases import CIContext if TYPE_CHECKING: from logging import Logger diff --git a/airbyte-ci/connectors/pipelines/pipelines/sentry_utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/sentry_utils.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/sentry_utils.py rename to airbyte-ci/connectors/pipelines/pipelines/helpers/sentry_utils.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/slack.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/slack.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/slack.py rename to airbyte-ci/connectors/pipelines/pipelines/helpers/slack.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py index c2456122778b9..4b2bf10d56489 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py @@ -9,10 +9,10 @@ from typing import TYPE_CHECKING, List, Tuple, Union import asyncer -from pipelines.bases import Step, StepStatus +from pipelines.models.bases import Step, StepStatus if TYPE_CHECKING: - from pipelines.bases import StepResult + from pipelines.models.steps import StepResult async def run_steps( diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py new file mode 100644 index 0000000000000..9c10f7689e8c5 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py @@ -0,0 +1,325 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +"""This module groups util function used in pipelines.""" +from __future__ import annotations + +import contextlib +import datetime +import os +import re +import sys +import unicodedata +from glob import glob +from io import TextIOWrapper +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple + +import anyio +import asyncer +import click +from connector_ops.utils import get_changed_connectors +from dagger import Client, Config, Container, DaggerError, ExecError, File, ImageLayerCompression, QueryError, Secret +from more_itertools import chunked +from pipelines import consts +from pipelines.helpers import sentry_utils + +if TYPE_CHECKING: + from pipelines.pipeline.connectors.context import ConnectorContext + +DAGGER_CONFIG = Config(log_output=sys.stderr) +AIRBYTE_REPO_URL = "https://github.com/airbytehq/airbyte.git" +METADATA_FILE_NAME = "metadata.yaml" +METADATA_ICON_FILE_NAME = "icon.svg" +DIFF_FILTER = "MADRT" # Modified, Added, Deleted, Renamed, Type changed +IGNORED_FILE_EXTENSIONS = [".md"] + + + +# This utils will probably be redundant once https://github.com/dagger/dagger/issues/3764 is implemented +async def check_path_in_workdir(container: Container, path: str) -> bool: + """Check if a local path is mounted to the working directory of a container. + + Args: + container (Container): The container on which we want the check the path existence. + path (str): Directory or file path we want to check the existence in the container working directory. + + Returns: + bool: Whether the path exists in the container working directory. + """ + workdir = (await container.with_exec(["pwd"], skip_entrypoint=True).stdout()).strip() + mounts = await container.mounts() + if workdir in mounts: + expected_file_path = Path(workdir[1:]) / path + return expected_file_path.is_file() or expected_file_path.is_dir() + else: + return False + + +def secret_host_variable(client: Client, name: str, default: str = ""): + """Add a host environment variable as a secret in a container. + + Example: + container.with_(secret_host_variable(client, "MY_SECRET")) + + Args: + client (Client): The dagger client. + name (str): The name of the environment variable. The same name will be + used in the container, for the secret name and for the host variable. + default (str): The default value to use if the host variable is not set. Defaults to "". + + Returns: + Callable[[Container], Container]: A function that can be used in a `Container.with_()` method. + """ + + def _secret_host_variable(container: Container): + return container.with_secret_variable(name, get_secret_host_variable(client, name, default)) + + return _secret_host_variable + + +def get_secret_host_variable(client: Client, name: str, default: str = "") -> Secret: + """Creates a dagger.Secret from a host environment variable. + + Args: + client (Client): The dagger client. + name (str): The name of the environment variable. The same name will be used for the secret. + default (str): The default value to use if the host variable is not set. Defaults to "". + + Returns: + Secret: A dagger secret. + """ + return client.set_secret(name, os.environ.get(name, default)) + + +# This utils will probably be redundant once https://github.com/dagger/dagger/issues/3764 is implemented +async def get_file_contents(container: Container, path: str) -> Optional[str]: + """Retrieve a container file contents. + + Args: + container (Container): The container hosting the file you want to read. 
+        path (str): Path, in the container, to the file you want to read.
+
+    Returns:
+        Optional[str]: The file content if the file exists in the container, None otherwise.
+    """
+    try:
+        return await container.file(path).contents()
+    except QueryError as e:
+        if "no such file or directory" not in str(e):
+            # This error could come from a network issue.
+            raise
+    return None
+
+
+@contextlib.contextmanager
+def catch_exec_error_group():
+    try:
+        yield
+    except anyio.ExceptionGroup as eg:
+        for e in eg.exceptions:
+            if isinstance(e, ExecError):
+                raise e
+        raise
+
+
+async def get_container_output(container: Container) -> Tuple[str, str]:
+    """Retrieve both stdout and stderr of a container, concurrently.
+
+    Args:
+        container (Container): The container to execute.
+
+    Returns:
+        Tuple[str, str]: The stdout and stderr of the container, respectively.
+    """
+    with catch_exec_error_group():
+        async with asyncer.create_task_group() as task_group:
+            soon_stdout = task_group.soonify(container.stdout)()
+            soon_stderr = task_group.soonify(container.stderr)()
+    return soon_stdout.value, soon_stderr.value
+
+
+async def get_exec_result(container: Container) -> Tuple[int, str, str]:
+    """Retrieve the exit_code along with stdout and stderr of a container by handling the ExecError.
+
+    Note: It is preferable to not worry about the exit code value and just capture
+    ExecError to handle errors. This is offered as a convenience when the exit code
+    value is actually needed.
+
+    If the container has a file at /exit_code, the exit code will be read from it.
+    See hacks.never_fail_exec for more details.
+
+    Args:
+        container (Container): The container to execute.
+
+    Returns:
+        Tuple[int, str, str]: The exit_code, stdout and stderr of the container, respectively.
+    """
+    try:
+        exit_code = 0
+        in_file_exit_code = await get_file_contents(container, "/exit_code")
+        if in_file_exit_code:
+            exit_code = int(in_file_exit_code)
+        return exit_code, *(await get_container_output(container))
+    except ExecError as e:
+        return e.exit_code, e.stdout, e.stderr
+
+
+async def with_exit_code(container: Container) -> int:
+    """Read the container exit code.
+
+    Args:
+        container (Container): The container from which you want to read the exit code.
+
+    Returns:
+        int: The exit code.
+    """
+    try:
+        await container
+    except ExecError as e:
+        return e.exit_code
+    return 0
+
+
+async def with_stderr(container: Container) -> str:
+    """Retrieve the stderr of a container even on execution error."""
+    try:
+        return await container.stderr()
+    except ExecError as e:
+        return e.stderr
+
+
+async def with_stdout(container: Container) -> str:
+    """Retrieve the stdout of a container even on execution error."""
+    try:
+        return await container.stdout()
+    except ExecError as e:
+        return e.stdout
+
+
+def get_current_epoch_time() -> int:  # noqa D103
+    return round(datetime.datetime.utcnow().timestamp())
+
+
+def slugify(value: Any, allow_unicode: bool = False) -> str:
+    """
+    Taken from https://github.com/django/django/blob/master/django/utils/text.py.
+
+    Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated
+    dashes to single dashes. Remove characters that aren't alphanumerics,
+    underscores, or hyphens. Convert to lowercase. Also strip leading and
+    trailing whitespace, dashes, and underscores.
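+
+    Example:
+        >>> slugify("Hello, World!")
+        'hello-world'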
+ """ + value = str(value) + if allow_unicode: + value = unicodedata.normalize("NFKC", value) + else: + value = unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii") + value = re.sub(r"[^\w\s-]", "", value.lower()) + return re.sub(r"[-\s]+", "-", value).strip("-_") + + +def key_value_text_to_dict(text: str) -> dict: + kv = {} + for line in text.split("\n"): + if "=" in line: + try: + k, v = line.split("=") + except ValueError: + continue + kv[k] = v + return kv + + +async def key_value_file_to_dict(file: File) -> dict: + return key_value_text_to_dict(await file.contents()) + + +async def get_dockerfile_labels(dockerfile: File) -> dict: + return {k.replace("LABEL ", ""): v for k, v in (await key_value_file_to_dict(dockerfile)).items() if k.startswith("LABEL")} + + +async def get_version_from_dockerfile(dockerfile: File) -> str: + dockerfile_labels = await get_dockerfile_labels(dockerfile) + try: + return dockerfile_labels["io.airbyte.version"] + except KeyError: + raise Exception("Could not get the version from the Dockerfile labels.") + + +def create_and_open_file(file_path: Path) -> TextIOWrapper: + """Create a file and open it for writing. + + Args: + file_path (Path): The path to the file to create. + + Returns: + File: The file object. + """ + file_path.parent.mkdir(parents=True, exist_ok=True) + file_path.touch() + return file_path.open("w") + + +async def execute_concurrently(steps: List[Callable], concurrency=5): + tasks = [] + # Asyncer does not have builtin semaphore, so control concurrency via chunks of steps + # Anyio has semaphores but does not have the soonify method which allow access to results via the value task attribute. + for chunk in chunked(steps, concurrency): + async with asyncer.create_task_group() as task_group: + tasks += [task_group.soonify(step)() for step in chunk] + return [task.value for task in tasks] + + +async def export_container_to_tarball( + context: ConnectorContext, container: Container, tar_file_name: Optional[str] = None +) -> Tuple[Optional[File], Optional[Path]]: + """Save the container image to the host filesystem as a tar archive. + + Exporting a container image as a tar archive allows user to have a dagger built container image available on their host filesystem. + They can load this tar file to their main docker host with 'docker load'. + This mechanism is also used to share dagger built containers with other steps like AcceptanceTest that have their own dockerd service. + We 'docker load' this tar file to AcceptanceTest's docker host to make sure the container under test image is available for testing. + + Returns: + Tuple[Optional[File], Optional[Path]]: A tuple with the file object holding the tar archive on the host and its path. 
+ """ + if tar_file_name is None: + tar_file_name = f"{context.connector.technical_name}_{context.git_revision}.tar" + tar_file_name = slugify(tar_file_name) + local_path = Path(f"{context.host_image_export_dir_path}/{tar_file_name}") + export_success = await container.export(str(local_path), forced_compression=ImageLayerCompression.Gzip) + if export_success: + exported_file = ( + context.dagger_client.host().directory(context.host_image_export_dir_path, include=[tar_file_name]).file(tar_file_name) + ) + return exported_file, local_path + else: + return None, None + + +def format_duration(time_delta: datetime.timedelta) -> str: + total_seconds = time_delta.total_seconds() + if total_seconds < 60: + return "{:.2f}s".format(total_seconds) + minutes = int(total_seconds // 60) + seconds = int(total_seconds % 60) + return "{:02d}mn{:02d}s".format(minutes, seconds) + + +def sh_dash_c(lines: List[str]) -> List[str]: + """Wrap sequence of commands in shell for safe usage of dagger Container's with_exec method.""" + return ["sh", "-c", " && ".join(["set -o xtrace"] + lines)] + + +def transform_strs_to_paths(str_paths: List[str]) -> List[Path]: + """Transform a list of string paths to a list of Path objects. + + Args: + str_paths (List[str]): A list of string paths. + + Returns: + List[Path]: A list of Path objects. + """ + return [Path(str_path) for str_path in str_paths] diff --git a/airbyte-ci/connectors/pipelines/pipelines/tools/internal.py b/airbyte-ci/connectors/pipelines/pipelines/internal_tools/internal.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/tools/internal.py rename to airbyte-ci/connectors/pipelines/pipelines/internal_tools/internal.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py new file mode 100644 index 0000000000000..d08d7b84978ae --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py @@ -0,0 +1,303 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +"""Module declaring context related classes.""" + +import logging +import os +from datetime import datetime +from enum import Enum +from glob import glob +from types import TracebackType +from typing import List, Optional + +from asyncer import asyncify +from dagger import Client, Directory, File, Secret +from github import PullRequest +from pipelines import hacks +from pipelines.helpers.gcs import sanitize_gcs_credentials +from pipelines.models.bases import CIContext, Report +from pipelines.helpers.github import update_commit_status_check +from pipelines.helpers.slack import send_message_to_webhook +from pipelines.helpers.utils import AIRBYTE_REPO_URL + + +class ContextState(Enum): + """Enum to characterize the current context state, values are used for external representation on GitHub commit checks.""" + + INITIALIZED = {"github_state": "pending", "description": "Pipelines are being initialized..."} + RUNNING = {"github_state": "pending", "description": "Pipelines are running..."} + ERROR = {"github_state": "error", "description": "Something went wrong while running the Pipelines."} + SUCCESSFUL = {"github_state": "success", "description": "All Pipelines ran successfully."} + FAILURE = {"github_state": "failure", "description": "Pipeline failed."} + + +class PipelineContext: + """The pipeline context is used to store configuration for a specific pipeline run.""" + + PRODUCTION = bool(os.environ.get("PRODUCTION", False)) # Set this to True to enable production mode (e.g. to send PR comments) + + DEFAULT_EXCLUDED_FILES = ( + [".git", "airbyte-ci/connectors/pipelines/*"] + + glob("**/build", recursive=True) + + glob("**/.venv", recursive=True) + + glob("**/secrets", recursive=True) + + glob("**/__pycache__", recursive=True) + + glob("**/*.egg-info", recursive=True) + + glob("**/.vscode", recursive=True) + + glob("**/.pytest_cache", recursive=True) + + glob("**/.eggs", recursive=True) + + glob("**/.mypy_cache", recursive=True) + + glob("**/.DS_Store", recursive=True) + + glob("**/airbyte_ci_logs", recursive=True) + + glob("**/.gradle", recursive=True) + ) + + def __init__( + self, + pipeline_name: str, + is_local: bool, + git_branch: str, + git_revision: str, + gha_workflow_run_url: Optional[str] = None, + dagger_logs_url: Optional[str] = None, + pipeline_start_timestamp: Optional[int] = None, + ci_context: Optional[str] = None, + is_ci_optional: bool = False, + slack_webhook: Optional[str] = None, + reporting_slack_channel: Optional[str] = None, + pull_request: PullRequest = None, + ci_report_bucket: Optional[str] = None, + ci_gcs_credentials: Optional[str] = None, + ci_git_user: Optional[str] = None, + ci_github_access_token: Optional[str] = None, + open_report_in_browser: bool = True, + ): + """Initialize a pipeline context. + + Args: + pipeline_name (str): The pipeline name. + is_local (bool): Whether the context is for a local run or a CI run. + git_branch (str): The current git branch name. + git_revision (str): The current git revision, commit hash. + gha_workflow_run_url (Optional[str], optional): URL to the github action workflow run. Only valid for CI run. Defaults to None. + dagger_logs_url (Optional[str], optional): URL to the dagger logs. Only valid for CI run. Defaults to None. + pipeline_start_timestamp (Optional[int], optional): Timestamp at which the pipeline started. Defaults to None. + ci_context (Optional[str], optional): Pull requests, workflow dispatch or nightly build. Defaults to None. + is_ci_optional (bool, optional): Whether the CI is optional. 
Defaults to False. + slack_webhook (Optional[str], optional): Slack webhook to send messages to. Defaults to None. + reporting_slack_channel (Optional[str], optional): Slack channel to send messages to. Defaults to None. + pull_request (PullRequest, optional): The pull request object if the pipeline was triggered by a pull request. Defaults to None. + """ + self.pipeline_name = pipeline_name + self.is_local = is_local + self.git_branch = git_branch + self.git_revision = git_revision + self.gha_workflow_run_url = gha_workflow_run_url + self.dagger_logs_url = dagger_logs_url + self.pipeline_start_timestamp = pipeline_start_timestamp + self.created_at = datetime.utcnow() + self.ci_context = ci_context + self.state = ContextState.INITIALIZED + self.is_ci_optional = is_ci_optional + self.slack_webhook = slack_webhook + self.reporting_slack_channel = reporting_slack_channel + self.pull_request = pull_request + self.logger = logging.getLogger(self.pipeline_name) + self.dagger_client = None + self._report = None + self.dockerd_service = None + self.ci_gcs_credentials = sanitize_gcs_credentials(ci_gcs_credentials) if ci_gcs_credentials else None + self.ci_report_bucket = ci_report_bucket + self.ci_git_user = ci_git_user + self.ci_github_access_token = ci_github_access_token + self.started_at = None + self.stopped_at = None + self.secrets_to_mask = [] + self.open_report_in_browser = open_report_in_browser + update_commit_status_check(**self.github_commit_status) + + @property + def dagger_client(self) -> Client: # noqa D102 + return self._dagger_client + + @dagger_client.setter + def dagger_client(self, dagger_client: Client): # noqa D102 + self._dagger_client = dagger_client + + @property + def is_ci(self): # noqa D102 + return self.is_local is False + + @property + def is_pr(self): # noqa D102 + return self.ci_context == CIContext.PULL_REQUEST + + @property + def repo(self): # noqa D102 + return self.dagger_client.git(AIRBYTE_REPO_URL, keep_git_dir=True) + + @property + def report(self) -> Report: # noqa D102 + return self._report + + @report.setter + def report(self, report: Report): # noqa D102 + self._report = report + + @property + def ci_gcs_credentials_secret(self) -> Secret: + return self.dagger_client.set_secret("ci_gcs_credentials", self.ci_gcs_credentials) + + @property + def ci_github_access_token_secret(self) -> Secret: + return self.dagger_client.set_secret("ci_github_access_token", self.ci_github_access_token) + + @property + def github_commit_status(self) -> dict: + """Build a dictionary used as kwargs to the update_commit_status_check function.""" + return { + "sha": self.git_revision, + "state": self.state.value["github_state"], + "target_url": self.gha_workflow_run_url, + "description": self.state.value["description"], + "context": self.pipeline_name, + "should_send": self.is_pr, + "logger": self.logger, + "is_optional": self.is_ci_optional, + } + + @property + def should_send_slack_message(self) -> bool: + return self.slack_webhook is not None and self.reporting_slack_channel is not None + + @property + def has_dagger_cloud_token(self) -> bool: + return "_EXPERIMENTAL_DAGGER_CLOUD_TOKEN" in os.environ + + @property + def dagger_cloud_url(self) -> str: + """Gets the link to the Dagger Cloud runs page for the current commit.""" + if self.is_local or not self.has_dagger_cloud_token: + return None + + return f"https://alpha.dagger.cloud/changeByPipelines?filter=dagger.io/git.ref:{self.git_revision}" + + def get_repo_file(self, file_path: str) -> File: + """Get a file from the 
current repository.
+
+        The file is extracted from the host file system.
+
+        Args:
+            file_path (str): Path to the file to get.
+
+        Returns:
+            Path: The selected repo file.
+        """
+        return self.dagger_client.host().file(file_path)
+
+    def get_repo_dir(self, subdir: str = ".", exclude: Optional[List[str]] = None, include: Optional[List[str]] = None) -> Directory:
+        """Get a directory from the current repository.
+
+        The directory is extracted from the host file system.
+        A couple of files or directories that could corrupt builds are excluded by default (check DEFAULT_EXCLUDED_FILES).
+
+        Args:
+            subdir (str, optional): Path to the subdirectory to get. Defaults to "." to get the full repository.
+            exclude (List[str], optional): List of files or directories to exclude from the directory. Defaults to None.
+            include (List[str], optional): List of files or directories to include in the directory. Defaults to None.
+
+        Returns:
+            Directory: The selected repo directory.
+        """
+        if exclude is None:
+            exclude = self.DEFAULT_EXCLUDED_FILES
+        else:
+            exclude += self.DEFAULT_EXCLUDED_FILES
+            exclude = list(set(exclude))
+        exclude.sort()  # Sort to keep the order stable so we don't bust the cache; casting exclude to a set can change the order.
+        if subdir != ".":
+            subdir = f"{subdir}/" if not subdir.endswith("/") else subdir
+            exclude = [f.replace(subdir, "") for f in exclude if subdir in f]
+        return self.dagger_client.host().directory(subdir, exclude=exclude, include=include)
+
+    def create_slack_message(self) -> str:
+        raise NotImplementedError()
+
+    async def __aenter__(self):
+        """Perform setup operation for the PipelineContext.
+
+        Updates the current commit status on Github.
+
+        Raises:
+            Exception: An error is raised when the context was not initialized with a Dagger client.
+        Returns:
+            PipelineContext: A running instance of the PipelineContext.
+        """
+        if self.dagger_client is None:
+            raise Exception("A Pipeline can't be entered with an undefined dagger_client")
+        self.state = ContextState.RUNNING
+        self.started_at = datetime.utcnow()
+        self.logger.info("Caching the latest CDK version...")
+        await hacks.cache_latest_cdk(self.dagger_client)
+        await asyncify(update_commit_status_check)(**self.github_commit_status)
+        if self.should_send_slack_message:
+            await asyncify(send_message_to_webhook)(self.create_slack_message(), self.reporting_slack_channel, self.slack_webhook)
+        return self
+
+    @staticmethod
+    def determine_final_state(report: Optional[Report], exception_value: Optional[BaseException]) -> ContextState:
+        """Determine the final state of the context from the report or the exception value.
+
+        Args:
+            report (Optional[Report]): The pipeline report, if any.
+            exception_value (Optional[BaseException]): The exception value if an exception was raised in the context execution, None otherwise.
+        Returns:
+            ContextState: The final state of the context.
+        """
+        if exception_value is not None or report is None:
+            return ContextState.ERROR
+        if report is not None and report.failed_steps:
+            return ContextState.FAILURE
+        if report is not None and report.success:
+            return ContextState.SUCCESSFUL
+        raise Exception(
+            f"The final state of the context could not be determined for the report and exception value provided. Report: {report}, Exception: {exception_value}"
+        )
+
+    async def __aexit__(
+        self, exception_type: Optional[type[BaseException]], exception_value: Optional[BaseException], traceback: Optional[TracebackType]
+    ) -> bool:
+        """Perform teardown operation for the PipelineContext.
+
+    def create_slack_message(self) -> str:
+        raise NotImplementedError()
+
+    async def __aenter__(self):
+        """Perform setup operation for the PipelineContext.
+
+        Updates the current commit status on Github.
+
+        Raises:
+            Exception: An error is raised when the context was not initialized with a Dagger client
+        Returns:
+            PipelineContext: A running instance of the PipelineContext.
+        """
+        if self.dagger_client is None:
+            raise Exception("A Pipeline can't be entered with an undefined dagger_client")
+        self.state = ContextState.RUNNING
+        self.started_at = datetime.utcnow()
+        self.logger.info("Caching the latest CDK version...")
+        await hacks.cache_latest_cdk(self.dagger_client)
+        await asyncify(update_commit_status_check)(**self.github_commit_status)
+        if self.should_send_slack_message:
+            await asyncify(send_message_to_webhook)(self.create_slack_message(), self.reporting_slack_channel, self.slack_webhook)
+        return self
+
+    @staticmethod
+    def determine_final_state(report: Optional[Report], exception_value: Optional[BaseException]) -> ContextState:
+        """Determine the final state of the context from the report or the exception value.
+
+        Args:
+            report (Optional[Report]): The pipeline report if any.
+            exception_value (Optional[BaseException]): The exception value if an exception was raised in the context execution, None otherwise.
+        Returns:
+            ContextState: The final state of the context.
+        """
+        if exception_value is not None or report is None:
+            return ContextState.ERROR
+        if report.failed_steps:
+            return ContextState.FAILURE
+        if report.success:
+            return ContextState.SUCCESSFUL
+        raise Exception(
+            f"The final state of the context could not be determined for the report and exception value provided. Report: {report}, Exception: {exception_value}"
+        )
+
+    async def __aexit__(
+        self, exception_type: Optional[type[BaseException]], exception_value: Optional[BaseException], traceback: Optional[TracebackType]
+    ) -> bool:
+        """Perform teardown operation for the PipelineContext.
+
+        On the context exit the following operations will happen:
+            - Log the error value if an error was handled.
+            - Log the test report.
+            - Update the commit status check on GitHub if running in a CI environment.
+
+        It should gracefully handle all the execution errors that happened and always upload a test report and update commit status check.
+
+        Args:
+            exception_type (Optional[type[BaseException]]): The exception type if an exception was raised in the context execution, None otherwise.
+            exception_value (Optional[BaseException]): The exception value if an exception was raised in the context execution, None otherwise.
+            traceback (Optional[TracebackType]): The traceback if an exception was raised in the context execution, None otherwise.
+        Returns:
+            bool: Whether the teardown operation ran successfully.
+        """
+        self.state = self.determine_final_state(self.report, exception_value)
+        self.stopped_at = datetime.utcnow()
+
+        if exception_value:
+            self.logger.error("An error was handled by the Pipeline", exc_info=True)
+        if self.report is None:
+            self.logger.error("No test report was provided. This is probably due to an upstream error")
+            self.report = Report(self, steps_results=[])
+
+        self.report.print()
+
+        await asyncify(update_commit_status_check)(**self.github_commit_status)
+        if self.should_send_slack_message:
+            await asyncify(send_message_to_webhook)(self.create_slack_message(), self.reporting_slack_channel, self.slack_webhook)
+        # Suppress the exception if it was handled.
+        return True
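The setup and teardown above are driven by the caller, which owns the Dagger connection. A minimal sketch of that driving code, assuming a hypothetical `run_all_steps` coroutine that produces the step results and that `Report` is imported from the reports module added below:

    import dagger

    async def run_pipeline(context):
        async with dagger.Connection(dagger.Config()) as client:
            context.dagger_client = client
            async with context:  # __aenter__ flips the state to RUNNING and updates the commit status
                results = await run_all_steps(context)  # hypothetical helper
                context.report = Report(context, steps_results=results)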
+# + +"""This module declare base / abstract models to be reused in a pipeline lifecycle.""" + +from __future__ import annotations + +import anyio +import json + +from dataclasses import dataclass, field +from datetime import datetime, timedelta + + +from typing import List + +from anyio import Path +from connector_ops.utils import console +from pipelines.consts import GCS_PUBLIC_DOMAIN, LOCAL_REPORTS_PATH_ROOT +from pipelines.models.contexts import PipelineContext +from pipelines.dagger.actions import remote_storage +from pipelines.models.steps import StepResult, StepStatus +from pipelines.helpers.utils import format_duration +from rich.console import Group +from rich.panel import Panel +from rich.style import Style +from rich.table import Table +from rich.text import Text + +@dataclass(frozen=True) +class Report: + """A dataclass to build reports to share pipelines executions results with the user.""" + + pipeline_context: PipelineContext + steps_results: List[StepResult] + created_at: datetime = field(default_factory=datetime.utcnow) + name: str = "REPORT" + filename: str = "output" + + @property + def report_output_prefix(self) -> str: # noqa D102 + return self.pipeline_context.report_output_prefix + + @property + def json_report_file_name(self) -> str: # noqa D102 + return self.filename + ".json" + + @property + def json_report_remote_storage_key(self) -> str: # noqa D102 + return f"{self.report_output_prefix}/{self.json_report_file_name}" + + @property + def failed_steps(self) -> List[StepResult]: # noqa D102 + return [step_result for step_result in self.steps_results if step_result.status is StepStatus.FAILURE] + + @property + def successful_steps(self) -> List[StepResult]: # noqa D102 + return [step_result for step_result in self.steps_results if step_result.status is StepStatus.SUCCESS] + + @property + def skipped_steps(self) -> List[StepResult]: # noqa D102 + return [step_result for step_result in self.steps_results if step_result.status is StepStatus.SKIPPED] + + @property + def success(self) -> bool: # noqa D102 + return len(self.failed_steps) == 0 and (len(self.skipped_steps) > 0 or len(self.successful_steps) > 0) + + @property + def run_duration(self) -> timedelta: # noqa D102 + return self.pipeline_context.stopped_at - self.pipeline_context.started_at + + @property + def lead_duration(self) -> timedelta: # noqa D102 + return self.pipeline_context.stopped_at - self.pipeline_context.created_at + + @property + def remote_storage_enabled(self) -> bool: # noqa D102 + return self.pipeline_context.is_ci + + async def save_local(self, filename: str, content: str) -> Path: + """Save the report files locally.""" + local_path = anyio.Path(f"{LOCAL_REPORTS_PATH_ROOT}/{self.report_output_prefix}/{filename}") + await local_path.parents[0].mkdir(parents=True, exist_ok=True) + await local_path.write_text(content) + return local_path + + async def save_remote(self, local_path: Path, remote_key: str, content_type: str = None) -> int: + gcs_cp_flags = None if content_type is None else [f"--content-type={content_type}"] + local_file = self.pipeline_context.dagger_client.host().directory(".", include=[str(local_path)]).file(str(local_path)) + report_upload_exit_code, _, _ = await remote_storage.upload_to_gcs( + dagger_client=self.pipeline_context.dagger_client, + file_to_upload=local_file, + key=remote_key, + bucket=self.pipeline_context.ci_report_bucket, + gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, + flags=gcs_cp_flags, + ) + gcs_uri = "gs://" + 
+        gcs_uri = f"gs://{self.pipeline_context.ci_report_bucket}/{remote_key}"
+        public_url = f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{remote_key}"
+        if report_upload_exit_code != 0:
+            self.pipeline_context.logger.error(f"Uploading {local_path} to {gcs_uri} failed.")
+        else:
+            self.pipeline_context.logger.info(f"Uploading {local_path} to {gcs_uri} succeeded. Public URL: {public_url}")
+        return report_upload_exit_code
+
+    async def save(self) -> None:
+        """Save the report files."""
+        local_json_path = await self.save_local(self.json_report_file_name, self.to_json())
+        absolute_path = await local_json_path.absolute()
+        self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
+        if self.remote_storage_enabled:
+            await self.save_remote(local_json_path, self.json_report_remote_storage_key, "application/json")
+
+    def to_json(self) -> str:
+        """Create a JSON representation of the report.
+
+        Returns:
+            str: The JSON representation of the report.
+        """
+        return json.dumps(
+            {
+                "pipeline_name": self.pipeline_context.pipeline_name,
+                "run_timestamp": self.pipeline_context.started_at.isoformat(),
+                "run_duration": self.run_duration.total_seconds(),
+                "success": self.success,
+                "failed_steps": [s.step.__class__.__name__ for s in self.failed_steps],
+                "successful_steps": [s.step.__class__.__name__ for s in self.successful_steps],
+                "skipped_steps": [s.step.__class__.__name__ for s in self.skipped_steps],
+                "gha_workflow_run_url": self.pipeline_context.gha_workflow_run_url,
+                "pipeline_start_timestamp": self.pipeline_context.pipeline_start_timestamp,
+                "pipeline_end_timestamp": round(self.pipeline_context.stopped_at.timestamp()),
+                "pipeline_duration": round(self.pipeline_context.stopped_at.timestamp()) - self.pipeline_context.pipeline_start_timestamp,
+                "git_branch": self.pipeline_context.git_branch,
+                "git_revision": self.pipeline_context.git_revision,
+                "ci_context": self.pipeline_context.ci_context,
+                "pull_request_url": self.pipeline_context.pull_request.html_url if self.pipeline_context.pull_request else None,
+                "dagger_cloud_url": self.pipeline_context.dagger_cloud_url,
+            }
+        )
+
+    def print(self):
+        """Print the test report to the console in a nice way."""
+        pipeline_name = self.pipeline_context.pipeline_name
+        main_panel_title = Text(f"{pipeline_name.upper()} - {self.name}")
+        main_panel_title.stylize(Style(color="blue", bold=True))
+        duration_subtitle = Text(f"⏲️ Total pipeline duration for {pipeline_name}: {format_duration(self.run_duration)}")
+        step_results_table = Table(title="Steps results")
+        step_results_table.add_column("Step")
+        step_results_table.add_column("Result")
+        step_results_table.add_column("Finished after")
+
+        for step_result in self.steps_results:
+            step = Text(step_result.step.title)
+            step.stylize(step_result.status.get_rich_style())
+            result = Text(step_result.status.value)
+            result.stylize(step_result.status.get_rich_style())
+
+            if step_result.status is StepStatus.SKIPPED:
+                step_results_table.add_row(step, result, "N/A")
+            else:
+                run_time = format_duration(step_result.created_at - step_result.step.started_at)
+                step_results_table.add_row(step, result, run_time)
+
+        to_render = [step_results_table]
+        if self.failed_steps:
+            sub_panels = []
+            for failed_step in self.failed_steps:
+                errors = Text(failed_step.stderr)
+                panel_title = Text(f"{pipeline_name} {failed_step.step.title.lower()} failures")
+                panel_title.stylize(Style(color="red", bold=True))
+                sub_panel = Panel(errors, title=panel_title)
+                sub_panels.append(sub_panel)
+            failures_group = Group(*sub_panels)
+            to_render.append(failures_group)
+
+        if self.pipeline_context.dagger_cloud_url:
+            self.pipeline_context.logger.info(f"🔗 View runs for commit in Dagger Cloud: {self.pipeline_context.dagger_cloud_url}")
+
+        main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle)
+        console.print(main_panel)
+
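Since `save()` always writes the JSON report locally, a quick way to inspect a run afterwards is to load that file. The path below is illustrative (it depends on LOCAL_REPORTS_PATH_ROOT and the report output prefix); only the keys come from `to_json()` above:

    import json
    from pathlib import Path

    payload = json.loads(Path("pipeline_reports/<prefix>/output.json").read_text())  # hypothetical location
    print(payload["pipeline_name"], payload["success"], round(payload["run_duration"]))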
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
new file mode 100644
index 0000000000000..ed7294d2aef4d
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
@@ -0,0 +1,585 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+from __future__ import annotations
+
+import logging
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from datetime import datetime, timedelta
+from enum import Enum
+from pathlib import Path
+from typing import Any, ClassVar, List, Optional
+
+import anyio
+import asyncer
+import dagger
+from dagger import CacheSharingMode, CacheVolume, Client, Container, DaggerError
+from rich.style import Style
+
+import pipelines.dagger.actions.system.docker
+from pipelines import hacks, main_logger
+from pipelines.consts import AMAZONCORRETTO_IMAGE
+from pipelines.dagger.actions import secrets
+from pipelines.dagger.actions.python.pipx import with_installed_pipx_package
+from pipelines.dagger.actions.python.poetry import with_poetry_module
+from pipelines.dagger.containers.python import with_python_base
+from pipelines.helpers import sentry_utils
+from pipelines.helpers.utils import format_duration, get_exec_result, sh_dash_c
+from pipelines.models.contexts import PipelineContext
+
+
+@dataclass
+class MountPath:
+    path: Path
+    optional: bool = False
+
+    def _cast_fields(self):
+        self.path = Path(self.path)
+        self.optional = bool(self.optional)
+
+    def _check_exists(self):
+        if not self.path.exists():
+            message = f"{self.path} does not exist."
+            if self.optional:
+                main_logger.warning(message)
+            else:
+                raise FileNotFoundError(message)
+
+    def __post_init__(self):
+        self._cast_fields()
+        self._check_exists()
+
+    def __str__(self):
+        return str(self.path)
+
+    @property
+    def is_file(self) -> bool:
+        return self.path.is_file()
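+
+
+# Example usage (paths hypothetical; a required path must exist on the host or
+# MountPath raises FileNotFoundError at construction time):
+#
+#   MountPath("airbyte-integrations/connectors/source-pokeapi")
+#   MountPath("tools/optional-config.yaml", optional=True)  # only warns if absent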
+
+
+@dataclass(frozen=True)
+class StepResult:
+    """A dataclass to capture the result of a step."""
+
+    step: Step
+    status: StepStatus
+    created_at: datetime = field(default_factory=datetime.utcnow)
+    stderr: Optional[str] = None
+    stdout: Optional[str] = None
+    output_artifact: Any = None
+    exc_info: Optional[Exception] = None
+
+    def __repr__(self) -> str:  # noqa D105
+        return f"{self.step.title}: {self.status.value}"
+
+    def __str__(self) -> str:  # noqa D105
+        return f"{self.step.title}: {self.status.value}\n\nSTDOUT:\n{self.stdout}\n\nSTDERR:\n{self.stderr}"
+
+    def __post_init__(self):
+        if self.stderr:
+            super().__setattr__("stderr", self.redact_secrets_from_string(self.stderr))
+        if self.stdout:
+            super().__setattr__("stdout", self.redact_secrets_from_string(self.stdout))
+
+    def redact_secrets_from_string(self, value: str) -> str:
+        for secret in self.step.context.secrets_to_mask:
+            value = value.replace(secret, "********")
+        return value
+
+
+class StepStatus(Enum):
+    """An Enum to characterize the success, failure or skipping of a Step."""
+
+    SUCCESS = "Successful"
+    FAILURE = "Failed"
+    SKIPPED = "Skipped"
+
+    def get_rich_style(self) -> Style:
+        """Match color used in the console output to the step status."""
+        if self is StepStatus.SUCCESS:
+            return Style(color="green")
+        if self is StepStatus.FAILURE:
+            return Style(color="red", bold=True)
+        if self is StepStatus.SKIPPED:
+            return Style(color="yellow")
+
+    def get_emoji(self) -> str:
+        """Match emoji used in the console output to the step status."""
+        if self is StepStatus.SUCCESS:
+            return "✅"
+        if self is StepStatus.FAILURE:
+            return "❌"
+        if self is StepStatus.SKIPPED:
+            return "🟡"
+
+    def __str__(self) -> str:  # noqa D105
+        return self.value
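+
+
+# StepResult.__post_init__ above masks any secret registered on the step's
+# context, e.g. (illustrative values; `step` is an existing Step instance):
+#
+#   step.context.secrets_to_mask = ["s3cr3t"]
+#   StepResult(step, StepStatus.SUCCESS, stdout="token=s3cr3t").stdout
+#   # -> "token=********"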
+
+
+class Step(ABC):
+    """An abstract class to declare and run pipeline steps."""
+
+    title: ClassVar[str]
+    max_retries: ClassVar[int] = 0
+    max_dagger_error_retries: ClassVar[int] = 3
+    should_log: ClassVar[bool] = True
+    success_exit_code: ClassVar[int] = 0
+    skipped_exit_code: ClassVar[Optional[int]] = None
+    # The max duration of a step run. If the step runs for more than this duration it will be considered as timed out.
+    # The default of 5 hours is arbitrary and can be changed if needed.
+    max_duration: ClassVar[timedelta] = timedelta(hours=5)
+
+    retry_delay = timedelta(seconds=10)
+
+    def __init__(self, context: PipelineContext) -> None:  # noqa D107
+        self.context = context
+        self.retry_count = 0
+        self.started_at = None
+        self.stopped_at = None
+
+    @property
+    def run_duration(self) -> timedelta:
+        if self.started_at and self.stopped_at:
+            return self.stopped_at - self.started_at
+        else:
+            return timedelta(seconds=0)
+
+    @property
+    def logger(self) -> logging.Logger:
+        if self.should_log:
+            return logging.getLogger(f"{self.context.pipeline_name} - {self.title}")
+        else:
+            disabled_logger = logging.getLogger()
+            disabled_logger.disabled = True
+            return disabled_logger
+
+    @property
+    def dagger_client(self) -> Client:
+        return self.context.dagger_client.pipeline(self.title)
+
+    async def log_progress(self, completion_event: anyio.Event) -> None:
+        """Log the step progress every 30 seconds until the step is done."""
+        while not completion_event.is_set():
+            duration = datetime.utcnow() - self.started_at
+            elapsed_seconds = duration.total_seconds()
+            if elapsed_seconds > 30 and round(elapsed_seconds) % 30 == 0:
+                self.logger.info(f"⏳ Still running... (duration: {format_duration(duration)})")
+            await anyio.sleep(1)
+
+    async def run_with_completion(self, completion_event: anyio.Event, *args, **kwargs) -> StepResult:
+        """Run the step with a timeout and set the completion event when the step is done."""
+        try:
+            with anyio.fail_after(self.max_duration.total_seconds()):
+                result = await self._run(*args, **kwargs)
+                completion_event.set()
+                return result
+        except TimeoutError:
+            self.retry_count = self.max_retries + 1
+            self.logger.error(f"🚨 {self.title} timed out after {self.max_duration}. No additional retry will happen.")
+            completion_event.set()
+            return self._get_timed_out_step_result()
+
+    @sentry_utils.with_step_context
+    async def run(self, *args, **kwargs) -> StepResult:
+        """Public method to run the step. It outputs a step result.
+
+        If an unexpected dagger error happens it outputs a failed step result with the exception payload.
+
+        Returns:
+            StepResult: The step result following the step run.
+        """
+        self.logger.info(f"🚀 Start {self.title}")
+        self.started_at = datetime.utcnow()
+        completion_event = anyio.Event()
+        try:
+            async with asyncer.create_task_group() as task_group:
+                soon_result = task_group.soonify(self.run_with_completion)(completion_event, *args, **kwargs)
+                task_group.soonify(self.log_progress)(completion_event)
+            step_result = soon_result.value
+        except DaggerError as e:
+            self.logger.error("Step failed with an unexpected dagger error", exc_info=e)
+            step_result = StepResult(self, StepStatus.FAILURE, stderr=str(e), exc_info=e)
+
+        self.stopped_at = datetime.utcnow()
+        self.log_step_result(step_result)
+
+        lets_retry = self.should_retry(step_result)
+        step_result = await self.retry(step_result, *args, **kwargs) if lets_retry else step_result
+        return step_result
+
+    def should_retry(self, step_result: StepResult) -> bool:
+        """Return True if the step should be retried."""
+        if step_result.status is not StepStatus.FAILURE:
+            return False
+        max_retries = self.max_dagger_error_retries if step_result.exc_info else self.max_retries
+        return self.retry_count < max_retries and max_retries > 0
+
+    async def retry(self, step_result, *args, **kwargs) -> StepResult:
+        self.retry_count += 1
+        self.logger.warning(
+            f"Failed with error: {step_result.stderr}.\nRetry #{self.retry_count} in {self.retry_delay.total_seconds()} seconds..."
+        )
+        await anyio.sleep(self.retry_delay.total_seconds())
+        return await self.run(*args, **kwargs)
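+
+    # Concrete steps opt into retries through the class attributes above, e.g.
+    # (hypothetical subclass):
+    #
+    #   class FlakyCheck(Step):
+    #       title = "Flaky check"
+    #       max_retries = 2                      # retry regular failures twice
+    #       retry_delay = timedelta(seconds=30)  # instead of the default 10s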
+ ) + await anyio.sleep(self.retry_delay.total_seconds()) + return await self.run(*args, **kwargs) + + def log_step_result(self, result: StepResult) -> None: + """Log the step result. + + Args: + result (StepResult): The step result to log. + """ + duration = format_duration(self.run_duration) + if result.status is StepStatus.FAILURE: + self.logger.info(f"{result.status.get_emoji()} failed (duration: {duration})") + if result.status is StepStatus.SKIPPED: + self.logger.info(f"{result.status.get_emoji()} was skipped (duration: {duration})") + if result.status is StepStatus.SUCCESS: + self.logger.info(f"{result.status.get_emoji()} was successful (duration: {duration})") + + @abstractmethod + async def _run(self, *args, **kwargs) -> StepResult: + """Implement the execution of the step and return a step result. + + Returns: + StepResult: The result of the step run. + """ + raise NotImplementedError("Steps must define a '_run' attribute.") + + def skip(self, reason: str = None) -> StepResult: + """Declare a step as skipped. + + Args: + reason (str, optional): Reason why the step was skipped. + + Returns: + StepResult: A skipped step result. + """ + return StepResult(self, StepStatus.SKIPPED, stdout=reason) + + def get_step_status_from_exit_code( + self, + exit_code: int, + ) -> StepStatus: + """Map an exit code to a step status. + + Args: + exit_code (int): A process exit code. + + Raises: + ValueError: Raised if the exit code is not mapped to a step status. + + Returns: + StepStatus: The step status inferred from the exit code. + """ + if exit_code == self.success_exit_code: + return StepStatus.SUCCESS + elif self.skipped_exit_code is not None and exit_code == self.skipped_exit_code: + return StepStatus.SKIPPED + else: + return StepStatus.FAILURE + + async def get_step_result(self, container: Container) -> StepResult: + """Concurrent retrieval of exit code, stdout and stdout of a container. + + Create a StepResult object from these objects. + + Args: + container (Container): The container from which we want to infer a step result/ + + Returns: + StepResult: Failure or success with stdout and stderr. + """ + exit_code, stdout, stderr = await get_exec_result(container) + return StepResult( + self, + self.get_step_status_from_exit_code(exit_code), + stderr=stderr, + stdout=stdout, + output_artifact=container, + ) + + def _get_timed_out_step_result(self) -> StepResult: + return StepResult( + self, + StepStatus.FAILURE, + stdout=f"Timed out after the max duration of {format_duration(self.max_duration)}. Please checkout the Dagger logs to see what happened.", + ) + + +class NoOpStep(Step): + """A step that does nothing.""" + + title = "No Op" + should_log = False + + def __init__(self, context: PipelineContext, step_status: StepStatus) -> None: + super().__init__(context) + self.step_status = step_status + + async def _run(self, *args, **kwargs) -> StepResult: + return StepResult(self, self.step_status) + + +class SimpleDockerStep(Step): + def __init__( + self, + title: str, + context: PipelineContext, + paths_to_mount: List[MountPath] = [], + internal_tools: List[MountPath] = [], + secrets: dict[str, dagger.Secret] = {}, + env_variables: dict[str, str] = {}, + working_directory: str = "/", + command: Optional[List[str]] = None, + ): + """A simple step that runs a given command in a container. + + Args: + title (str): name of the step + context (PipelineContext): context of the step + paths_to_mount (List[MountPath], optional): directory paths to mount. Defaults to []. 
+
+
+class SimpleDockerStep(Step):
+    def __init__(
+        self,
+        title: str,
+        context: PipelineContext,
+        paths_to_mount: List[MountPath] = [],
+        internal_tools: List[MountPath] = [],
+        secrets: dict[str, dagger.Secret] = {},
+        env_variables: dict[str, str] = {},
+        working_directory: str = "/",
+        command: Optional[List[str]] = None,
+    ):
+        """A simple step that runs a given command in a container.
+
+        Args:
+            title (str): name of the step
+            context (PipelineContext): context of the step
+            paths_to_mount (List[MountPath], optional): directory paths to mount. Defaults to [].
+            internal_tools (List[MountPath], optional): internal tools to install. Defaults to [].
+            secrets (dict[str, dagger.Secret], optional): secrets to add to the container. Defaults to {}.
+            env_variables (dict[str, str], optional): env variables to set in the container. Defaults to {}.
+            working_directory (str, optional): working directory to run the command in. Defaults to "/".
+            command (Optional[List[str]], optional): The default command to run. Defaults to None.
+        """
+        self.title = title
+        super().__init__(context)
+
+        self.paths_to_mount = paths_to_mount
+        self.working_directory = working_directory
+        self.internal_tools = internal_tools
+        self.secrets = secrets
+        self.env_variables = env_variables
+        self.command = command
+
+    def _mount_paths(self, container: dagger.Container) -> dagger.Container:
+        for path_to_mount in self.paths_to_mount:
+            if path_to_mount.optional and not path_to_mount.path.exists():
+                continue
+
+            path_string = str(path_to_mount)
+            destination_path = f"/{path_string}"
+            if path_to_mount.is_file:
+                file_to_load = self.context.get_repo_file(path_string)
+                container = container.with_mounted_file(destination_path, file_to_load)
+            else:
+                container = container.with_mounted_directory(destination_path, self.context.get_repo_dir(path_string))
+        return container
+
+    async def _install_internal_tools(self, container: dagger.Container) -> dagger.Container:
+        for internal_tool in self.internal_tools:
+            container = await with_installed_pipx_package(self.context, container, str(internal_tool))
+        return container
+
+    def _set_workdir(self, container: dagger.Container) -> dagger.Container:
+        return container.with_workdir(self.working_directory)
+
+    def _set_env_variables(self, container: dagger.Container) -> dagger.Container:
+        for key, value in self.env_variables.items():
+            container = container.with_env_variable(key, value)
+        return container
+
+    def _set_secrets(self, container: dagger.Container) -> dagger.Container:
+        for key, value in self.secrets.items():
+            container = container.with_secret_variable(key, value)
+        return container
+
+    async def init_container(self) -> dagger.Container:
+        # TODO (ben): Replace with python base container when available
+        container = with_python_base(self.context)
+
+        container = self._mount_paths(container)
+        container = self._set_env_variables(container)
+        container = self._set_secrets(container)
+        container = await self._install_internal_tools(container)
+        container = self._set_workdir(container)
+
+        return container
+
+    async def _run(self, command=None) -> StepResult:
+        command_to_run = command or self.command
+        if not command_to_run:
+            raise ValueError(f"No command given to the {self.title} step")
+
+        container_to_run = await self.init_container()
+        return await self.get_step_result(container_to_run.with_exec(command_to_run))
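+
+
+# Hypothetical instantiation: run a one-off command with a repo file mounted
+# (the file lands at /<repo-relative-path>, as _mount_paths above prefixes "/"):
+#
+#   step = SimpleDockerStep(
+#       title="Print metadata",
+#       context=context,
+#       paths_to_mount=[MountPath("path/to/metadata.yaml")],
+#       command=["cat", "/path/to/metadata.yaml"],
+#   )
+#   result = await step.run()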
+
+
+class PoetryRunStep(Step):
+    def __init__(self, context: PipelineContext, title: str, parent_dir_path: str, module_path: str):
+        """A simple step that runs a given command inside a poetry project.
+
+        Args:
+            context (PipelineContext): context of the step
+            title (str): name of the step
+            parent_dir_path (str): The path to the parent directory of the poetry project
+            module_path (str): The path to the poetry project
+        """
+        self.title = title
+        super().__init__(context)
+
+        parent_dir = self.context.get_repo_dir(parent_dir_path)
+        self.poetry_run_container = with_poetry_module(self.context, parent_dir, module_path).with_entrypoint(["poetry", "run"])
+
+    async def _run(self, poetry_run_args: list) -> StepResult:
+        poetry_run_exec = self.poetry_run_container.with_exec(poetry_run_args)
+        return await self.get_step_result(poetry_run_exec)
+
+
+class GradleTask(Step, ABC):
+    """
+    A step to run a Gradle task.
+
+    Attributes:
+        title (str): The step title.
+        gradle_task_name (str): The Gradle task name to run.
+        bind_to_docker_host (bool): Whether to install the docker client and bind it to the host.
+        mount_connector_secrets (bool): Whether to mount connector secrets.
+    """
+
+    DEFAULT_GRADLE_TASK_OPTIONS = ("--no-daemon", "--scan", "--build-cache", "--console=plain")
+
+    gradle_task_name: ClassVar[str]
+    bind_to_docker_host: ClassVar[bool] = False
+    mount_connector_secrets: ClassVar[bool] = False
+
+    def __init__(self, context: PipelineContext) -> None:
+        super().__init__(context)
+
+    @property
+    def connector_java_build_cache(self) -> CacheVolume:
+        # TODO: remove this once we finish the project to boost source-postgres CI performance.
+        # We should use a static gradle-cache volume name.
+        cache_volume_name = hacks.get_gradle_cache_volume_name(self.context, self.logger)
+        return self.context.dagger_client.cache_volume(cache_volume_name)
+
+    @property
+    def build_include(self) -> List[str]:
+        """Retrieve the list of source code directories required to run a Java connector Gradle task.
+
+        The list is different according to the connector type.
+
+        Returns:
+            List[str]: List of directories or files to be mounted to the container to run a Java connector Gradle task.
+        """
+        return [
+            str(dependency_directory)
+            for dependency_directory in self.context.connector.get_local_dependency_paths(with_test_dependencies=True)
+        ]
+
+    def _get_gradle_command(self, task: str) -> List[str]:
+        return sh_dash_c(
+            [
+                # The gradle command is chained in between a couple of rsyncs which load from- and store to the cache volume.
+                "(rsync -a --stats /root/gradle-cache/ /root/.gradle || true)",
+                f"./gradlew {' '.join(self.DEFAULT_GRADLE_TASK_OPTIONS)} {task}",
+                "(rsync -a --stats /root/.gradle/ /root/gradle-cache || true)",
+            ]
+        )
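+
+    # Assuming the sh_dash_c helper chains its arguments with "&&" into a single
+    # `sh -c` invocation, the exec produced above is equivalent to (task name
+    # illustrative):
+    #
+    #   sh -c '(rsync -a --stats /root/gradle-cache/ /root/.gradle || true) && \
+    #          ./gradlew --no-daemon --scan --build-cache --console=plain :build && \
+    #          (rsync -a --stats /root/.gradle/ /root/gradle-cache || true)'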
+
+    async def _run(self) -> StepResult:
+        include = [
+            ".root",
+            ".env",
+            "build.gradle",
+            "deps.toml",
+            "gradle.properties",
+            "gradle",
+            "gradlew",
+            "LICENSE_SHORT",
+            "settings.gradle",
+            "tools/gradle",
+            "spotbugs-exclude-filter-file.xml",
+            "buildSrc",
+            "tools/bin/build_image.sh",
+            "tools/lib/lib.sh",
+            "tools/gradle/codestyle",
+            "pyproject.toml",
+            "airbyte-cdk/java/airbyte-cdk/**",
+        ] + self.build_include
+
+        yum_packages_to_install = [
+            "docker",  # required by :integrationTestJava.
+            "findutils",  # gradle requires xargs, which is shipped in findutils.
+            "jq",  # required by :airbyte-connector-test-harnesses:acceptance-test-harness to inspect docker images.
+            "npm",  # required by :format.
+            "python3.11-pip",  # required by :format.
+            "rsync",  # required for gradle cache synchronization.
+        ]
+
+        # Define a gradle container which will be cached and re-used for all tasks.
+        # We should do our best to cram any generic & expensive layers in here.
+        gradle_container = (
+            self.dagger_client.container()
+            # Use a linux+jdk base image with long-term support, such as amazoncorretto.
+            .from_(AMAZONCORRETTO_IMAGE)
+            # Install a bunch of packages as early as possible.
+            .with_exec(
+                sh_dash_c(
+                    [
+                        # Update first, but in the same .with_exec step as the package installation.
+                        # Otherwise, we risk caching stale package URLs.
+                        "yum update -y",
+                        f"yum install -y {' '.join(yum_packages_to_install)}",
+                        # Remove any dangly bits.
+                        "yum clean all",
+                        # Deliberately soft-remove docker, so that the `docker` CLI is unavailable by default.
+                        # This is a defensive choice to enforce the expectation that, as a general rule, gradle tasks do not rely on docker.
+                        "yum remove -y --noautoremove docker",  # remove docker package but not its dependencies
+                        "yum install -y --downloadonly docker",  # have docker package in place for quick install
+                    ]
+                )
+            )
+            # Set GRADLE_HOME and GRADLE_USER_HOME to the directory which will be rsync-ed with the gradle cache volume.
+            .with_env_variable("GRADLE_HOME", "/root/.gradle")
+            .with_env_variable("GRADLE_USER_HOME", "/root/.gradle")
+            # Set RUN_IN_AIRBYTE_CI to tell gradle how to configure its build cache.
+            # This is consumed by settings.gradle in the repo root.
+            .with_env_variable("RUN_IN_AIRBYTE_CI", "1")
+            # TODO: remove this once we finish the project to boost source-postgres CI performance.
+            .with_env_variable("CACHEBUSTER", hacks.get_cachebuster(self.context, self.logger))
+            # Mount the gradle cache volume.
+            # We deliberately don't mount it at $GRADLE_HOME, instead we load it there and store it from there using rsync.
+            # This is because the volume is accessed concurrently by all GradleTask instances.
+            # Hence, we synchronize the writes by setting the `sharing` parameter to LOCKED.
+            .with_mounted_cache("/root/gradle-cache", self.connector_java_build_cache, sharing=CacheSharingMode.LOCKED)
+            # Mount the parts of the repo which interest us in /airbyte.
+            .with_workdir("/airbyte")
+            .with_mounted_directory("/airbyte", self.context.get_repo_dir(".", include=include))
+            .with_mounted_directory(str(self.context.connector.code_directory), await self.context.get_connector_dir())
+            # Disable the Ryuk container because it needs privileged docker access that does not work:
+            .with_env_variable("TESTCONTAINERS_RYUK_DISABLED", "true")
+            # Run gradle once to populate the container's local maven repository.
+            # This step is useful also to serve as a basic sanity check and to warm the gradle cache.
+            # This will download gradle itself, a bunch of poms and jars, compile the gradle plugins, configure tasks, etc.
+            .with_exec(self._get_gradle_command(":airbyte-cdk:java:airbyte-cdk:publishSnapshotIfNeeded"))
+        )
+
+        # From this point on, we add layers which are task-dependent.
+        if self.mount_connector_secrets:
+            gradle_container = gradle_container.with_(
+                await secrets.mounted_connector_secrets(self.context, f"{self.context.connector.code_directory}/secrets")
+            )
+        if self.bind_to_docker_host:
+            # If this GradleTask subclass needs docker, then install it and bind it to the existing global docker host container.
+            gradle_container = pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, gradle_container)
+            # This installation should be cheap, as the package has already been downloaded, and its dependencies are already installed.
+ gradle_container = gradle_container.with_exec(["yum", "install", "-y", "docker"]) + + # Run the gradle task that we actually care about. + connector_task = f":airbyte-integrations:connectors:{self.context.connector.technical_name}:{self.gradle_task_name}" + gradle_container = gradle_container.with_exec(self._get_gradle_command(connector_task)) + return await self.get_step_result(gradle_container) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py new file mode 100644 index 0000000000000..058a15a7a9d45 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py @@ -0,0 +1,57 @@ +from pipelines.pipeline.connectors.builds.steps import run_connector_build_pipeline +from pipelines.pipeline.connectors.commands import connectors +from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand + + +import anyio +import click + + +@connectors.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.") +@click.option( + "--use-host-gradle-dist-tar", + is_flag=True, + help="Use gradle distTar output from host for java connectors.", + default=False, + type=bool, +) +@click.pass_context +def build(ctx: click.Context, use_host_gradle_dist_tar: bool) -> bool: + """Runs a build pipeline for the selected connectors.""" + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Build connector {connector.technical_name}", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + use_local_cdk=ctx.obj.get("use_local_cdk"), + open_report_in_browser=ctx.obj.get("open_report_in_browser"), + 
use_host_gradle_dist_tar=use_host_gradle_dist_tar, + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + if use_host_gradle_dist_tar and not ctx.obj["is_local"]: + raise Exception("flag --use-host-gradle-dist-tar requires --is-local") + anyio.run( + run_connectors_pipelines, + connectors_contexts, + run_connector_build_pipeline, + "Build Pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + ) + + return True diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-ci/connectors/pipelines/pipelines/builds/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py similarity index 83% rename from airbyte-ci/connectors/pipelines/pipelines/builds/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py index 68cce376d3e7d..430c5e2182175 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/builds/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py @@ -9,11 +9,12 @@ import anyio from connector_ops.utils import ConnectorLanguage -from pipelines.bases import ConnectorReport, StepResult -from pipelines.builds import java_connectors, python_connectors -from pipelines.builds.common import LoadContainerToLocalDockerHost, StepStatus +from pipelines.models.bases import ConnectorReport, StepResult +from pipelines.pipeline.connectors.builds.steps import python_connectors +from pipelines.pipeline.connectors.builds.steps.common import LoadContainerToLocalDockerHost, StepStatus from pipelines.consts import LOCAL_BUILD_PLATFORM -from pipelines.contexts import ConnectorContext +from pipelines.pipeline.connectors.builds.steps import java_connectors +from pipelines.pipeline.connectors.context import ConnectorContext class NoBuildStepForLanguageError(Exception): diff --git a/airbyte-ci/connectors/pipelines/pipelines/builds/build_customization.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/builds/build_customization.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/builds/common.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py similarity index 94% rename from airbyte-ci/connectors/pipelines/pipelines/builds/common.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py index 2299ddbb251ad..7f424fe1a6f71 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/builds/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py @@ -7,10 +7,10 @@ import docker from dagger import Container, ExecError, Platform, QueryError -from pipelines.bases import Step, StepResult, StepStatus +from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.consts import BUILD_PLATFORMS -from pipelines.contexts import ConnectorContext -from pipelines.utils import export_container_to_tarball +from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.helpers.utils import export_container_to_tarball class 
BuildConnectorImagesBase(Step, ABC):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/builds/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py
similarity index 87%
rename from airbyte-ci/connectors/pipelines/pipelines/builds/java_connectors.py
rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py
index 01266cd2100c8..7af361e035e41 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/builds/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py
@@ -5,12 +5,12 @@
 from typing import List, Optional, Tuple, Union
 
 from dagger import Container, Directory, ExecError, File, Host, Platform, QueryError
-from pipelines.actions import environments
-from pipelines.bases import StepResult, StepStatus
-from pipelines.builds.common import BuildConnectorImagesBase
+from pipelines.dagger.containers import java
+from pipelines.models.steps import StepResult, StepStatus
+from pipelines.pipeline.connectors.builds.steps.common import BuildConnectorImagesBase
 from pipelines.consts import LOCAL_BUILD_PLATFORM
-from pipelines.contexts import ConnectorContext
-from pipelines.gradle import GradleTask
+from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.models.steps import GradleTask
 
 
 class BuildConnectorDistributionTar(GradleTask):
@@ -46,7 +46,7 @@ async def _run(self, dist_dir: Directory) -> StepResult:
         return await super()._run(dist_tar)
 
     async def _build_connector(self, platform: Platform, dist_tar: File) -> Container:
-        return await environments.with_airbyte_java_connector(self.context, dist_tar, platform)
+        return await java.with_airbyte_java_connector(self.context, dist_tar, platform)
 
 
 async def run_connector_build(context: ConnectorContext) -> StepResult:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py
b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py
similarity index 91%
rename from airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py
rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py
index 466e732ef907c..9b6e4cf51e1fc 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/builds/python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py
@@ -4,11 +4,11 @@
 
 from dagger import Container, Platform
 
-from pipelines.actions.environments import apply_python_development_overrides, with_python_connector_installed
-from pipelines.bases import StepResult
-from pipelines.builds import build_customization
-from pipelines.builds.common import BuildConnectorImagesBase
-from pipelines.contexts import ConnectorContext
+from pipelines.dagger.actions.python.common import apply_python_development_overrides, with_python_connector_installed
+from pipelines.models.steps import StepResult
+from pipelines.pipeline.connectors.builds.steps import build_customization
+from pipelines.pipeline.connectors.builds.steps.common import BuildConnectorImagesBase
+from pipelines.pipeline.connectors.context import ConnectorContext
 
 
 class BuildConnectorImages(BuildConnectorImagesBase):
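These build-step renames all feed the build pipeline, which routes each connector to the java or python implementation. A minimal sketch of that dispatch, with helper and exception names assumed rather than taken from this hunk (the real routing lives in builds/steps/__init__.py):

    from connector_ops.utils import ConnectorLanguage

    async def run_connector_build(context: ConnectorContext) -> StepResult:
        # Hypothetical routing on the connector's declared language.
        if context.connector.language in (ConnectorLanguage.PYTHON, ConnectorLanguage.LOW_CODE):
            return await python_connectors.run_connector_build(context)
        if context.connector.language is ConnectorLanguage.JAVA:
            return await java_connectors.run_connector_build(context)
        raise NoBuildStepForLanguageError(f"No build step for {context.connector.language}.")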
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/__init__.py
new file mode 100644
index 0000000000000..c941b30457953
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py
new file mode 100644
index 0000000000000..89b8fdc270a91
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py
@@ -0,0 +1,60 @@
+from pipelines.pipeline.connectors.bump_version.pipeline import run_connector_version_bump_pipeline
+from pipelines.pipeline.connectors.commands import connectors
+from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+
+
+import anyio
+import click
+
+
+@connectors.command(cls=DaggerPipelineCommand, help="Bump a connector version: update metadata.yaml and changelog.")
+@click.argument("bump-type", type=click.Choice(["patch", "minor", "major"]))
+@click.argument("pull-request-number", type=str)
+@click.argument("changelog-entry", type=str)
+@click.pass_context
+def bump_version(
+    ctx: click.Context,
+    bump_type: str,
+    pull_request_number: str,
+    changelog_entry: str,
+) -> bool:
+    """Bump a connector version: update metadata.yaml and changelog."""
+
+    connectors_contexts = [
+        ConnectorContext(
+            pipeline_name=f"Bump version of connector {connector.technical_name}",
+            connector=connector,
+            is_local=ctx.obj["is_local"],
+            git_branch=ctx.obj["git_branch"],
+            git_revision=ctx.obj["git_revision"],
+            ci_report_bucket=ctx.obj["ci_report_bucket_name"],
+            report_output_prefix=ctx.obj["report_output_prefix"],
+            use_remote_secrets=ctx.obj["use_remote_secrets"],
+            gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
+            dagger_logs_url=ctx.obj.get("dagger_logs_url"),
+            pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
+            ci_context=ctx.obj.get("ci_context"),
+            ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
+            ci_git_user=ctx.obj["ci_git_user"],
+            ci_github_access_token=ctx.obj["ci_github_access_token"],
+            open_report_in_browser=False,
+        )
+        for connector in ctx.obj["selected_connectors_with_modified_files"]
+    ]
+
+    anyio.run(
+        run_connectors_pipelines,
+        connectors_contexts,
+        run_connector_version_bump_pipeline,
+        "Version bump pipeline",
+        ctx.obj["concurrency"],
+        ctx.obj["dagger_logs_path"],
+        ctx.obj["execute_timeout"],
+        bump_type,
+        changelog_entry,
+        pull_request_number,
+    )
+
+    return True
diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py
similarity index 96%
rename from airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py
rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py
index 7aa500cc4bd37..4d522beb5e518 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/version_bump.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py
@@ -1,62 +1,24 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-import datetime
 from copy import deepcopy
-
-import semver
 from dagger import Container
+import datetime
+import semver
 from pipelines import consts
-from pipelines.bases import ConnectorReport, Step, StepResult, StepStatus
-from pipelines.contexts import ConnectorContext
-
-from .
import metadata_change_helpers - - -class BumpDockerImageTagInMetadata(Step): - title = "Upgrade the dockerImageTag to the latest version in metadata.yaml" - - def __init__( - self, - context: ConnectorContext, - repo_dir: Container, - new_version: str, - ): - super().__init__(context) - self.repo_dir = repo_dir - self.new_version = new_version - - @staticmethod - def get_metadata_with_bumped_version(previous_version: str, new_version: str, current_metadata: dict) -> dict: - updated_metadata = deepcopy(current_metadata) - updated_metadata["data"]["dockerImageTag"] = new_version - # Bump strict versions - if current_metadata["data"].get("registries", {}).get("cloud", {}).get("dockerImageTag") == previous_version: - updated_metadata["data"]["registries"]["cloud"]["dockerImageTag"] = new_version - return updated_metadata +from pipelines.helpers.connectors import metadata_change_helpers +from pipelines.models.bases import ConnectorReport, Step, StepResult, StepStatus +from pipelines.pipeline.connectors.context import ConnectorContext - async def _run(self) -> StepResult: - metadata_path = self.context.connector.metadata_file_path - current_metadata = await metadata_change_helpers.get_current_metadata(self.repo_dir, metadata_path) - current_version = metadata_change_helpers.get_current_version(current_metadata) - if current_version is None: - return StepResult( - self, - StepStatus.SKIPPED, - stdout="Can't retrieve the connector current version.", - output_artifact=self.repo_dir, - ) - updated_metadata = self.get_metadata_with_bumped_version(current_version, self.new_version, current_metadata) - repo_dir_with_updated_metadata = metadata_change_helpers.get_repo_dir_with_updated_metadata( - self.repo_dir, metadata_path, updated_metadata - ) - return StepResult( - self, - StepStatus.SUCCESS, - stdout=f"Updated dockerImageTag from {current_version} to {self.new_version} in {metadata_path}", - output_artifact=repo_dir_with_updated_metadata, - ) +def get_bumped_version(version: str, bump_type: str) -> str: + current_version = semver.VersionInfo.parse(version) + if bump_type == "patch": + new_version = current_version.bump_patch() + elif bump_type == "minor": + new_version = current_version.bump_minor() + elif bump_type == "major": + new_version = current_version.bump_major() + else: + raise ValueError(f"Unknown bump type: {bump_type}") + return str(new_version) class AddChangelogEntry(Step): @@ -118,17 +80,50 @@ def add_changelog_entry(self, og_doc_content) -> str: return "\n".join(lines) -def get_bumped_version(version: str, bump_type: str) -> str: - current_version = semver.VersionInfo.parse(version) - if bump_type == "patch": - new_version = current_version.bump_patch() - elif bump_type == "minor": - new_version = current_version.bump_minor() - elif bump_type == "major": - new_version = current_version.bump_major() - else: - raise ValueError(f"Unknown bump type: {bump_type}") - return str(new_version) +class BumpDockerImageTagInMetadata(Step): + title = "Upgrade the dockerImageTag to the latest version in metadata.yaml" + + def __init__( + self, + context: ConnectorContext, + repo_dir: Container, + new_version: str, + ): + super().__init__(context) + self.repo_dir = repo_dir + self.new_version = new_version + + @staticmethod + def get_metadata_with_bumped_version(previous_version: str, new_version: str, current_metadata: dict) -> dict: + updated_metadata = deepcopy(current_metadata) + updated_metadata["data"]["dockerImageTag"] = new_version + # Bump strict versions + if 
current_metadata["data"].get("registries", {}).get("cloud", {}).get("dockerImageTag") == previous_version: + updated_metadata["data"]["registries"]["cloud"]["dockerImageTag"] = new_version + return updated_metadata + + async def _run(self) -> StepResult: + metadata_path = self.context.connector.metadata_file_path + current_metadata = await metadata_change_helpers.get_current_metadata(self.repo_dir, metadata_path) + current_version = metadata_change_helpers.get_current_version(current_metadata) + if current_version is None: + return StepResult( + self, + StepStatus.SKIPPED, + stdout="Can't retrieve the connector current version.", + output_artifact=self.repo_dir, + ) + updated_metadata = self.get_metadata_with_bumped_version(current_version, self.new_version, current_metadata) + repo_dir_with_updated_metadata = metadata_change_helpers.get_repo_dir_with_updated_metadata( + self.repo_dir, metadata_path, updated_metadata + ) + + return StepResult( + self, + StepStatus.SUCCESS, + stdout=f"Updated dockerImageTag from {current_version} to {self.new_version} in {metadata_path}", + output_artifact=repo_dir_with_updated_metadata, + ) async def run_connector_version_bump_pipeline( diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py new file mode 100644 index 0000000000000..b7836a7dd3069 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -0,0 +1,204 @@ +import os +from pathlib import Path +from pipelines import main_logger +from pipelines.commands.groups.connectors import ALL_CONNECTORS + + +import click +from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo + + +from typing import List, Set, Tuple +from pipelines.helpers.connectors.modifed import get_modified_connectors + +from pipelines.models.bases import ConnectorWithModifiedFiles +from pipelines.helpers.connectors.modifed import get_connector_modified_files + +ALL_CONNECTORS = get_all_connectors_in_repo() + + +def log_selected_connectors(selected_connectors_with_modified_files: List[ConnectorWithModifiedFiles]) -> None: + if selected_connectors_with_modified_files: + selected_connectors_names = [c.technical_name for c in selected_connectors_with_modified_files] + main_logger.info(f"Will run on the following {len(selected_connectors_names)} connectors: {', '.join(selected_connectors_names)}.") + else: + main_logger.info("No connectors to run.") + + +def get_selected_connectors_with_modified_files( + selected_names: Tuple[str], + selected_support_levels: Tuple[str], + selected_languages: Tuple[str], + modified: bool, + metadata_changes_only: bool, + metadata_query: str, + modified_files: Set[Path], + enable_dependency_scanning: bool = False, +) -> List[ConnectorWithModifiedFiles]: + """Get the connectors that match the selected criteria. + + Args: + selected_names (Tuple[str]): Selected connector names. + selected_support_levels (Tuple[str]): Selected connector support levels. + selected_languages (Tuple[str]): Selected connector languages. + modified (bool): Whether to select the modified connectors. + metadata_changes_only (bool): Whether to select only the connectors with metadata changes. + modified_files (Set[Path]): The modified files. + enable_dependency_scanning (bool): Whether to enable the dependency scanning. + Returns: + List[ConnectorWithModifiedFiles]: The connectors that match the selected criteria. 
+ """ + + if metadata_changes_only and not modified: + main_logger.info("--metadata-changes-only overrides --modified") + modified = True + + selected_modified_connectors = ( + get_modified_connectors(modified_files, ALL_CONNECTORS, enable_dependency_scanning) if modified else set() + ) + selected_connectors_by_name = {c for c in ALL_CONNECTORS if c.technical_name in selected_names} + selected_connectors_by_support_level = {connector for connector in ALL_CONNECTORS if connector.support_level in selected_support_levels} + selected_connectors_by_language = {connector for connector in ALL_CONNECTORS if connector.language in selected_languages} + selected_connectors_by_query = ( + {connector for connector in ALL_CONNECTORS if connector.metadata_query_match(metadata_query)} if metadata_query else set() + ) + + non_empty_connector_sets = [ + connector_set + for connector_set in [ + selected_connectors_by_name, + selected_connectors_by_support_level, + selected_connectors_by_language, + selected_connectors_by_query, + selected_modified_connectors, + ] + if connector_set + ] + # The selected connectors are the intersection of the selected connectors by name, support_level, language, simpleeval query and modified. + selected_connectors = set.intersection(*non_empty_connector_sets) if non_empty_connector_sets else set() + + selected_connectors_with_modified_files = [] + for connector in selected_connectors: + connector_with_modified_files = ConnectorWithModifiedFiles( + technical_name=connector.technical_name, modified_files=get_connector_modified_files(connector, modified_files) + ) + if not metadata_changes_only: + selected_connectors_with_modified_files.append(connector_with_modified_files) + else: + if connector_with_modified_files.has_metadata_change: + selected_connectors_with_modified_files.append(connector_with_modified_files) + return selected_connectors_with_modified_files + + +def validate_environment(is_local: bool, use_remote_secrets: bool): + """Check if the required environment variables exist.""" + if is_local: + if not Path(".git").is_dir(): + raise click.UsageError("You need to run this command from the repository root.") + else: + required_env_vars_for_ci = [ + "GCP_GSM_CREDENTIALS", + "CI_REPORT_BUCKET_NAME", + "CI_GITHUB_ACCESS_TOKEN", + ] + for required_env_var in required_env_vars_for_ci: + if os.getenv(required_env_var) is None: + raise click.UsageError(f"When running in a CI context a {required_env_var} environment variable must be set.") + if use_remote_secrets and os.getenv("GCP_GSM_CREDENTIALS") is None: + raise click.UsageError( + "You have to set the GCP_GSM_CREDENTIALS if you want to download secrets from GSM. Set the --use-remote-secrets option to false otherwise." + ) + + +@click.group(help="Commands related to connectors and connector acceptance tests.") +@click.option("--use-remote-secrets", default=True) # specific to connectors +@click.option( + "--name", + "names", + multiple=True, + help="Only test a specific connector. Use its technical name. 
e.g. source-pokeapi.",
+    type=click.Choice([c.technical_name for c in ALL_CONNECTORS]),
+)
+@click.option("--language", "languages", multiple=True, help="Filter connectors to test by language.", type=click.Choice(ConnectorLanguage))
+@click.option(
+    "--support-level",
+    "support_levels",
+    multiple=True,
+    help="Filter connectors to test by support_level.",
+    type=click.Choice(SupportLevelEnum),
+)
+@click.option("--modified/--not-modified", help="Only test modified connectors in the current branch.", default=False, type=bool)
+@click.option(
+    "--metadata-changes-only/--not-metadata-changes-only",
+    help="Only test connectors with modified metadata files in the current branch.",
+    default=False,
+    type=bool,
+)
+@click.option(
+    "--metadata-query",
+    help="Filter connectors by metadata query using `simpleeval`. e.g. 'data.ab_internal.ql == 200'",
+    type=str,
+)
+@click.option("--concurrency", help="Number of connector tests pipeline to run in parallel.", default=5, type=int)
+@click.option(
+    "--execute-timeout",
+    help="The maximum time in seconds for the execution of a Dagger request before an ExecuteTimeoutError is raised. Passing None results in waiting forever.",
+    default=None,
+    type=int,
+)
+@click.option(
+    "--enable-dependency-scanning/--disable-dependency-scanning",
+    help="When enabled, dependency scanning is performed to detect the connectors to test according to a dependency change.",
+    default=False,
+    type=bool,
+)
+@click.option(
+    "--use-local-cdk",
+    is_flag=True,
+    help=("Build with the airbyte-cdk from the local repository. " "This is useful for testing changes to the CDK."),
+    default=False,
+    type=bool,
+)
+@click.option(
+    "--enable-report-auto-open/--disable-report-auto-open",
+    is_flag=True,
+    help=("When enabled, finishes by opening a browser window to display an HTML report."),
+    default=True,
+    type=bool,
+)
+@click.pass_context
+def connectors(
+    ctx: click.Context,
+    use_remote_secrets: bool,
+    names: Tuple[str],
+    languages: Tuple[ConnectorLanguage],
+    support_levels: Tuple[str],
+    modified: bool,
+    metadata_changes_only: bool,
+    metadata_query: str,
+    concurrency: int,
+    execute_timeout: int,
+    enable_dependency_scanning: bool,
+    use_local_cdk: bool,
+    enable_report_auto_open: bool,
+):
+    """Group all the connectors-ci commands."""
+    validate_environment(ctx.obj["is_local"], use_remote_secrets)
+
+    ctx.ensure_object(dict)
+    ctx.obj["use_remote_secrets"] = use_remote_secrets
+    ctx.obj["concurrency"] = concurrency
+    ctx.obj["execute_timeout"] = execute_timeout
+    ctx.obj["use_local_cdk"] = use_local_cdk
+    ctx.obj["open_report_in_browser"] = enable_report_auto_open
+    ctx.obj["selected_connectors_with_modified_files"] = get_selected_connectors_with_modified_files(
+        names,
+        support_levels,
+        languages,
+        modified,
+        metadata_changes_only,
+        metadata_query,
+        ctx.obj["modified_files"],
+        enable_dependency_scanning,
+    )
+    log_selected_connectors(ctx.obj["selected_connectors_with_modified_files"])
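For orientation, a couple of hypothetical invocations of this group, assuming the top-level `airbyte-ci` entrypoint wires it in (as the cli/airbyte_ci.py rename suggests) and that a `test` subcommand is registered:

    airbyte-ci connectors --name=source-pokeapi test
    airbyte-ci connectors --language=python --modified test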
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py
new file mode 100644
index 0000000000000..c01465a319ef4
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py
@@ -0,0 +1,234 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+"""Module declaring context related classes."""
+
+from datetime import datetime
+from types import TracebackType
+from typing import Optional
+
+import yaml
+from anyio import Path
+from asyncer import asyncify
+from dagger import Directory
+from github import PullRequest
+from pipelines.dagger.actions import secrets
+from pipelines.models.bases import ConnectorReport, ConnectorWithModifiedFiles
+from pipelines.models.contexts import PipelineContext
+from pipelines.helpers.github import update_commit_status_check
+from pipelines.helpers.slack import send_message_to_webhook
+from pipelines.helpers.utils import METADATA_FILE_NAME
+
+
+class ConnectorContext(PipelineContext):
+    """The connector context is used to store configuration for a specific connector pipeline run."""
+
+    DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE = "airbyte/connector-acceptance-test:dev"
+
+    def __init__(
+        self,
+        pipeline_name: str,
+        connector: ConnectorWithModifiedFiles,
+        is_local: bool,
+        git_branch: str,
+        git_revision: str,
+        report_output_prefix: str,
+        use_remote_secrets: bool = True,
+        ci_report_bucket: Optional[str] = None,
+        ci_gcs_credentials: Optional[str] = None,
+        ci_git_user: Optional[str] = None,
+        ci_github_access_token: Optional[str] = None,
+        connector_acceptance_test_image: Optional[str] = DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE,
+        gha_workflow_run_url: Optional[str] = None,
+        dagger_logs_url: Optional[str] = None,
+        pipeline_start_timestamp: Optional[int] = None,
+        ci_context: Optional[str] = None,
+        slack_webhook: Optional[str] = None,
+        reporting_slack_channel: Optional[str] = None,
+        pull_request: PullRequest = None,
+        should_save_report: bool = True,
+        fail_fast: bool = False,
+        fast_tests_only: bool = False,
+        code_tests_only: bool = False,
+        use_local_cdk: bool = False,
+        use_host_gradle_dist_tar: bool = False,
+        open_report_in_browser: bool = True,
+        docker_hub_username: Optional[str] = None,
+        docker_hub_password: Optional[str] = None,
+    ):
+        """Initialize a connector context.
+
+        Args:
+            pipeline_name (str): The name of the pipeline run.
+            connector (ConnectorWithModifiedFiles): The connector under test.
+            is_local (bool): Whether the context is for a local run or a CI run.
+            git_branch (str): The current git branch name.
+            git_revision (str): The current git revision, commit hash.
+            report_output_prefix (str): The S3 key to upload the test report to.
+            use_remote_secrets (bool, optional): Whether to download secrets from GSM or use the local secrets. Defaults to True.
+            connector_acceptance_test_image (Optional[str], optional): The image to use to run connector acceptance tests. Defaults to DEFAULT_CONNECTOR_ACCEPTANCE_TEST_IMAGE.
+            gha_workflow_run_url (Optional[str], optional): URL to the GitHub Actions workflow run. Only valid for CI runs. Defaults to None.
+            dagger_logs_url (Optional[str], optional): URL to the dagger logs. Only valid for CI runs. Defaults to None.
+            pipeline_start_timestamp (Optional[int], optional): Timestamp at which the pipeline started. Defaults to None.
+            ci_context (Optional[str], optional): Pull request, workflow dispatch or nightly build. Defaults to None.
+            slack_webhook (Optional[str], optional): The slack webhook to send messages to. Defaults to None.
+            reporting_slack_channel (Optional[str], optional): The slack channel to send messages to. Defaults to None.
+            pull_request (PullRequest, optional): The pull request object if the pipeline was triggered by a pull request. Defaults to None.
+            fail_fast (bool, optional): Whether to fail fast. Defaults to False.
+            fast_tests_only (bool, optional): Whether to run only fast tests. Defaults to False.
+            code_tests_only (bool, optional): Whether to ignore non-code tests like QA and metadata checks. Defaults to False.
+            use_host_gradle_dist_tar (bool, optional): Used when developing java connectors with gradle. Defaults to False.
+            open_report_in_browser (bool, optional): Open HTML report in browser window. Defaults to True.
+            docker_hub_username (Optional[str], optional): Docker Hub username to use to read registries. Defaults to None.
+            docker_hub_password (Optional[str], optional): Docker Hub password to use to read registries. Defaults to None.
+        """
+
+        self.pipeline_name = pipeline_name
+        self.connector = connector
+        self.use_remote_secrets = use_remote_secrets
+        self.connector_acceptance_test_image = connector_acceptance_test_image
+        self.report_output_prefix = report_output_prefix
+        self._secrets_dir = None
+        self._updated_secrets_dir = None
+        self.cdk_version = None
+        self.should_save_report = should_save_report
+        self.fail_fast = fail_fast
+        self.fast_tests_only = fast_tests_only
+        self.code_tests_only = code_tests_only
+        self.use_local_cdk = use_local_cdk
+        self.use_host_gradle_dist_tar = use_host_gradle_dist_tar
+        self.open_report_in_browser = open_report_in_browser
+        self.docker_hub_username = docker_hub_username
+        self.docker_hub_password = docker_hub_password
+
+        super().__init__(
+            pipeline_name=pipeline_name,
+            is_local=is_local,
+            git_branch=git_branch,
+            git_revision=git_revision,
+            gha_workflow_run_url=gha_workflow_run_url,
+            dagger_logs_url=dagger_logs_url,
+            pipeline_start_timestamp=pipeline_start_timestamp,
+            ci_context=ci_context,
+            slack_webhook=slack_webhook,
+            reporting_slack_channel=reporting_slack_channel,
+            pull_request=pull_request,
+            ci_report_bucket=ci_report_bucket,
+            ci_gcs_credentials=ci_gcs_credentials,
+            ci_git_user=ci_git_user,
+            ci_github_access_token=ci_github_access_token,
+            open_report_in_browser=open_report_in_browser,
+        )
+
+    @property
+    def modified_files(self):
+        return self.connector.modified_files
+
+    @property
+    def secrets_dir(self) -> Directory:  # noqa D102
+        return self._secrets_dir
+
+    @secrets_dir.setter
+    def secrets_dir(self, secrets_dir: Directory):  # noqa D102
+        self._secrets_dir = secrets_dir
+
+    @property
+    def updated_secrets_dir(self) -> Directory:  # noqa D102
+        return self._updated_secrets_dir
+
+    @updated_secrets_dir.setter
+    def updated_secrets_dir(self, updated_secrets_dir: Directory):  # noqa D102
+        self._updated_secrets_dir = updated_secrets_dir
+
+    @property
+    def connector_acceptance_test_source_dir(self) -> Directory:  # noqa D102
+        return self.get_repo_dir("airbyte-integrations/bases/connector-acceptance-test")
+
+    @property
+    def should_save_updated_secrets(self) -> bool:  # noqa D102
+        return self.use_remote_secrets and self.updated_secrets_dir is not None
+
+    @property
+    def host_image_export_dir_path(self) -> str:
+        return "." if self.is_ci else "/tmp"
+
+    @property
+    def metadata_path(self) -> Path:
+        return self.connector.code_directory / METADATA_FILE_NAME
+
+    @property
+    def metadata(self) -> dict:
+        return yaml.safe_load(self.metadata_path.read_text())["data"]
+
+    @property
+    def docker_repository(self) -> str:
+        return self.metadata["dockerRepository"]
+
+    @property
+    def docker_image_tag(self) -> str:
+        return self.metadata["dockerImageTag"]
+
+    @property
+    def docker_image(self) -> str:
+        return f"{self.docker_repository}:{self.docker_image_tag}"
+
+    async def get_connector_dir(self, exclude=None, include=None) -> Directory:
+        """Get the connector under test source code directory.
+
+        Args:
+            exclude (Optional[List[str]], optional): List of files or directories to exclude from the directory. Defaults to None.
+            include (Optional[List[str]], optional): List of files or directories to include in the directory. Defaults to None.
+
+        Returns:
+            Directory: The connector under test source code directory.
+        """
+        vanilla_connector_dir = self.get_repo_dir(str(self.connector.code_directory), exclude=exclude, include=include)
+        return await vanilla_connector_dir.with_timestamps(1)
+
+    async def __aexit__(
+        self, exception_type: Optional[type[BaseException]], exception_value: Optional[BaseException], traceback: Optional[TracebackType]
+    ) -> bool:
+        """Perform teardown operation for the ConnectorContext.
+
+        On the context exit the following operations will happen:
+            - Upload updated connector secrets back to Google Secret Manager
+            - Write a test report in JSON format locally and to S3 if running in a CI environment
+            - Update the commit status check on GitHub if running in a CI environment.
+        It gracefully handles any execution error and always uploads a test report and updates the commit status check.
+
+        Args:
+            exception_type (Optional[type[BaseException]]): The exception type if an exception was raised in the context execution, None otherwise.
+            exception_value (Optional[BaseException]): The exception value if an exception was raised in the context execution, None otherwise.
+            traceback (Optional[TracebackType]): The traceback if an exception was raised in the context execution, None otherwise.
+
+        Returns:
+            bool: Whether the teardown operation ran successfully.
+        """
+        self.stopped_at = datetime.utcnow()
+        self.state = self.determine_final_state(self.report, exception_value)
+        if exception_value:
+            self.logger.error("An error got handled by the ConnectorContext", exc_info=True)
+        if self.report is None:
+            self.logger.error("No test report was provided. This is probably due to an upstream error.")
+            self.report = ConnectorReport(self, [])
+
+        if self.should_save_updated_secrets:
+            await secrets.upload(self)
+
+        self.report.print()
+
+        if self.should_save_report:
+            await self.report.save()
+
+        if self.report.should_be_commented_on_pr:
+            self.report.post_comment_on_pr()
+
+        await asyncify(update_commit_status_check)(**self.github_commit_status)
+
+        if self.should_send_slack_message:
+            await asyncify(send_message_to_webhook)(self.create_slack_message(), self.reporting_slack_channel, self.slack_webhook)
+
+        # Suppress the exception, if any.
+        return True
+
+    def create_slack_message(self) -> str:
+        raise NotImplementedError
+
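ConnectorContext is designed to be driven as an async context manager: the `return True` above swallows in-flight exceptions so that reporting always runs. A hedged sketch of the expected calling pattern (`run_steps` here is a stand-in, not the real runner):

    # Illustrative only: drive the context so __aexit__ handles reporting.
    async def run_one_connector(context) -> None:
        async with context:  # __aenter__ provisions the pipeline, __aexit__ reports
            results = await run_steps(context)             # stand-in for real steps
            context.report = ConnectorReport(context, results)
        # __aexit__ returned True: step failures live in the report, nothing re-raises.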
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/__init__.py
new file mode 100644
index 0000000000000..c941b30457953
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py
new file mode 100644
index 0000000000000..2e75d7142b2cf
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py
@@ -0,0 +1,41 @@
+from pipelines.pipeline.connectors.commands import connectors
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+
+
+import click
+from connector_ops.utils import console
+from rich.table import Table
+from rich.text import Text
+
+
+@connectors.command(cls=DaggerPipelineCommand, help="List all selected connectors.")
+@click.pass_context
+def list(
+    ctx: click.Context,
+):
+    selected_connectors = sorted(ctx.obj["selected_connectors_with_modified_files"], key=lambda x: x.technical_name)
+    table = Table(title=f"{len(selected_connectors)} selected connectors")
+    table.add_column("Modified")
+    table.add_column("Connector")
+    table.add_column("Language")
+    table.add_column("Release stage")
+    table.add_column("Version")
+    table.add_column("Folder")
+
+    for connector in selected_connectors:
+        modified = "X" if connector.modified_files else ""
+        connector_name = Text(connector.technical_name)
+        language = Text(connector.language.value) if connector.language else "N/A"
+        try:
+            support_level = Text(connector.support_level)
+        except Exception:
+            support_level = "N/A"
+        try:
+            version = Text(connector.version)
+        except Exception:
+            version = "N/A"
+        folder = Text(str(connector.code_directory))
+        table.add_row(modified, connector_name, language, support_level, version, folder)
+
+    console.print(table)
+    return True
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/__init__.py
new file mode 100644
index 0000000000000..c941b30457953
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py
new file mode 100644
index 0000000000000..8cb33f5b07dac
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py
@@ -0,0 +1,75 @@
+from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline
+from pipelines.pipeline.connectors.commands import connectors
+from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+
+
+import anyio
+import click
+
+
+@connectors.command(
+    cls=DaggerPipelineCommand,
+    help="Make the selected connectors use our base image: remove Dockerfile, update metadata.yaml and update documentation.",
+)
+@click.argument("pull-request-number", type=str)
+@click.option(
+    "--docker-hub-username",
+    help="Your username to connect to DockerHub to read the registries.",
+    type=click.STRING,
+    required=True,
+    envvar="DOCKER_HUB_USERNAME",
+)
+@click.option(
+    "--docker-hub-password",
+    help="Your password to connect to DockerHub to read the registries.",
+    type=click.STRING,
+    required=True,
+    envvar="DOCKER_HUB_PASSWORD",
+)
+@click.pass_context
+def migrate_to_base_image(
+    ctx: click.Context,
+    pull_request_number: str,
+    docker_hub_username: str,
+    docker_hub_password: str,
+) -> bool:
+    """Migrate the selected connectors to our base image: update metadata.yaml, add a changelog entry and delete legacy files."""
+
+    connectors_contexts = [
+        ConnectorContext(
+            pipeline_name=f"Migrate connector {connector.technical_name} to our base image",
+            connector=connector,
+            is_local=ctx.obj["is_local"],
+            git_branch=ctx.obj["git_branch"],
+            git_revision=ctx.obj["git_revision"],
+            ci_report_bucket=ctx.obj["ci_report_bucket_name"],
+            report_output_prefix=ctx.obj["report_output_prefix"],
+            use_remote_secrets=ctx.obj["use_remote_secrets"],
+            gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
+            dagger_logs_url=ctx.obj.get("dagger_logs_url"),
+            pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
+            ci_context=ctx.obj.get("ci_context"),
+            ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
+            ci_git_user=ctx.obj["ci_git_user"],
+            ci_github_access_token=ctx.obj["ci_github_access_token"],
+            open_report_in_browser=False,
+            docker_hub_username=docker_hub_username,
+            docker_hub_password=docker_hub_password,
+        )
+        for connector in ctx.obj["selected_connectors_with_modified_files"]
+    ]
+
+    anyio.run(
+        run_connectors_pipelines,
+        connectors_contexts,
+        run_connector_migration_to_base_image_pipeline,
+        "Migration to base image pipeline",
+        ctx.obj["concurrency"],
+        ctx.obj["dagger_logs_path"],
+        ctx.obj["execute_timeout"],
+        pull_request_number,
+    )
+
+    return True
diff --git a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_migration.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py
similarity index 97%
rename from airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_migration.py
rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py
index 30bccbac3d023..258a937867262 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/connector_changes/base_image_version_migration.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py
@@ -10,11 +10,12 @@
 from dagger import Directory
 from jinja2 import Template
 from pipelines import consts
-from pipelines.bases import ConnectorReport, Step, StepResult, StepStatus
-from pipelines.connector_changes.version_bump import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version
-from pipelines.contexts import ConnectorContext, PipelineContext
+from pipelines.models.bases import ConnectorReport, Step, StepResult, StepStatus
+from pipelines.pipeline.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version
+from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext
 
-from . import metadata_change_helpers
+from pipelines.helpers.connectors import metadata_change_helpers
 
 
 class UpgradeBaseImageMetadata(Step):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipelines/connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py
similarity index 93%
rename from airbyte-ci/connectors/pipelines/pipelines/pipelines/connectors.py
rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py
index 43303bdf86124..eb933636f004f 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/pipelines/connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py
@@ -12,11 +12,11 @@
 import dagger
 from connector_ops.utils import ConnectorLanguage
 from dagger import Config
-from pipelines.actions import environments
-from pipelines.bases import NoOpStep, Report, StepResult, StepStatus
+from pipelines.dagger.actions.system import docker
+from pipelines.models.bases import NoOpStep, Report, StepResult, StepStatus
 from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT
-from pipelines.contexts import ConnectorContext, ContextState
-from pipelines.utils import create_and_open_file
+from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.models.contexts import ContextState
+from pipelines.helpers.utils import create_and_open_file
 
 GITHUB_GLOBAL_CONTEXT = "[POC please ignore] Connectors CI"
 GITHUB_GLOBAL_DESCRIPTION = "Running connectors tests"
@@ -88,7 +88,7 @@ async def run_connectors_pipelines(
     # HACK: This is to get a long running dockerd service to be shared across all the connectors pipelines
     # Using the "normal" service binding leads to restart of dockerd during pipeline run that can cause corrupted docker state
     # See https://github.com/airbytehq/airbyte/issues/27233
-    dockerd_service = environments.with_global_dockerd_service(dagger_client)
+    dockerd_service = docker.with_global_dockerd_service(dagger_client)
     async with anyio.create_task_group() as tg_main:
         tg_main.start_soon(dockerd_service.sync)
         await (  # Wait for the docker service to be ready
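The HACK comment above is the heart of this module: one long-lived dockerd service is started inside a task group and shared by every connector pipeline, instead of one daemon per pipeline. A minimal sketch of the pattern, assuming a dagger-style service object exposing a `sync` coroutine:

    import anyio

    async def run_pipeline(context) -> None:  # stand-in for the per-connector coroutine
        await anyio.sleep(0)

    async def run_all(contexts, dockerd_service) -> None:
        async with anyio.create_task_group() as tg:
            # One dockerd shared by every pipeline avoids mid-run daemon restarts.
            tg.start_soon(dockerd_service.sync)
            for context in contexts:
                tg.start_soon(run_pipeline, context)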
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/__init__.py
new file mode 100644
index 0000000000000..c941b30457953
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py
new file mode 100644
index 0000000000000..677bbaebd7d0d
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py
@@ -0,0 +1,135 @@
+from pipelines import main_logger
+from pipelines.pipeline.connectors.commands import connectors
+from pipelines.models.contexts import ContextState
+from pipelines.pipeline.connectors.publish.context import PublishConnectorContext
+from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines
+from pipelines.pipeline.connectors.publish.pipeline import reorder_contexts, run_connector_publish_pipeline
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+
+
+import anyio
+import click
+
+
+@connectors.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.")
+@click.option("--pre-release/--main-release", help="Use this flag if you want to publish pre-release images.", default=True, type=bool)
+@click.option(
+    "--spec-cache-gcs-credentials",
+    help="The service account key to upload files to the GCS bucket hosting spec cache.",
+    type=click.STRING,
+    required=True,
+    envvar="SPEC_CACHE_GCS_CREDENTIALS",
+)
+@click.option(
+    "--spec-cache-bucket-name",
+    help="The name of the GCS bucket where specs will be cached.",
+    type=click.STRING,
+    required=True,
+    envvar="SPEC_CACHE_BUCKET_NAME",
+)
+@click.option(
+    "--metadata-service-gcs-credentials",
+    help="The service account key to upload files to the GCS bucket hosting the metadata files.",
+    type=click.STRING,
+    required=True,
+    envvar="METADATA_SERVICE_GCS_CREDENTIALS",
+)
+@click.option(
+    "--metadata-service-bucket-name",
+    help="The name of the GCS bucket where metadata files will be uploaded.",
+    type=click.STRING,
+    required=True,
+    envvar="METADATA_SERVICE_BUCKET_NAME",
+)
+@click.option(
+    "--docker-hub-username",
+    help="Your username to connect to DockerHub.",
+    type=click.STRING,
+    required=True,
+    envvar="DOCKER_HUB_USERNAME",
+)
+@click.option(
+    "--docker-hub-password",
+    help="Your password to connect to DockerHub.",
+    type=click.STRING,
+    required=True,
+    envvar="DOCKER_HUB_PASSWORD",
+)
+@click.option(
+    "--slack-webhook",
+    help="The Slack webhook URL to send notifications to.",
+    type=click.STRING,
+    envvar="SLACK_WEBHOOK",
+)
+@click.option(
+    "--slack-channel",
+    help="The Slack channel to send notifications to.",
+    type=click.STRING,
+    envvar="SLACK_CHANNEL",
+    default="#connector-publish-updates",
+)
+@click.pass_context
+def publish(
+    ctx: click.Context,
+    pre_release: bool,
+    spec_cache_gcs_credentials: str,
+    spec_cache_bucket_name: str,
+    metadata_service_bucket_name: str,
+    metadata_service_gcs_credentials: str,
+    docker_hub_username: str,
+    docker_hub_password: str,
+    slack_webhook: str,
+    slack_channel: str,
+):
+    ctx.obj["spec_cache_gcs_credentials"] = spec_cache_gcs_credentials
+    ctx.obj["spec_cache_bucket_name"] = spec_cache_bucket_name
+    ctx.obj["metadata_service_bucket_name"] = metadata_service_bucket_name
+    ctx.obj["metadata_service_gcs_credentials"] = metadata_service_gcs_credentials
+    if ctx.obj["is_local"]:
+        click.confirm(
+            "Publishing from a local environment is not recommended and requires being logged in to Airbyte's DockerHub registry. Do you want to continue?",
+            abort=True,
+        )
+
+    publish_connector_contexts = reorder_contexts(
+        [
+            PublishConnectorContext(
+                connector=connector,
+
pre_release=pre_release, + spec_cache_gcs_credentials=spec_cache_gcs_credentials, + spec_cache_bucket_name=spec_cache_bucket_name, + metadata_service_gcs_credentials=metadata_service_gcs_credentials, + metadata_bucket_name=metadata_service_bucket_name, + docker_hub_username=docker_hub_username, + docker_hub_password=docker_hub_password, + slack_webhook=slack_webhook, + reporting_slack_channel=slack_channel, + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + pull_request=ctx.obj.get("pull_request"), + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + ) + + main_logger.warn("Concurrency is forced to 1. For stability reasons we disable parallel publish pipelines.") + ctx.obj["concurrency"] = 1 + + publish_connector_contexts = anyio.run( + run_connectors_pipelines, + publish_connector_contexts, + run_connector_publish_pipeline, + "Publishing connectors", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + ) + return all(context.state is ContextState.SUCCESSFUL for context in publish_connector_contexts) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py new file mode 100644 index 0000000000000..9cd339b17f2d9 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py @@ -0,0 +1,120 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+#
+
+"""Module declaring context related classes."""
+
+from typing import Optional
+
+from dagger import Secret
+from github import PullRequest
+from pipelines.helpers.gcs import sanitize_gcs_credentials
+from pipelines.models.bases import ConnectorWithModifiedFiles
+from pipelines.helpers.utils import format_duration
+from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.models.contexts import ContextState
+
+
+class PublishConnectorContext(ConnectorContext):
+    def __init__(
+        self,
+        connector: ConnectorWithModifiedFiles,
+        pre_release: bool,
+        spec_cache_gcs_credentials: str,
+        spec_cache_bucket_name: str,
+        metadata_service_gcs_credentials: str,
+        metadata_bucket_name: str,
+        docker_hub_username: str,
+        docker_hub_password: str,
+        slack_webhook: str,
+        reporting_slack_channel: str,
+        ci_report_bucket: str,
+        report_output_prefix: str,
+        is_local: bool,
+        git_branch: str,
+        git_revision: str,
+        gha_workflow_run_url: Optional[str] = None,
+        dagger_logs_url: Optional[str] = None,
+        pipeline_start_timestamp: Optional[int] = None,
+        ci_context: Optional[str] = None,
+        ci_gcs_credentials: Optional[str] = None,
+        pull_request: PullRequest = None,
+    ):
+        self.pre_release = pre_release
+        self.spec_cache_bucket_name = spec_cache_bucket_name
+        self.metadata_bucket_name = metadata_bucket_name
+        self.spec_cache_gcs_credentials = sanitize_gcs_credentials(spec_cache_gcs_credentials)
+        self.metadata_service_gcs_credentials = sanitize_gcs_credentials(metadata_service_gcs_credentials)
+        pipeline_name = f"Publish {connector.technical_name}"
+        pipeline_name = pipeline_name + " (pre-release)" if pre_release else pipeline_name
+
+        super().__init__(
+            pipeline_name=pipeline_name,
+            connector=connector,
+            report_output_prefix=report_output_prefix,
+            ci_report_bucket=ci_report_bucket,
+            is_local=is_local,
+            git_branch=git_branch,
+            git_revision=git_revision,
+            gha_workflow_run_url=gha_workflow_run_url,
+            dagger_logs_url=dagger_logs_url,
+            pipeline_start_timestamp=pipeline_start_timestamp,
+            ci_context=ci_context,
+            slack_webhook=slack_webhook,
+            reporting_slack_channel=reporting_slack_channel,
+            ci_gcs_credentials=ci_gcs_credentials,
+            should_save_report=True,
+            docker_hub_username=docker_hub_username,
+            docker_hub_password=docker_hub_password,
+        )
+
+    @property
+    def docker_hub_username_secret(self) -> Secret:
+        return self.dagger_client.set_secret("docker_hub_username", self.docker_hub_username)
+
+    @property
+    def docker_hub_password_secret(self) -> Secret:
+        return self.dagger_client.set_secret("docker_hub_password", self.docker_hub_password)
+
+    @property
+    def metadata_service_gcs_credentials_secret(self) -> Secret:
+        return self.dagger_client.set_secret("metadata_service_gcs_credentials", self.metadata_service_gcs_credentials)
+
+    @property
+    def spec_cache_gcs_credentials_secret(self) -> Secret:
+        return self.dagger_client.set_secret("spec_cache_gcs_credentials", self.spec_cache_gcs_credentials)
+
+    @property
+    def docker_image_tag(self):
+        # get the docker image tag from the parent class
+        metadata_tag = super().docker_image_tag
+        if self.pre_release:
+            return f"{metadata_tag}-dev.{self.git_revision[:10]}"
+        else:
+            return metadata_tag
+
+    def create_slack_message(self) -> str:
+        docker_hub_url = f"https://hub.docker.com/r/{self.connector.metadata['dockerRepository']}/tags"
+        message = f"*Publish <{docker_hub_url}|{self.docker_image}>*\n"
+        if self.is_ci:
+            message += f"🤖 <{self.gha_workflow_run_url}|GitHub Action workflow>\n"
+        else:
+            message += "🧑‍💻 Local run\n"
+        message += 
f"*Connector:* {self.connector.technical_name}\n" + message += f"*Version:* {self.connector.version}\n" + branch_url = f"https://github.com/airbytehq/airbyte/tree/{self.git_branch}" + message += f"*Branch:* <{branch_url}|{self.git_branch}>\n" + commit_url = f"https://github.com/airbytehq/airbyte/commit/{self.git_revision}" + message += f"*Commit:* <{commit_url}|{self.git_revision[:10]}>\n" + if self.state in [ContextState.INITIALIZED, ContextState.RUNNING]: + message += "🟠" + if self.state is ContextState.SUCCESSFUL: + message += "🟢" + if self.state in [ContextState.FAILURE, ContextState.ERROR]: + message += "🔴" + message += f" {self.state.value['description']}\n" + if self.state is ContextState.SUCCESSFUL: + message += f"⏲️ Run duration: {format_duration(self.report.run_duration)}\n" + if self.state is ContextState.FAILURE: + message += "\ncc. " # @dev-connector-ops + return message diff --git a/airbyte-ci/connectors/pipelines/pipelines/publish.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py similarity index 95% rename from airbyte-ci/connectors/pipelines/pipelines/publish.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py index 10cf62e575689..26daeeb1ee7be 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/publish.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py @@ -9,22 +9,27 @@ import anyio from airbyte_protocol.models.airbyte_protocol import ConnectorSpecification from dagger import Container, ExecError, File, ImageLayerCompression, QueryError -from pipelines import builds, consts -from pipelines.actions import environments -from pipelines.actions.remote_storage import upload_to_gcs -from pipelines.bases import ConnectorReport, Step, StepResult, StepStatus -from pipelines.contexts import PublishConnectorContext -from pipelines.pipelines import metadata +from pipelines import consts +from pipelines.dagger.actions.system import docker +from pipelines.dagger.actions.remote_storage import upload_to_gcs +from pipelines.models.bases import ConnectorReport, Step, StepResult, StepStatus +from pipelines.pipeline.connectors.builds import steps +from pipelines.pipeline.connectors.publish.context import PublishConnectorContext +from pipelines.pipeline.metadata.pipeline import MetadataValidation, MetadataUpload from pydantic import ValidationError +class InvalidSpecOutputError(Exception): + pass + + class CheckConnectorImageDoesNotExist(Step): title = "Check if the connector docker image does not exist on the registry." async def _run(self) -> StepResult: docker_repository, docker_tag = self.context.docker_image.split(":") crane_ls = ( - environments.with_crane( + docker.with_crane( self.context, ) .with_env_variable("CACHEBUSTER", str(uuid.uuid4())) @@ -86,7 +91,7 @@ async def check_if_image_only_has_gzip_layers(self) -> bool: We use crane to inspect the manifest of the image and check if it only has gzip layers. 
""" for platform in consts.BUILD_PLATFORMS: - inspect = environments.with_crane(self.context).with_exec( + inspect = docker.with_crane(self.context).with_exec( ["manifest", "--platform", f"{str(platform)}", f"docker.io/{self.context.docker_image}"] ) try: @@ -130,10 +135,6 @@ async def _run(self, attempt: int = 3) -> StepResult: return StepResult(self, status=StepStatus.FAILURE, stderr=str(e)) -class InvalidSpecOutputError(Exception): - pass - - class UploadSpecToCache(Step): title = "Upload connector spec to spec cache bucket" default_spec_file_name = "spec.json" @@ -203,6 +204,8 @@ async def _run(self, built_connector: Container) -> StepResult: return StepResult(self, status=StepStatus.SUCCESS, stdout="Uploaded connector spec to spec cache bucket.") +## Pipeline + async def run_connector_publish_pipeline(context: PublishConnectorContext, semaphore: anyio.Semaphore) -> ConnectorReport: """Run a publish pipeline for a single connector. @@ -217,7 +220,7 @@ async def run_connector_publish_pipeline(context: PublishConnectorContext, semap ConnectorReport: The reports holding publish results. """ - metadata_upload_step = metadata.MetadataUpload( + metadata_upload_step = MetadataUpload( context=context, metadata_service_gcs_credentials_secret=context.metadata_service_gcs_credentials_secret, docker_hub_username_secret=context.docker_hub_username_secret, @@ -238,7 +241,7 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: results = [] - metadata_validation_results = await metadata.MetadataValidation(context).run() + metadata_validation_results = await MetadataValidation(context).run() results.append(metadata_validation_results) # Exit early if the metadata file is invalid. @@ -266,7 +269,7 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport: if check_connector_image_results.status is not StepStatus.SUCCESS: return create_connector_report(results) - build_connector_results = await builds.run_connector_build(context) + build_connector_results = await steps.run_connector_build(context) results.append(build_connector_results) # Exit early if the connector image failed to build diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+#
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py
new file mode 100644
index 0000000000000..051c70d5a3797
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py
@@ -0,0 +1,107 @@
+from pipelines import main_logger
+from pipelines.helpers.github import update_global_commit_status_check_for_tests
+from pipelines.pipeline.connectors.commands import connectors
+from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.models.contexts import ContextState
+from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines
+from pipelines.pipeline.connectors.test.steps import run_connector_test_pipeline
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+
+
+import anyio
+import click
+
+
+import sys
+
+
+@connectors.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.")
+@click.option(
+    "--code-tests-only",
+    is_flag=True,
+    help=("Only execute code tests. " "Metadata checks, QA, and acceptance tests will be skipped."),
+    default=False,
+    type=bool,
+)
+@click.option(
+    "--fail-fast",
+    help="When enabled, tests will fail fast.",
+    default=False,
+    type=bool,
+    is_flag=True,
+)
+@click.option(
+    "--fast-tests-only",
+    help="When enabled, slow tests are skipped.",
+    default=False,
+    type=bool,
+    is_flag=True,
+)
+@click.pass_context
+def test(
+    ctx: click.Context,
+    code_tests_only: bool,
+    fail_fast: bool,
+    fast_tests_only: bool,
+) -> bool:
+    """Runs a test pipeline for the selected connectors.
+
+    Args:
+        ctx (click.Context): The click context.
+    """
+    if ctx.obj["is_ci"] and ctx.obj["pull_request"] and ctx.obj["pull_request"].draft:
+        main_logger.info("Skipping connectors tests for draft pull request.")
+        sys.exit(0)
+
+    if ctx.obj["selected_connectors_with_modified_files"]:
+        update_global_commit_status_check_for_tests(ctx.obj, "pending")
+    else:
+        main_logger.warn("No connectors were selected for testing.")
+        update_global_commit_status_check_for_tests(ctx.obj, "success")
+        return True
+
+    connectors_tests_contexts = [
+        ConnectorContext(
+            pipeline_name=f"Testing connector {connector.technical_name}",
+            connector=connector,
+            is_local=ctx.obj["is_local"],
+            git_branch=ctx.obj["git_branch"],
+            git_revision=ctx.obj["git_revision"],
+            ci_report_bucket=ctx.obj["ci_report_bucket_name"],
+            report_output_prefix=ctx.obj["report_output_prefix"],
+            use_remote_secrets=ctx.obj["use_remote_secrets"],
+            gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"),
+            dagger_logs_url=ctx.obj.get("dagger_logs_url"),
+            pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"),
+            ci_context=ctx.obj.get("ci_context"),
+            pull_request=ctx.obj.get("pull_request"),
+            ci_gcs_credentials=ctx.obj["ci_gcs_credentials"],
+            fail_fast=fail_fast,
+            fast_tests_only=fast_tests_only,
+            code_tests_only=code_tests_only,
+            use_local_cdk=ctx.obj.get("use_local_cdk"),
+        )
+        for connector in ctx.obj["selected_connectors_with_modified_files"]
+    ]
+    try:
+        anyio.run(
+            run_connectors_pipelines,
+            connectors_tests_contexts,
+            run_connector_test_pipeline,
+            "Test Pipeline",
+            ctx.obj["concurrency"],
+            ctx.obj["dagger_logs_path"],
+            ctx.obj["execute_timeout"],
+        )
+    except Exception as e:
+        main_logger.error("An error occurred while running the test pipeline", exc_info=e)
+        update_global_commit_status_check_for_tests(ctx.obj, "failure")
+        return False
+
+    @ctx.call_on_close
+    def 
send_commit_status_check() -> None: + if ctx.obj["is_ci"]: + global_success = all(connector_context.state is ContextState.SUCCESSFUL for connector_context in connectors_tests_contexts) + update_global_commit_status_check_for_tests(ctx.obj, "success" if global_success else "failure") + + # If we reach this point, it means that all the connectors have been tested so the pipeline did its job and can exit with success. + return True diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py similarity index 90% rename from airbyte-ci/connectors/pipelines/pipelines/tests/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py index c8ef5020903fe..8fa15c0d8d0e5 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/tests/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py @@ -9,11 +9,12 @@ import anyio import asyncer from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage -from pipelines.bases import ConnectorReport, StepResult -from pipelines.contexts import ConnectorContext -from pipelines.pipelines.metadata import MetadataValidation -from pipelines.tests import java_connectors, python_connectors -from pipelines.tests.common import QaChecks, VersionFollowsSemverCheck, VersionIncrementCheck +from pipelines.models.bases import ConnectorReport, StepResult +from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.test.steps import java_connectors +from pipelines.pipeline.metadata.pipeline import MetadataValidation +from pipelines.pipeline.connectors.test.steps import python_connectors +from pipelines.pipeline.connectors.test.steps.common import QaChecks, VersionFollowsSemverCheck, VersionIncrementCheck LANGUAGE_MAPPING = { "run_all_tests": { diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/common.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py similarity index 96% rename from airbyte-ci/connectors/pipelines/pipelines/tests/common.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py index 253a9257434c8..f56fc880aa9e3 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/tests/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py @@ -16,9 +16,10 @@ from connector_ops.utils import Connector from dagger import Container, Directory, File from pipelines import hacks -from pipelines.actions import environments -from pipelines.bases import CIContext, Step, StepResult, StepStatus -from pipelines.utils import METADATA_FILE_NAME +from pipelines.dagger.actions import secrets +import pipelines.dagger.actions.internal_tools +from pipelines.models.bases import CIContext, Step, StepResult, StepStatus +from pipelines.helpers.utils import METADATA_FILE_NAME class VersionCheck(Step, ABC): @@ -143,7 +144,7 @@ async def _run(self) -> StepResult: Returns: StepResult: Failure or success of the QA checks with stdout and stderr. 
""" - connector_ops = await environments.with_connector_ops(self.context) + connector_ops = await pipelines.dagger.actions.internal_tools.with_connector_ops(self.context) include = [ str(self.context.connector.code_directory), str(self.context.connector.documentation_file_path), @@ -272,7 +273,7 @@ async def _build_connector_acceptance_test(self, connector_under_test_image_tar: .with_env_variable("CONNECTOR_UNDER_TEST_IMAGE_TAR_PATH", "/dagger_share/connector_under_test_image.tar") .with_workdir("/test_input") .with_mounted_directory("/test_input", test_input) - .with_(await environments.mounted_connector_secrets(self.context, "/test_input/secrets")) + .with_(await secrets.mounted_connector_secrets(self.context, "/test_input/secrets")) ) if "_EXPERIMENTAL_DAGGER_RUNNER_HOST" in os.environ: self.context.logger.info("Using experimental dagger runner host to run CAT with dagger-in-dagger") diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py similarity index 86% rename from airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py index d668c47314ce1..4505d9c2ee198 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py @@ -9,15 +9,15 @@ import anyio import asyncer from dagger import Directory, File, QueryError -from pipelines.actions import environments, secrets -from pipelines.bases import StepResult, StepStatus -from pipelines.builds.java_connectors import BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path -from pipelines.builds.normalization import BuildOrPullNormalization +from pipelines.dagger.actions.system import docker, secrets +from pipelines.models.steps import StepResult, StepStatus +from pipelines.pipeline.connectors.builds.steps.java_connectors import BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path +from pipelines.pipeline.connectors.builds.steps.normalization import BuildOrPullNormalization from pipelines.consts import LOCAL_BUILD_PLATFORM -from pipelines.contexts import ConnectorContext -from pipelines.gradle import GradleTask -from pipelines.tests.common import AcceptanceTests -from pipelines.utils import export_container_to_tarball +from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipelines.models.steps import GradleTask +from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests +from pipelines.helpers.utils import export_container_to_tarball class IntegrationTests(GradleTask): @@ -31,13 +31,13 @@ class IntegrationTests(GradleTask): async def _load_normalization_image(self, normalization_tar_file: File): normalization_image_tag = f"{self.context.connector.normalization_repository}:dev" self.context.logger.info("Load the normalization image to the docker host.") - await environments.load_image_to_docker_host(self.context, normalization_tar_file, normalization_image_tag) + await docker.load_image_to_docker_host(self.context, normalization_tar_file, normalization_image_tag) self.context.logger.info("Successfully loaded the normalization image to the docker host.") async def _load_connector_image(self, connector_tar_file: File): connector_image_tag = f"airbyte/{self.context.connector.technical_name}:dev" 
diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py
similarity index 86%
rename from airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py
rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py
index d668c47314ce1..4505d9c2ee198 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/tests/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py
@@ -9,15 +9,15 @@
 import anyio
 import asyncer
 from dagger import Directory, File, QueryError
-from pipelines.actions import environments, secrets
-from pipelines.bases import StepResult, StepStatus
-from pipelines.builds.java_connectors import BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path
-from pipelines.builds.normalization import BuildOrPullNormalization
+from pipelines.dagger.actions.system import docker
+from pipelines.dagger.actions import secrets
+from pipelines.models.steps import StepResult, StepStatus
+from pipelines.pipeline.connectors.builds.steps.java_connectors import BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path
+from pipelines.pipeline.connectors.builds.steps.normalization import BuildOrPullNormalization
 from pipelines.consts import LOCAL_BUILD_PLATFORM
-from pipelines.contexts import ConnectorContext
-from pipelines.gradle import GradleTask
-from pipelines.tests.common import AcceptanceTests
-from pipelines.utils import export_container_to_tarball
+from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.models.steps import GradleTask
+from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests
+from pipelines.helpers.utils import export_container_to_tarball
 
 
 class IntegrationTests(GradleTask):
@@ -31,13 +31,13 @@ class IntegrationTests(GradleTask):
     async def _load_normalization_image(self, normalization_tar_file: File):
         normalization_image_tag = f"{self.context.connector.normalization_repository}:dev"
         self.context.logger.info("Load the normalization image to the docker host.")
-        await environments.load_image_to_docker_host(self.context, normalization_tar_file, normalization_image_tag)
+        await docker.load_image_to_docker_host(self.context, normalization_tar_file, normalization_image_tag)
         self.context.logger.info("Successfully loaded the normalization image to the docker host.")
 
     async def _load_connector_image(self, connector_tar_file: File):
         connector_image_tag = f"airbyte/{self.context.connector.technical_name}:dev"
         self.context.logger.info("Load the connector image to the docker host")
-        await environments.load_image_to_docker_host(self.context, connector_tar_file, connector_image_tag)
+        await docker.load_image_to_docker_host(self.context, connector_tar_file, connector_image_tag)
         self.context.logger.info("Successfully loaded the connector image to the docker host.")
 
     async def _run(self, connector_tar_file: File, normalization_tar_file: Optional[File]) -> StepResult:
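The `_load_*_image` helpers above implement a hand-off: images built in dagger are exported to tarballs, then loaded into the shared docker host so tests can run them by tag. A hedged sketch of the flow (the tuple return shape of `export_container_to_tarball` is an assumption):

    # Sketch: build -> export to tarball -> load into the shared dockerd by tag.
    async def stage_image(context, built_container, image_tag: str) -> None:
        tar_file, _ = await export_container_to_tarball(context, built_container)  # return shape assumed
        await docker.load_image_to_docker_host(context, tar_file, image_tag)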
""" - secret_mounting_function = await environments.mounted_connector_secrets(self.context, "secrets") + secret_mounting_function = await secrets.mounted_connector_secrets(self.context, "secrets") container_with_test_deps = ( # Install the connector python package in /test_environment with the extra dependencies - await environments.with_python_connector_installed( + await pipelines.dagger.actions.python.common.with_python_connector_installed( self.context, # Reset the entrypoint to run non airbyte commands built_connector_container.with_entrypoint([]), diff --git a/airbyte-ci/connectors/pipelines/pipelines/tests/templates/test_report.html.j2 b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/templates/test_report.html.j2 similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/tests/templates/test_report.html.j2 rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/templates/test_report.html.j2 diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py new file mode 100644 index 0000000000000..c80cd83897e2d --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py @@ -0,0 +1,67 @@ +from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_base_image_upgrade_pipeline +from pipelines.pipeline.connectors.commands import connectors +from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand + + +import anyio +import click + + +@connectors.command(cls=DaggerPipelineCommand, help="Upgrades the base image version used by the selected connectors..") +@click.option("--set-if-not-exists", default=True) +@click.option( + "--docker-hub-username", + help="Your username to connect to DockerHub to read the registries.", + type=click.STRING, + required=True, + envvar="DOCKER_HUB_USERNAME", +) +@click.option( + "--docker-hub-password", + help="Your password to connect to DockerHub to read the registries.", + type=click.STRING, + required=True, + envvar="DOCKER_HUB_PASSWORD", +) +@click.pass_context +def upgrade_base_image(ctx: click.Context, set_if_not_exists: bool, docker_hub_username: str, docker_hub_password: str) -> bool: + """Upgrades the base image version used by the selected connectors.""" + + connectors_contexts = [ + ConnectorContext( + pipeline_name=f"Upgrade base image versions of connector {connector.technical_name}", + connector=connector, + is_local=ctx.obj["is_local"], + git_branch=ctx.obj["git_branch"], + git_revision=ctx.obj["git_revision"], + ci_report_bucket=ctx.obj["ci_report_bucket_name"], + report_output_prefix=ctx.obj["report_output_prefix"], + use_remote_secrets=ctx.obj["use_remote_secrets"], + gha_workflow_run_url=ctx.obj.get("gha_workflow_run_url"), + 
dagger_logs_url=ctx.obj.get("dagger_logs_url"), + pipeline_start_timestamp=ctx.obj.get("pipeline_start_timestamp"), + ci_context=ctx.obj.get("ci_context"), + ci_gcs_credentials=ctx.obj["ci_gcs_credentials"], + ci_git_user=ctx.obj["ci_git_user"], + ci_github_access_token=ctx.obj["ci_github_access_token"], + open_report_in_browser=False, + docker_hub_username=docker_hub_username, + docker_hub_password=docker_hub_password, + ) + for connector in ctx.obj["selected_connectors_with_modified_files"] + ] + + anyio.run( + run_connectors_pipelines, + connectors_contexts, + run_connector_base_image_upgrade_pipeline, + "Upgrade base image pipeline", + ctx.obj["concurrency"], + ctx.obj["dagger_logs_path"], + ctx.obj["execute_timeout"], + set_if_not_exists, + ) + + return True diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/metadata.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py similarity index 88% rename from airbyte-ci/connectors/pipelines/pipelines/commands/groups/metadata.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py index dcbfe4ba41af7..a27814fa3dc52 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/metadata.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py @@ -1,11 +1,10 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# +from pipelines.pipelines.metadata import run_metadata_orchestrator_deploy_pipeline +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand + import anyio import click -from pipelines.pipelines.metadata import run_metadata_orchestrator_deploy_pipeline -from pipelines.utils import DaggerPipelineCommand + # MAIN GROUP @@ -16,9 +15,6 @@ def metadata(ctx: click.Context): pass -# DEPLOY GROUP - - @metadata.group(help="Commands related to deploying components of the metadata service.") @click.pass_context def deploy(ctx: click.Context): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipelines/metadata.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py similarity index 90% rename from airbyte-ci/connectors/pipelines/pipelines/pipelines/metadata.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py index aab9bd69ccc9e..367afab001f90 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipelines/metadata.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py @@ -1,24 +1,24 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# import uuid from typing import Optional import dagger -from pipelines.actions.environments import with_pip_packages, with_python_base -from pipelines.bases import Report, Step, StepResult +from pipelines.dagger.actions.python.common import with_pip_packages +from pipelines.dagger.containers.python import with_python_base +from pipelines.models.bases import Report, Step, StepResult from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH -from pipelines.contexts import ConnectorContext, PipelineContext +from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext from pipelines.helpers.steps import run_steps -from pipelines.steps.poetry_run_step import PoetryRunStep -from pipelines.steps.simple_docker_step import MountPath, SimpleDockerStep +from pipelines.models.steps import PoetryRunStep, MountPath, SimpleDockerStep +from pipelines.tools.internal import INTERNAL_TOOL_PATHS +from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable +from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH +from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.models.steps import MountPath, SimpleDockerStep from pipelines.tools.internal import INTERNAL_TOOL_PATHS -from pipelines.utils import DAGGER_CONFIG, get_secret_host_variable # STEPS - class MetadataValidation(SimpleDockerStep): def __init__(self, context: ConnectorContext): super().__init__( @@ -40,7 +40,6 @@ def __init__(self, context: ConnectorContext): ], ) - class MetadataUpload(SimpleDockerStep): # When the metadata service exits with this code, it means the metadata is valid but the upload was skipped because the metadata is already uploaded skipped_exit_code = 5 diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/git.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py similarity index 74% rename from airbyte-ci/connectors/pipelines/pipelines/git.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py index 01c1d180d4f7f..5f8dad9b7a821 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/git.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py @@ -2,11 +2,39 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
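`skipped_exit_code = 5` lets MetadataUpload distinguish "metadata already uploaded" from a real failure. A self-contained sketch of the mapping this convention implies (the actual translation lives in the Step/StepResult machinery, so names here are illustrative):

    # Illustrative mapping only; exit code 5 comes from the metadata service.
    def interpret_exit_code(exit_code: int, skipped_exit_code: int = 5) -> str:
        if exit_code == 0:
            return "SUCCESS"
        if exit_code == skipped_exit_code:
            return "SKIPPED"  # metadata valid, upload already done
        return "FAILURE"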
diff --git a/airbyte-ci/connectors/pipelines/pipelines/git.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py
similarity index 74%
rename from airbyte-ci/connectors/pipelines/pipelines/git.py
rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py
index 01c1d180d4f7f..5f8dad9b7a821 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/git.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py
@@ -2,11 +2,39 @@
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 #
 
-from dagger import Client, Directory, Secret
-from pipelines.actions import environments
-from pipelines.bases import Step, StepResult
-from pipelines.github import AIRBYTE_GITHUB_REPO
-from pipelines.utils import sh_dash_c
+from dagger import Client, Container, Directory, Secret
+from pipelines.models.steps import Step, StepResult
+from pipelines.helpers.github import AIRBYTE_GITHUB_REPO
+from pipelines.helpers.utils import sh_dash_c
+
+
+def with_git(dagger_client: Client, ci_git_user: str = "octavia") -> Container:
+    return (
+        dagger_client.container()
+        .from_("alpine:latest")
+        .with_exec(
+            sh_dash_c(
+                [
+                    "apk update",
+                    "apk add git tar wget",
+                    f"git config --global user.email {ci_git_user}@users.noreply.github.com",
+                    f"git config --global user.name {ci_git_user}",
+                    "git config --global --add --bool push.autoSetupRemote true",
+                ]
+            )
+        )
+        .with_workdir("/ghcli")
+        .with_exec(
+            sh_dash_c(
+                [
+                    "wget https://github.com/cli/cli/releases/download/v2.30.0/gh_2.30.0_linux_amd64.tar.gz -O ghcli.tar.gz",
+                    "tar --strip-components=1 -xf ghcli.tar.gz",
+                    "rm ghcli.tar.gz",
+                    "cp bin/gh /usr/local/bin/gh",
+                ]
+            )
+        )
+    )
 
 
 class GitPushChanges(Step):
@@ -51,7 +79,7 @@ async def _run(
         self, changed_directory: Directory, changed_directory_path: str, commit_message: str, skip_ci: bool = True
     ) -> StepResult:
         diff = (
-            environments.with_git(self.dagger_client, self.context.ci_github_access_token_secret, self.ci_git_user)
+            with_git(self.dagger_client, self.ci_git_user)
             .with_secret_variable("AUTHENTICATED_REPO_URL", self.authenticated_repo_url)
             .with_mounted_directory("/airbyte", self.airbyte_repo)
             .with_workdir("/airbyte")
@@ -105,7 +133,7 @@ def git_branch(self) -> str:
 
     async def _run(self, commit_message: str, skip_ci: bool = True) -> StepResult:
         push_empty_commit = (
-            environments.with_git(self.dagger_client, self.ci_github_access_token_secret, self.ci_git_user)
+            with_git(self.dagger_client, self.ci_git_user)
             .with_secret_variable("AUTHENTICATED_REPO_URL", self.authenticated_repo_url)
             .with_mounted_directory("/airbyte", self.airbyte_repo)
             .with_workdir("/airbyte")
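A short usage sketch for the `with_git` helper above, assuming a connected dagger client (the gh v2.30.0 pin comes from the helper itself):

    import sys
    import anyio
    import dagger

    async def main() -> None:
        async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as client:
            git_container = with_git(client)  # helper defined in the diff above
            print(await git_container.with_exec(["gh", "--version"]).stdout())

    anyio.run(main)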
+ """ + success = anyio.run(run_test, poetry_package_path, test_directory) + if not success: + click.Abort() diff --git a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/tests.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py similarity index 79% rename from airbyte-ci/connectors/pipelines/pipelines/commands/groups/tests.py rename to airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py index dd789fc7b6702..d5ac46aaccc03 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/commands/groups/tests.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py @@ -1,38 +1,13 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -""" -Module exposing the tests command to test airbyte-ci projects. -""" +from pipelines.consts import DOCKER_VERSION +from pipelines.helpers.utils import sh_dash_c -import logging -import os -import sys -import anyio -import click import dagger -from pipelines.consts import DOCKER_VERSION -from pipelines.utils import sh_dash_c -@click.command() -@click.argument("poetry_package_path") -@click.option("--test-directory", default="tests", help="The directory containing the tests to run.") -def test( - poetry_package_path: str, - test_directory: str, -): - """Runs the tests for the given airbyte-ci package. - - Args: - poetry_package_path (str): Path to the poetry package to test, relative to airbyte-ci directory. - test_directory (str): The directory containing the tests to run. - """ - success = anyio.run(run_test, poetry_package_path, test_directory) - if not success: - click.Abort() +import logging +import os +import sys async def run_test(poetry_package_path: str, test_directory: str) -> bool: diff --git a/airbyte-ci/connectors/pipelines/pipelines/steps/poetry_run_step.py b/airbyte-ci/connectors/pipelines/pipelines/steps/poetry_run_step.py deleted file mode 100644 index 8aa8b1925e33c..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/steps/poetry_run_step.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from pipelines.actions.environments import with_poetry_module -from pipelines.bases import Step, StepResult -from pipelines.contexts import PipelineContext - - -class PoetryRunStep(Step): - def __init__(self, context: PipelineContext, title: str, parent_dir_path: str, module_path: str): - """A simple step that runs a given command inside a poetry project. - - Args: - context (PipelineContext): context of the step - title (str): name of the step - parent_dir_path (str): The path to the parent directory of the poetry project - module_path (str): The path to the poetry project - """ - self.title = title - super().__init__(context) - - parent_dir = self.context.get_repo_dir(parent_dir_path) - module_path = module_path - self.poetry_run_container = with_poetry_module(self.context, parent_dir, module_path).with_entrypoint(["poetry", "run"]) - - async def _run(self, poetry_run_args: list) -> StepResult: - poetry_run_exec = self.poetry_run_container.with_exec(poetry_run_args) - return await self.get_step_result(poetry_run_exec) diff --git a/airbyte-ci/connectors/pipelines/pipelines/steps/simple_docker_step.py b/airbyte-ci/connectors/pipelines/pipelines/steps/simple_docker_step.py deleted file mode 100644 index 0cfcd114873bd..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/steps/simple_docker_step.py +++ /dev/null @@ -1,129 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -from dataclasses import dataclass -from pathlib import Path -from typing import List, Optional - -import dagger -from pipelines import main_logger -from pipelines.actions.environments import with_installed_pipx_package, with_python_base -from pipelines.bases import Step, StepResult -from pipelines.contexts import PipelineContext - - -@dataclass -class MountPath: - path: Path - optional: bool = False - - def _cast_fields(self): - self.path = Path(self.path) - self.optional = bool(self.optional) - - def _check_exists(self): - if not self.path.exists(): - message = f"{self.path} does not exist." - if self.optional: - main_logger.warning(message) - else: - raise FileNotFoundError(message) - - def __post_init__(self): - self._cast_fields() - self._check_exists() - - def __str__(self): - return str(self.path) - - @property - def is_file(self) -> bool: - return self.path.is_file() - - -class SimpleDockerStep(Step): - def __init__( - self, - title: str, - context: PipelineContext, - paths_to_mount: List[MountPath] = [], - internal_tools: List[MountPath] = [], - secrets: dict[str, dagger.Secret] = {}, - env_variables: dict[str, str] = {}, - working_directory: str = "/", - command: Optional[List[str]] = None, - ): - """A simple step that runs a given command in a container. - - Args: - title (str): name of the step - context (PipelineContext): context of the step - paths_to_mount (List[MountPath], optional): directory paths to mount. Defaults to []. - internal_tools (List[MountPath], optional): internal tools to install. Defaults to []. - secrets (dict[str, dagger.Secret], optional): secrets to add to container. Defaults to {}. - env_variables (dict[str, str], optional): env variables to set in container. Defaults to {}. - working_directory (str, optional): working directory to run the command in. Defaults to "/". - command (Optional[List[str]], optional): The default command to run. Defaults to None. 
- """ - self.title = title - super().__init__(context) - - self.paths_to_mount = paths_to_mount - self.working_directory = working_directory - self.internal_tools = internal_tools - self.secrets = secrets - self.env_variables = env_variables - self.command = command - - def _mount_paths(self, container: dagger.Container) -> dagger.Container: - for path_to_mount in self.paths_to_mount: - if path_to_mount.optional and not path_to_mount.path.exists(): - continue - - path_string = str(path_to_mount) - destination_path = f"/{path_string}" - if path_to_mount.is_file: - file_to_load = self.context.get_repo_file(path_string) - container = container.with_mounted_file(destination_path, file_to_load) - else: - container = container.with_mounted_directory(destination_path, self.context.get_repo_dir(path_string)) - return container - - async def _install_internal_tools(self, container: dagger.Container) -> dagger.Container: - for internal_tool in self.internal_tools: - container = await with_installed_pipx_package(self.context, container, str(internal_tool)) - return container - - def _set_workdir(self, container: dagger.Container) -> dagger.Container: - return container.with_workdir(self.working_directory) - - def _set_env_variables(self, container: dagger.Container) -> dagger.Container: - for key, value in self.env_variables.items(): - container = container.with_env_variable(key, value) - return container - - def _set_secrets(self, container: dagger.Container) -> dagger.Container: - for key, value in self.secrets.items(): - container = container.with_secret_variable(key, value) - return container - - async def init_container(self) -> dagger.Container: - # TODO (ben): Replace with python base container when available - container = with_python_base(self.context) - - container = self._mount_paths(container) - container = self._set_env_variables(container) - container = self._set_secrets(container) - container = await self._install_internal_tools(container) - container = self._set_workdir(container) - - return container - - async def _run(self, command=None) -> StepResult: - command_to_run = command or self.command - if not command_to_run: - raise ValueError(f"No command given to the {self.title} step") - - container_to_run = await self.init_container() - return await self.get_step_result(container_to_run.with_exec(command_to_run)) diff --git a/airbyte-ci/connectors/pipelines/pipelines/utils.py b/airbyte-ci/connectors/pipelines/pipelines/utils.py deleted file mode 100644 index 80396266c73bf..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/utils.py +++ /dev/null @@ -1,640 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -"""This module groups util function used in pipelines.""" -from __future__ import annotations - -import contextlib -import datetime -import json -import os -import re -import sys -import unicodedata -from glob import glob -from io import TextIOWrapper -from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, FrozenSet, List, Optional, Set, Tuple, Union - -import anyio -import asyncer -import click -import git -from connector_ops.utils import get_changed_connectors -from dagger import Client, Config, Connection, Container, DaggerError, ExecError, File, ImageLayerCompression, QueryError, Secret -from google.cloud import storage -from google.oauth2 import service_account -from more_itertools import chunked -from pipelines import consts, main_logger, sentry_utils -from pipelines.consts import GCS_PUBLIC_DOMAIN - -if TYPE_CHECKING: - from connector_ops.utils import Connector - from github import PullRequest - from pipelines.contexts import ConnectorContext - -DAGGER_CONFIG = Config(log_output=sys.stderr) -AIRBYTE_REPO_URL = "https://github.com/airbytehq/airbyte.git" -METADATA_FILE_NAME = "metadata.yaml" -METADATA_ICON_FILE_NAME = "icon.svg" -DIFF_FILTER = "MADRT" # Modified, Added, Deleted, Renamed, Type changed -IGNORED_FILE_EXTENSIONS = [".md"] -STATIC_REPORT_PREFIX = "airbyte-ci" - - -# This utils will probably be redundant once https://github.com/dagger/dagger/issues/3764 is implemented -async def check_path_in_workdir(container: Container, path: str) -> bool: - """Check if a local path is mounted to the working directory of a container. - - Args: - container (Container): The container on which we want the check the path existence. - path (str): Directory or file path we want to check the existence in the container working directory. - - Returns: - bool: Whether the path exists in the container working directory. - """ - workdir = (await container.with_exec(["pwd"], skip_entrypoint=True).stdout()).strip() - mounts = await container.mounts() - if workdir in mounts: - expected_file_path = Path(workdir[1:]) / path - return expected_file_path.is_file() or expected_file_path.is_dir() - else: - return False - - -def secret_host_variable(client: Client, name: str, default: str = ""): - """Add a host environment variable as a secret in a container. - - Example: - container.with_(secret_host_variable(client, "MY_SECRET")) - - Args: - client (Client): The dagger client. - name (str): The name of the environment variable. The same name will be - used in the container, for the secret name and for the host variable. - default (str): The default value to use if the host variable is not set. Defaults to "". - - Returns: - Callable[[Container], Container]: A function that can be used in a `Container.with_()` method. - """ - - def _secret_host_variable(container: Container): - return container.with_secret_variable(name, get_secret_host_variable(client, name, default)) - - return _secret_host_variable - - -def get_secret_host_variable(client: Client, name: str, default: str = "") -> Secret: - """Creates a dagger.Secret from a host environment variable. - - Args: - client (Client): The dagger client. - name (str): The name of the environment variable. The same name will be used for the secret. - default (str): The default value to use if the host variable is not set. Defaults to "". - - Returns: - Secret: A dagger secret. 
- """ - return client.set_secret(name, os.environ.get(name, default)) - - -# This utils will probably be redundant once https://github.com/dagger/dagger/issues/3764 is implemented -async def get_file_contents(container: Container, path: str) -> Optional[str]: - """Retrieve a container file contents. - - Args: - container (Container): The container hosting the file you want to read. - path (str): Path, in the container, to the file you want to read. - - Returns: - Optional[str]: The file content if the file exists in the container, None otherwise. - """ - try: - return await container.file(path).contents() - except QueryError as e: - if "no such file or directory" not in str(e): - # this error could come from a network issue - raise - return None - - -@contextlib.contextmanager -def catch_exec_error_group(): - try: - yield - except anyio.ExceptionGroup as eg: - for e in eg.exceptions: - if isinstance(e, ExecError): - raise e - raise - - -async def get_container_output(container: Container) -> Tuple[str, str]: - """Retrieve both stdout and stderr of a container, concurrently. - - Args: - container (Container): The container to execute. - - Returns: - Tuple[str, str]: The stdout and stderr of the container, respectively. - """ - with catch_exec_error_group(): - async with asyncer.create_task_group() as task_group: - soon_stdout = task_group.soonify(container.stdout)() - soon_stderr = task_group.soonify(container.stderr)() - return soon_stdout.value, soon_stderr.value - - -async def get_exec_result(container: Container) -> Tuple[int, str, str]: - """Retrieve the exit_code along with stdout and stderr of a container by handling the ExecError. - - Note: It is preferrable to not worry about the exit code value and just capture - ExecError to handle errors. This is offered as a convenience when the exit code - value is actually needed. - - If the container has a file at /exit_code, the exit code will be read from it. - See hacks.never_fail_exec for more details. - - Args: - container (Container): The container to execute. - - Returns: - Tuple[int, str, str]: The exit_code, stdout and stderr of the container, respectively. - """ - try: - exit_code = 0 - in_file_exit_code = await get_file_contents(container, "/exit_code") - if in_file_exit_code: - exit_code = int(in_file_exit_code) - return exit_code, *(await get_container_output(container)) - except ExecError as e: - return e.exit_code, e.stdout, e.stderr - - -async def with_exit_code(container: Container) -> int: - """Read the container exit code. - - Args: - container (Container): The container from which you want to read the exit code. - - Returns: - int: The exit code. 
- """ - try: - await container - except ExecError as e: - return e.exit_code - return 0 - - -async def with_stderr(container: Container) -> str: - """Retrieve the stderr of a container even on execution error.""" - try: - return await container.stderr() - except ExecError as e: - return e.stderr - - -async def with_stdout(container: Container) -> str: - """Retrieve the stdout of a container even on execution error.""" - try: - return await container.stdout() - except ExecError as e: - return e.stdout - - -def get_current_git_branch() -> str: # noqa D103 - return git.Repo().active_branch.name - - -def get_current_git_revision() -> str: # noqa D103 - return git.Repo().head.object.hexsha - - -def get_current_epoch_time() -> int: # noqa D103 - return round(datetime.datetime.utcnow().timestamp()) - - -async def get_modified_files_in_branch_remote( - current_git_branch: str, current_git_revision: str, diffed_branch: str = "origin/master" -) -> Set[str]: - """Use git diff to spot the modified files on the remote branch.""" - async with Connection(DAGGER_CONFIG) as dagger_client: - modified_files = await ( - dagger_client.container() - .from_("alpine/git:latest") - .with_workdir("/repo") - .with_exec(["init"]) - .with_env_variable("CACHEBUSTER", current_git_revision) - .with_exec( - [ - "remote", - "add", - "--fetch", - "--track", - diffed_branch.split("/")[-1], - "--track", - current_git_branch, - "origin", - AIRBYTE_REPO_URL, - ] - ) - .with_exec(["checkout", "-t", f"origin/{current_git_branch}"]) - .with_exec(["diff", f"--diff-filter={DIFF_FILTER}", "--name-only", f"{diffed_branch}...{current_git_revision}"]) - .stdout() - ) - return set(modified_files.split("\n")) - - -def get_modified_files_in_branch_local(current_git_revision: str, diffed_branch: str = "master") -> Set[str]: - """Use git diff and git status to spot the modified files on the local branch.""" - airbyte_repo = git.Repo() - modified_files = airbyte_repo.git.diff( - f"--diff-filter={DIFF_FILTER}", "--name-only", f"{diffed_branch}...{current_git_revision}" - ).split("\n") - status_output = airbyte_repo.git.status("--porcelain") - for not_committed_change in status_output.split("\n"): - file_path = not_committed_change.strip().split(" ")[-1] - if file_path: - modified_files.append(file_path) - return set(modified_files) - - -def get_modified_files_in_branch(current_git_branch: str, current_git_revision: str, diffed_branch: str, is_local: bool = True) -> Set[str]: - """Retrieve the list of modified files on the branch.""" - if is_local: - return get_modified_files_in_branch_local(current_git_revision, diffed_branch) - else: - return anyio.run(get_modified_files_in_branch_remote, current_git_branch, current_git_revision, diffed_branch) - - -async def get_modified_files_in_commit_remote(current_git_branch: str, current_git_revision: str) -> Set[str]: - async with Connection(DAGGER_CONFIG) as dagger_client: - modified_files = await ( - dagger_client.container() - .from_("alpine/git:latest") - .with_workdir("/repo") - .with_exec(["init"]) - .with_env_variable("CACHEBUSTER", current_git_revision) - .with_exec( - [ - "remote", - "add", - "--fetch", - "--track", - current_git_branch, - "origin", - AIRBYTE_REPO_URL, - ] - ) - .with_exec(["checkout", "-t", f"origin/{current_git_branch}"]) - .with_exec(["diff-tree", "--no-commit-id", "--name-only", current_git_revision, "-r"]) - .stdout() - ) - return set(modified_files.split("\n")) - - -def get_modified_files_in_commit_local(current_git_revision: str) -> Set[str]: - airbyte_repo = git.Repo() 
- modified_files = airbyte_repo.git.diff_tree("--no-commit-id", "--name-only", current_git_revision, "-r").split("\n") - return set(modified_files) - - -def get_modified_files_in_commit(current_git_branch: str, current_git_revision: str, is_local: bool = True) -> Set[str]: - if is_local: - return get_modified_files_in_commit_local(current_git_revision) - else: - return anyio.run(get_modified_files_in_commit_remote, current_git_branch, current_git_revision) - - -def get_modified_files_in_pull_request(pull_request: PullRequest) -> List[str]: - """Retrieve the list of modified files in a pull request.""" - return [f.filename for f in pull_request.get_files()] - - -def get_last_commit_message() -> str: - """Retrieve the last commit message.""" - return git.Repo().head.commit.message - - -def _is_ignored_file(file_path: Union[str, Path]) -> bool: - """Check if the provided file has an ignored extension.""" - return Path(file_path).suffix in IGNORED_FILE_EXTENSIONS - - -def _find_modified_connectors( - file_path: Union[str, Path], all_connectors: Set[Connector], dependency_scanning: bool = True -) -> Set[Connector]: - """Find all connectors impacted by the file change.""" - modified_connectors = set() - - for connector in all_connectors: - if Path(file_path).is_relative_to(Path(connector.code_directory)): - main_logger.info(f"Adding connector '{connector}' due to connector file modification: {file_path}.") - modified_connectors.add(connector) - - if dependency_scanning: - for connector_dependency in connector.get_local_dependency_paths(): - if Path(file_path).is_relative_to(Path(connector_dependency)): - # Add the connector to the modified connectors - modified_connectors.add(connector) - main_logger.info(f"Adding connector '{connector}' due to dependency modification: '{file_path}'.") - return modified_connectors - - -def get_modified_connectors(modified_files: Set[Path], all_connectors: Set[Connector], dependency_scanning: bool) -> Set[Connector]: - """Return the set of connectors impacted by the modified files. - If dependency scanning is enabled, any modification to a dependency will trigger the connector pipeline for all connectors that depend on it. - It currently works only for Java connectors. - It's especially useful to trigger tests of the strict-encrypt variant when a change is made to the base connector, - or to test all JDBC connectors when a change is made to source-jdbc or base-java. - We'll consider extending the dependency resolution to Python connectors once we confirm that it's needed and feasible in terms of scale.
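These selection helpers end up in `pipelines/helpers/git.py` (the `tests/test_utils.py` hunk further down already calls `pipelines.helpers.git.get_modified_connectors`). A hedged usage sketch; the change set below is hypothetical, only the call shape is taken from this file:

```python
from pathlib import Path

from pipelines.helpers.git import get_modified_connectors  # new location per the test updates below

# Hypothetical change set: one connector file plus a shared Java base file.
modified_files = {
    Path("airbyte-integrations/connectors/source-example/setup.py"),
    Path("airbyte-integrations/bases/base-java/build.gradle"),
}
all_connectors = set()  # in practice, the full Set[Connector] discovered in the repo
# With dependency scanning on, Java connectors depending on base-java are selected too.
impacted = get_modified_connectors(modified_files, all_connectors, dependency_scanning=True)
```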
- """ - # Ignore files with certain extensions - modified_connectors = set() - for modified_file in modified_files: - if not _is_ignored_file(modified_file): - modified_connectors.update(_find_modified_connectors(modified_file, all_connectors, dependency_scanning)) - return modified_connectors - - -def get_connector_modified_files(connector: Connector, all_modified_files: Set[Path]) -> FrozenSet[Path]: - connector_modified_files = set() - for modified_file in all_modified_files: - modified_file_path = Path(modified_file) - if modified_file_path.is_relative_to(connector.code_directory): - connector_modified_files.add(modified_file) - return frozenset(connector_modified_files) - - -def get_modified_metadata_files(modified_files: Set[Union[str, Path]]) -> Set[Path]: - return { - Path(str(f)) - for f in modified_files - if str(f).endswith(METADATA_FILE_NAME) and str(f).startswith("airbyte-integrations/connectors") and "-scaffold-" not in str(f) - } - - -def get_expected_metadata_files(modified_files: Set[Union[str, Path]]) -> Set[Path]: - changed_connectors = get_changed_connectors(modified_files=modified_files) - return {changed_connector.metadata_file_path for changed_connector in changed_connectors} - - -def get_all_metadata_files() -> Set[Path]: - return { - Path(metadata_file) - for metadata_file in glob("airbyte-integrations/connectors/**/metadata.yaml", recursive=True) - if "-scaffold-" not in metadata_file - } - - -def slugify(value: Any, allow_unicode: bool = False): - """ - Taken from https://github.com/django/django/blob/master/django/utils/text.py. - - Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated - dashes to single dashes. Remove characters that aren't alphanumerics, - underscores, or hyphens. Convert to lowercase. Also strip leading and - trailing whitespace, dashes, and underscores. - """ - value = str(value) - if allow_unicode: - value = unicodedata.normalize("NFKC", value) - else: - value = unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii") - value = re.sub(r"[^\w\s-]", "", value.lower()) - return re.sub(r"[-\s]+", "-", value).strip("-_") - - -def key_value_text_to_dict(text: str) -> dict: - kv = {} - for line in text.split("\n"): - if "=" in line: - try: - k, v = line.split("=") - except ValueError: - continue - kv[k] = v - return kv - - -async def key_value_file_to_dict(file: File) -> dict: - return key_value_text_to_dict(await file.contents()) - - -async def get_dockerfile_labels(dockerfile: File) -> dict: - return {k.replace("LABEL ", ""): v for k, v in (await key_value_file_to_dict(dockerfile)).items() if k.startswith("LABEL")} - - -async def get_version_from_dockerfile(dockerfile: File) -> str: - dockerfile_labels = await get_dockerfile_labels(dockerfile) - try: - return dockerfile_labels["io.airbyte.version"] - except KeyError: - raise Exception("Could not get the version from the Dockerfile labels.") - - -def create_and_open_file(file_path: Path) -> TextIOWrapper: - """Create a file and open it for writing. - - Args: - file_path (Path): The path to the file to create. - - Returns: - File: The file object. - """ - file_path.parent.mkdir(parents=True, exist_ok=True) - file_path.touch() - return file_path.open("w") - - -class DaggerPipelineCommand(click.Command): - @sentry_utils.with_command_context - def invoke(self, ctx: click.Context) -> Any: - """Wrap parent invoke in a try catch suited to handle pipeline failures. - Args: - ctx (click.Context): The invocation context. 
- Raises: - e: Raise whatever exception that was caught. - Returns: - Any: The invocation return value. - """ - command_name = self.name - main_logger.info(f"Running Dagger Command {command_name}...") - main_logger.info( - "If you're running this command for the first time the Dagger engine image will be pulled, it can take a short minute..." - ) - ctx.obj["report_output_prefix"] = self.render_report_output_prefix(ctx) - dagger_logs_gcs_key = f"{ctx.obj['report_output_prefix']}/dagger-logs.txt" - try: - if not ctx.obj["show_dagger_logs"]: - dagger_log_dir = Path(f"{consts.LOCAL_REPORTS_PATH_ROOT}/{ctx.obj['report_output_prefix']}") - dagger_log_path = Path(f"{dagger_log_dir}/dagger.log").resolve() - ctx.obj["dagger_logs_path"] = dagger_log_path - main_logger.info(f"Saving dagger logs to: {dagger_log_path}") - if ctx.obj["is_ci"]: - ctx.obj["dagger_logs_url"] = f"{GCS_PUBLIC_DOMAIN}/{ctx.obj['ci_report_bucket_name']}/{dagger_logs_gcs_key}" - else: - ctx.obj["dagger_logs_url"] = None - else: - ctx.obj["dagger_logs_path"] = None - pipeline_success = super().invoke(ctx) - if not pipeline_success: - raise DaggerError(f"Dagger Command {command_name} failed.") - except DaggerError as e: - main_logger.error(f"Dagger Command {command_name} failed", exc_info=e) - sys.exit(1) - finally: - if ctx.obj.get("dagger_logs_path"): - if ctx.obj["is_local"]: - main_logger.info(f"Dagger logs saved to {ctx.obj['dagger_logs_path']}") - if ctx.obj["is_ci"]: - gcs_uri, public_url = upload_to_gcs( - ctx.obj["dagger_logs_path"], ctx.obj["ci_report_bucket_name"], dagger_logs_gcs_key, ctx.obj["ci_gcs_credentials"] - ) - main_logger.info(f"Dagger logs saved to {gcs_uri}. Public URL: {public_url}") - - @staticmethod - def render_report_output_prefix(ctx: click.Context) -> str: - """Render the report output prefix for any command in the Connector CLI. - - The goal is to standardize the output of all logs and reports generated by the CLI - related to a specific command, and to a specific CI context. - - Note: We cannot hoist this higher in the command hierarchy because only one level of - subcommands are available at the time the context is created. - """ - - git_branch = ctx.obj["git_branch"] - git_revision = ctx.obj["git_revision"] - pipeline_start_timestamp = ctx.obj["pipeline_start_timestamp"] - ci_context = ctx.obj["ci_context"] - ci_job_key = ctx.obj["ci_job_key"] if ctx.obj.get("ci_job_key") else ci_context - - sanitized_branch = slugify(git_branch.replace("/", "_")) - - # get the command name for the current context, if a group then prepend the parent command name - if ctx.command_path: - cmd_components = ctx.command_path.split(" ") - cmd_components[0] = STATIC_REPORT_PREFIX - cmd = "/".join(cmd_components) - else: - cmd = None - - path_values = [ - cmd, - ci_job_key, - sanitized_branch, - pipeline_start_timestamp, - git_revision, - ] - - # check all values are defined - if None in path_values: - raise ValueError(f"Missing value required to render the report output prefix: {path_values}") - - # join all values with a slash, and convert all values to string - return "/".join(map(str, path_values)) - - -async def execute_concurrently(steps: List[Callable], concurrency=5): - tasks = [] - # Asyncer does not have builtin semaphore, so control concurrency via chunks of steps - # Anyio has semaphores but does not have the soonify method which allow access to results via the value task attribute. 
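The chunking trade-off described in this comment is worth seeing from the caller's side: `execute_concurrently` takes zero-argument async callables and bounds parallelism by the chunk size. A small sketch, assuming the helper keeps this signature at its post-refactor home in `pipelines/helpers/utils.py`:

```python
import functools

import anyio

from pipelines.helpers.utils import execute_concurrently  # assumed post-refactor location


async def fake_step(i: int) -> int:
    await anyio.sleep(0.1)
    return i


async def main() -> None:
    # Each entry is a zero-argument callable returning an awaitable, as soonify expects.
    steps = [functools.partial(fake_step, i) for i in range(10)]
    results = await execute_concurrently(steps, concurrency=3)  # at most 3 in flight per chunk
    print(results)  # [0, 1, ..., 9] in submission order


anyio.run(main)
```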
- for chunk in chunked(steps, concurrency): - async with asyncer.create_task_group() as task_group: - tasks += [task_group.soonify(step)() for step in chunk] - return [task.value for task in tasks] - - -async def export_container_to_tarball( - context: ConnectorContext, container: Container, tar_file_name: Optional[str] = None -) -> Tuple[Optional[File], Optional[Path]]: - """Save the container image to the host filesystem as a tar archive. - - Exporting a container image as a tar archive allows user to have a dagger built container image available on their host filesystem. - They can load this tar file to their main docker host with 'docker load'. - This mechanism is also used to share dagger built containers with other steps like AcceptanceTest that have their own dockerd service. - We 'docker load' this tar file to AcceptanceTest's docker host to make sure the container under test image is available for testing. - - Returns: - Tuple[Optional[File], Optional[Path]]: A tuple with the file object holding the tar archive on the host and its path. - """ - if tar_file_name is None: - tar_file_name = f"{context.connector.technical_name}_{context.git_revision}.tar" - tar_file_name = slugify(tar_file_name) - local_path = Path(f"{context.host_image_export_dir_path}/{tar_file_name}") - export_success = await container.export(str(local_path), forced_compression=ImageLayerCompression.Gzip) - if export_success: - exported_file = ( - context.dagger_client.host().directory(context.host_image_export_dir_path, include=[tar_file_name]).file(tar_file_name) - ) - return exported_file, local_path - else: - return None, None - - -def sanitize_gcs_credentials(raw_value: Optional[str]) -> Optional[str]: - """Try to parse the raw string input that should contain a json object with the GCS credentials. - It will raise an exception if the parsing fails and help us to fail fast on invalid credentials input. - - Args: - raw_value (str): A string representing a json object with the GCS credentials. - - Returns: - str: The raw value string if it was successfully parsed. - """ - if raw_value is None: - return None - return json.dumps(json.loads(raw_value)) - - -def format_duration(time_delta: datetime.timedelta) -> str: - total_seconds = time_delta.total_seconds() - if total_seconds < 60: - return "{:.2f}s".format(total_seconds) - minutes = int(total_seconds // 60) - seconds = int(total_seconds % 60) - return "{:02d}mn{:02d}s".format(minutes, seconds) - - -def upload_to_gcs(file_path: Path, bucket_name: str, object_name: str, credentials: str) -> Tuple[str, str]: - """Upload a file to a GCS bucket. - - Args: - file_path (Path): The path to the file to upload. - bucket_name (str): The name of the GCS bucket. - object_name (str): The name of the object in the GCS bucket. - credentials (str): The GCS credentials as a JSON string. - """ - # Exit early if file does not exist - if not file_path.exists(): - main_logger.warning(f"File {file_path} does not exist. 
Skipping upload to GCS.") - return "", "" - - credentials = service_account.Credentials.from_service_account_info(json.loads(credentials)) - client = storage.Client(credentials=credentials) - bucket = client.get_bucket(bucket_name) - blob = bucket.blob(object_name) - blob.upload_from_filename(str(file_path)) - gcs_uri = f"gs://{bucket_name}/{object_name}" - public_url = f"{GCS_PUBLIC_DOMAIN}/{bucket_name}/{object_name}" - return gcs_uri, public_url - - -def sh_dash_c(lines: List[str]) -> List[str]: - """Wrap sequence of commands in shell for safe usage of dagger Container's with_exec method.""" - return ["sh", "-c", " && ".join(["set -o xtrace"] + lines)] - - -def transform_strs_to_paths(str_paths: List[str]) -> List[Path]: - """Transform a list of string paths to a list of Path objects. - - Args: - str_paths (List[str]): A list of string paths. - - Returns: - List[Path]: A list of Path objects. - """ - return [Path(str_path) for str_path in str_paths] diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index e5dcf19d5060e..5f04b4316cfac 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -37,4 +37,4 @@ pytest-cov = "^4.1.0" [tool.poetry.scripts] airbyte-ci-internal = "pipelines.commands.airbyte_ci:airbyte_ci" -airbyte-ci = "pipelines.dagger_run:main" +airbyte-ci = "pipelines.cli.dagger_run:main" diff --git a/airbyte-ci/connectors/pipelines/tests/conftest.py b/airbyte-ci/connectors/pipelines/tests/conftest.py index 2e35e8e37bab9..318e05aa5f866 100644 --- a/airbyte-ci/connectors/pipelines/tests/conftest.py +++ b/airbyte-ci/connectors/pipelines/tests/conftest.py @@ -13,7 +13,7 @@ import pytest import requests from connector_ops.utils import Connector -from pipelines import utils +from pipelines.helpers import utils from tests.utils import ALL_CONNECTORS diff --git a/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py b/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py index 8794019cd5c78..5999c707fd03d 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py +++ b/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py @@ -3,8 +3,8 @@ # import pytest -from pipelines.actions import environments -from pipelines.contexts import ConnectorContext +from pipelines.dagger.actions.python import common +from pipelines.pipeline.connectors.context import ConnectorContext pytestmark = [ pytest.mark.anyio, @@ -33,7 +33,7 @@ async def test_apply_python_development_overrides(connector_context, use_local_c before_override_pip_freeze = await fake_connector_container.with_exec(["pip", "freeze"]).stdout() assert "airbyte-cdk" not in before_override_pip_freeze.splitlines(), "The base image should not have the airbyte-cdk installed." 
- connector_with_overrides = await environments.apply_python_development_overrides(connector_context, fake_connector_container) + connector_with_overrides = await common.apply_python_development_overrides(connector_context, fake_connector_container) after_override_pip_freeze = await connector_with_overrides.with_exec(["pip", "freeze"]).stdout() if use_local_cdk: diff --git a/airbyte-ci/connectors/pipelines/tests/test_bases.py b/airbyte-ci/connectors/pipelines/tests/test_bases.py index e1f1ebee2f733..870723e408df7 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_bases.py +++ b/airbyte-ci/connectors/pipelines/tests/test_bases.py @@ -7,7 +7,7 @@ import anyio import pytest from dagger import DaggerError -from pipelines import bases +from pipelines.models import bases pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py index d8b042491fb30..3358083f8213c 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py @@ -5,9 +5,10 @@ from pathlib import Path import pytest -from pipelines.bases import StepStatus -from pipelines.builds import build_customization, python_connectors -from pipelines.contexts import ConnectorContext +from pipelines.models.bases import StepStatus +from pipelines.pipeline.connectors.builds.steps import python_connectors +from pipelines.pipeline.connectors.builds.steps import build_customization +from pipelines.pipeline.connectors.context import ConnectorContext pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index c7c0b6aa819bf..10e5f58336ec4 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -7,8 +7,13 @@ import pytest from click.testing import CliRunner from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage -from pipelines.bases import ConnectorWithModifiedFiles +from pipelines.models.bases import ConnectorWithModifiedFiles from pipelines.commands.groups import connectors +import pipelines.pipeline.connectors.builds.commands +import pipelines.pipeline.connectors.commands +import pipelines.pipeline.connectors.format.commands +import pipelines.pipeline.connectors.publish.commands +import pipelines.pipeline.connectors.test.commands from tests.utils import pick_a_random_connector @@ -19,7 +24,7 @@ def runner(): def test_get_selected_connectors_by_name_no_file_modification(): connector = pick_a_random_connector() - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(connector.technical_name,), selected_support_levels=(), selected_languages=(), @@ -36,7 +41,7 @@ def test_get_selected_connectors_by_name_no_file_modification(): def test_get_selected_connectors_by_support_level_no_file_modification(): - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=["certified"], 
selected_languages=(), @@ -50,7 +55,7 @@ def test_get_selected_connectors_by_support_level_no_file_modification(): def test_get_selected_connectors_by_language_no_file_modification(): - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(ConnectorLanguage.LOW_CODE,), @@ -66,7 +71,7 @@ def test_get_selected_connectors_by_language_no_file_modification(): def test_get_selected_connectors_by_name_with_file_modification(): connector = pick_a_random_connector() modified_files = {connector.code_directory / "setup.py"} - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(connector.technical_name,), selected_support_levels=(), selected_languages=(), @@ -85,7 +90,7 @@ def test_get_selected_connectors_by_name_with_file_modification(): def test_get_selected_connectors_by_name_and_support_level_or_languages_leads_to_intersection(): connector = pick_a_random_connector() modified_files = {connector.code_directory / "setup.py"} - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(connector.technical_name,), selected_support_levels=(connector.support_level,), selected_languages=(connector.language,), @@ -102,7 +107,7 @@ def test_get_selected_connectors_with_modified(): first_modified_connector = pick_a_random_connector() second_modified_connector = pick_a_random_connector(other_picked_connectors=[first_modified_connector]) modified_files = {first_modified_connector.code_directory / "setup.py", second_modified_connector.code_directory / "setup.py"} - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -119,7 +124,7 @@ def test_get_selected_connectors_with_modified_and_language(): first_modified_connector = pick_a_random_connector(language=ConnectorLanguage.PYTHON) second_modified_connector = pick_a_random_connector(language=ConnectorLanguage.JAVA, other_picked_connectors=[first_modified_connector]) modified_files = {first_modified_connector.code_directory / "setup.py", second_modified_connector.code_directory / "setup.py"} - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(ConnectorLanguage.JAVA,), @@ -137,7 +142,7 @@ def test_get_selected_connectors_with_modified_and_support_level(): first_modified_connector = pick_a_random_connector(support_level="community") second_modified_connector = pick_a_random_connector(support_level="certified", other_picked_connectors=[first_modified_connector]) modified_files = {first_modified_connector.code_directory / "setup.py", second_modified_connector.code_directory / "setup.py"} - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( 
selected_names=(), selected_support_levels=["certified"], selected_languages=(), @@ -159,7 +164,7 @@ def test_get_selected_connectors_with_modified_and_metadata_only(): second_modified_connector.code_directory / METADATA_FILE_NAME, second_modified_connector.code_directory / "setup.py", } - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -185,7 +190,7 @@ def test_get_selected_connectors_with_metadata_only(): second_modified_connector.code_directory / METADATA_FILE_NAME, second_modified_connector.code_directory / "setup.py", } - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -206,7 +211,7 @@ def test_get_selected_connectors_with_metadata_only(): def test_get_selected_connectors_with_metadata_query(): connector = pick_a_random_connector() metadata_query = f"data.dockerRepository == '{connector.metadata['dockerRepository']}'" - selected_connectors = connectors.get_selected_connectors_with_modified_files( + selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -248,9 +253,9 @@ def click_context_obj(): @pytest.mark.parametrize( "command, command_args", [ - (connectors.test, []), + (pipelines.pipeline.connectors.test.commands.test, []), ( - connectors.publish, + pipelines.pipeline.connectors.publish.commands.publish, [ "--spec-cache-gcs-credentials", "test", @@ -266,7 +271,7 @@ def click_context_obj(): "test", ], ), - (connectors.build, []), + (pipelines.pipeline.connectors.builds.commands.build, []), ], ) def test_commands_do_not_override_connector_selection( diff --git a/airbyte-ci/connectors/pipelines/tests/test_gradle.py b/airbyte-ci/connectors/pipelines/tests/test_gradle.py index e45027c860dc9..1fbbf2e1ce637 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_gradle.py +++ b/airbyte-ci/connectors/pipelines/tests/test_gradle.py @@ -5,7 +5,8 @@ from pathlib import Path import pytest -from pipelines import bases, gradle +from pipelines import gradle +from pipelines.models import bases pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 9bcf38a9bfcdb..7dd1ee3d9bcfa 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -9,7 +9,8 @@ import anyio import pytest from pipelines import publish -from pipelines.bases import StepStatus +from pipelines.models.bases import StepStatus +import pipelines.pipeline.metadata.pipeline pytestmark = [ pytest.mark.anyio, @@ -149,7 +150,7 @@ def test_parse_spec_output_no_spec(self, publish_context): (publish, "UploadSpecToCache"), (publish, "PushConnectorImageToRegistry"), (publish, "PullConnectorImageFromRegistry"), - (publish.builds, "run_connector_build"), + (publish.steps, "run_connector_build"), ] @@ -159,7 +160,7 @@ async def test_run_connector_publish_pipeline_when_failed_validation(mocker, pre for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_metadata_validation = 
publish.metadata.MetadataValidation.return_value.run + run_metadata_validation = pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run run_metadata_validation.return_value = mocker.Mock(status=StepStatus.FAILURE) context = mocker.MagicMock(pre_release=pre_release) @@ -195,7 +196,7 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_metadata_validation = publish.metadata.MetadataValidation.return_value.run + run_metadata_validation = pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run run_metadata_validation.return_value = mocker.Mock(status=StepStatus.SUCCESS) # ensure spec always succeeds @@ -266,7 +267,7 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( """We check that the full pipeline is executed as expected when the connector image does not exist and the metadata validation passed.""" for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - publish.metadata.MetadataValidation.return_value.run.return_value = mocker.Mock( + pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run.return_value = mocker.Mock( name="metadata_validation_result", status=StepStatus.SUCCESS ) publish.CheckConnectorImageDoesNotExist.return_value.run.return_value = mocker.Mock( @@ -277,7 +278,7 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( built_connector_platform = mocker.Mock() built_connector_platform.values.return_value = ["linux/amd64"] - publish.builds.run_connector_build.return_value = mocker.Mock( + publish.steps.run_connector_build.return_value = mocker.Mock( name="build_connector_for_publish_result", status=build_step_status, output_artifact=built_connector_platform ) @@ -303,9 +304,9 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( report = await publish.run_connector_publish_pipeline(context, semaphore) steps_to_run = [ - publish.metadata.MetadataValidation.return_value.run, + pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run, publish.CheckConnectorImageDoesNotExist.return_value.run, - publish.builds.run_connector_build, + publish.steps.run_connector_build, publish.PushConnectorImageToRegistry.return_value.run, publish.PullConnectorImageFromRegistry.return_value.run, ] diff --git a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py index 83f3929037d9e..857ca596b51d6 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py +++ b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py @@ -5,9 +5,9 @@ from pathlib import Path import pytest -from pipelines.contexts import PipelineContext -from pipelines.steps.simple_docker_step import MountPath, SimpleDockerStep -from pipelines.utils import get_exec_result +from pipelines.models.contexts import PipelineContext +from pipelines.models.steps import MountPath, SimpleDockerStep +from pipelines.helpers.utils import get_exec_result pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py index b167c094ffa92..2d9ce079c9ded 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py +++ 
b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py @@ -11,8 +11,8 @@ import pytest import yaml from freezegun import freeze_time -from pipelines.bases import ConnectorWithModifiedFiles, StepStatus -from pipelines.tests import common +from pipelines.models.bases import ConnectorWithModifiedFiles, StepStatus +from pipelines.pipeline.connectors.test.steps import common pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py index cd9b3c7d77306..3d2c25cf0512d 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py @@ -3,10 +3,10 @@ # import pytest from connector_ops.utils import Connector -from pipelines.bases import StepResult -from pipelines.builds.python_connectors import BuildConnectorImages -from pipelines.contexts import ConnectorContext -from pipelines.tests.python_connectors import UnitTests +from pipelines.models.steps import StepResult +from pipelines.pipeline.connectors.builds.steps.python_connectors import BuildConnectorImages +from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.test.steps.python_connectors import UnitTests pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_utils.py index b540ca8d845c2..e58c164857068 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_utils.py +++ b/airbyte-ci/connectors/pipelines/tests/test_utils.py @@ -7,7 +7,8 @@ import pytest from connector_ops.utils import Connector, ConnectorLanguage -from pipelines import utils +from pipelines.helpers import utils +import pipelines.helpers.git from tests.utils import pick_a_random_connector @@ -135,7 +136,7 @@ def test_get_modified_connectors_with_dependency_scanning(all_connectors, enable ) modified_files.append(modified_java_connector.code_directory / "foo.bar") - modified_connectors = utils.get_modified_connectors(modified_files, all_connectors, enable_dependency_scanning) + modified_connectors = pipelines.helpers.git.get_modified_connectors(modified_files, all_connectors, enable_dependency_scanning) if enable_dependency_scanning: assert not_modified_java_connector in modified_connectors else: @@ -152,7 +153,7 @@ def test_get_connector_modified_files(): other_connector.code_directory / "README.md", } - result = utils.get_connector_modified_files(connector, all_modified_files) + result = pipelines.helpers.git.get_connector_modified_files(connector, all_modified_files) assert result == frozenset({connector.code_directory / "setup.py"}) @@ -164,7 +165,7 @@ def test_no_modified_files_in_connector_directory(): other_connector.code_directory / "README.md", } - result = utils.get_connector_modified_files(connector, all_modified_files) + result = pipelines.helpers.git.get_connector_modified_files(connector, all_modified_files) assert result == frozenset() From d2b8802875cf5aab64bc03fb8da2c0e49c8ebdeb Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 15:47:45 -0500 Subject: [PATCH 02/38] fix script paths. 
when running install --force, you might have to delete old binaries to get this to work --- airbyte-ci/connectors/pipelines/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 5f04b4316cfac..4c018c04e4461 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -36,5 +36,5 @@ freezegun = "^1.2.2" pytest-cov = "^4.1.0" [tool.poetry.scripts] -airbyte-ci-internal = "pipelines.commands.airbyte_ci:airbyte_ci" +airbyte-ci-internal = "pipelines.cli.airbyte_ci:airbyte_ci" airbyte-ci = "pipelines.cli.dagger_run:main" From d72aeb98ad84ac90f7fc2519462b207176342356 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 16:05:07 -0500 Subject: [PATCH 03/38] add bases back, fix some imports --- .../pipelines/pipelines/helpers/gcs.py | 2 +- .../pipelines/pipelines/models/bases.py | 472 ++++++++++++++++++ .../pipelines/pipelines/models/contexts.py | 14 +- .../pipelines/pipelines/models/reports.py | 7 +- .../pipelines/pipelines/models/steps.py | 3 +- 5 files changed, 493 insertions(+), 5 deletions(-) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/models/bases.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py index 510cdbcec28e0..fb936f177b8c7 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py @@ -2,7 +2,7 @@ from pipelines.consts import GCS_PUBLIC_DOMAIN -from google.cloud.storage import storage +from google.cloud import storage from google.oauth2 import service_account diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/bases.py b/airbyte-ci/connectors/pipelines/pipelines/models/bases.py new file mode 100644 index 0000000000000..72d311cafc483 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/models/bases.py @@ -0,0 +1,472 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +"""This module declare base / abstract models to be reused in a pipeline lifecycle.""" + +from __future__ import annotations + +import json +import logging +import webbrowser +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from datetime import datetime, timedelta +from enum import Enum +from typing import TYPE_CHECKING, Any, ClassVar, List, Optional, Set + +import anyio +import asyncer +from anyio import Path +from connector_ops.utils import Connector, console +from dagger import Container, DaggerError +from jinja2 import Environment, PackageLoader, select_autoescape +from pipelines import sentry_utils +from pipelines.consts import GCS_PUBLIC_DOMAIN +from pipelines.helpers.utils import METADATA_FILE_NAME, format_duration, get_exec_result +from rich.console import Group +from rich.panel import Panel +from rich.style import Style +from rich.table import Table +from rich.text import Text +from tabulate import tabulate + +from pipelines.models.reports import Report + +if TYPE_CHECKING: + from pipelines.models.contexts import PipelineContext + + +@dataclass(frozen=True) +class ConnectorWithModifiedFiles(Connector): + modified_files: Set[Path] = field(default_factory=frozenset) + + @property + def has_metadata_change(self) -> bool: + return any(path.name == METADATA_FILE_NAME for path in self.modified_files) + + +class CIContext(str, Enum): + """An enum for Ci context values which can be ["manual", "pull_request", "nightly_builds"].""" + + MANUAL = "manual" + PULL_REQUEST = "pull_request" + NIGHTLY_BUILDS = "nightly_builds" + MASTER = "master" + + def __str__(self) -> str: + return self.value + + +class StepStatus(Enum): + """An Enum to characterize the success, failure or skipping of a Step.""" + + SUCCESS = "Successful" + FAILURE = "Failed" + SKIPPED = "Skipped" + + def get_rich_style(self) -> Style: + """Match color used in the console output to the step status.""" + if self is StepStatus.SUCCESS: + return Style(color="green") + if self is StepStatus.FAILURE: + return Style(color="red", bold=True) + if self is StepStatus.SKIPPED: + return Style(color="yellow") + + def get_emoji(self) -> str: + """Match emoji used in the console output to the step status.""" + if self is StepStatus.SUCCESS: + return "✅" + if self is StepStatus.FAILURE: + return "❌" + if self is StepStatus.SKIPPED: + return "🟡" + + def __str__(self) -> str: # noqa D105 + return self.value + + +class Step(ABC): + """An abstract class to declare and run pipeline step.""" + + title: ClassVar[str] + max_retries: ClassVar[int] = 0 + max_dagger_error_retries: ClassVar[int] = 3 + should_log: ClassVar[bool] = True + success_exit_code: ClassVar[int] = 0 + skipped_exit_code: ClassVar[int] = None + # The max duration of a step run. If the step run for more than this duration it will be considered as timed out. + # The default of 5 hours is arbitrary and can be changed if needed. 
+ max_duration: ClassVar[timedelta] = timedelta(hours=5) + + retry_delay = timedelta(seconds=10) + + def __init__(self, context: PipelineContext) -> None: # noqa D107 + self.context = context + self.retry_count = 0 + self.started_at = None + self.stopped_at = None + + @property + def run_duration(self) -> timedelta: + if self.started_at and self.stopped_at: + return self.stopped_at - self.started_at + else: + return timedelta(seconds=0) + + @property + def logger(self) -> logging.Logger: + if self.should_log: + return logging.getLogger(f"{self.context.pipeline_name} - {self.title}") + else: + disabled_logger = logging.getLogger() + disabled_logger.disabled = True + return disabled_logger + + @property + def dagger_client(self) -> Client: + return self.context.dagger_client.pipeline(self.title) + + async def log_progress(self, completion_event: anyio.Event) -> None: + """Log the step progress every 30 seconds until the step is done.""" + while not completion_event.is_set(): + duration = datetime.utcnow() - self.started_at + elapsed_seconds = duration.total_seconds() + if elapsed_seconds > 30 and round(elapsed_seconds) % 30 == 0: + self.logger.info(f"⏳ Still running... (duration: {format_duration(duration)})") + await anyio.sleep(1) + + async def run_with_completion(self, completion_event: anyio.Event, *args, **kwargs) -> StepResult: + """Run the step with a timeout and set the completion event when the step is done.""" + try: + with anyio.fail_after(self.max_duration.total_seconds()): + result = await self._run(*args, **kwargs) + completion_event.set() + return result + except TimeoutError: + self.retry_count = self.max_retries + 1 + self.logger.error(f"🚨 {self.title} timed out after {self.max_duration}. No additional retry will happen.") + completion_event.set() + return self._get_timed_out_step_result() + + @sentry_utils.with_step_context + async def run(self, *args, **kwargs) -> StepResult: + """Public method to run the step. It outputs a step result. + + If an unexpected dagger error happens, it outputs a failed step result with the exception payload. + + Returns: + StepResult: The step result following the step run. + """ + self.logger.info(f"🚀 Start {self.title}") + self.started_at = datetime.utcnow() + completion_event = anyio.Event() + try: + async with asyncer.create_task_group() as task_group: + soon_result = task_group.soonify(self.run_with_completion)(completion_event, *args, **kwargs) + task_group.soonify(self.log_progress)(completion_event) + step_result = soon_result.value + except DaggerError as e: + self.logger.error("Step failed with an unexpected dagger error", exc_info=e) + step_result = StepResult(self, StepStatus.FAILURE, stderr=str(e), exc_info=e) + + self.stopped_at = datetime.utcnow() + self.log_step_result(step_result) + + lets_retry = self.should_retry(step_result) + step_result = await self.retry(step_result, *args, **kwargs) if lets_retry else step_result + return step_result + + def should_retry(self, step_result: StepResult) -> bool: + """Return True if the step should be retried.""" + if step_result.status is not StepStatus.FAILURE: + return False + max_retries = self.max_dagger_error_retries if step_result.exc_info else self.max_retries + return self.retry_count < max_retries and max_retries > 0 + + async def retry(self, step_result, *args, **kwargs) -> StepResult: + self.retry_count += 1 + self.logger.warning( + f"Failed with error: {step_result.stderr}.\nRetry #{self.retry_count} in {self.retry_delay.total_seconds()} seconds..."
+        )
+        await anyio.sleep(self.retry_delay.total_seconds())
+        return await self.run(*args, **kwargs)
+
+    def log_step_result(self, result: StepResult) -> None:
+        """Log the step result.
+
+        Args:
+            result (StepResult): The step result to log.
+        """
+        duration = format_duration(self.run_duration)
+        if result.status is StepStatus.FAILURE:
+            self.logger.info(f"{result.status.get_emoji()} failed (duration: {duration})")
+        if result.status is StepStatus.SKIPPED:
+            self.logger.info(f"{result.status.get_emoji()} was skipped (duration: {duration})")
+        if result.status is StepStatus.SUCCESS:
+            self.logger.info(f"{result.status.get_emoji()} was successful (duration: {duration})")
+
+    @abstractmethod
+    async def _run(self, *args, **kwargs) -> StepResult:
+        """Implement the execution of the step and return a step result.
+
+        Returns:
+            StepResult: The result of the step run.
+        """
+        raise NotImplementedError("Steps must define a '_run' attribute.")
+
+    def skip(self, reason: Optional[str] = None) -> StepResult:
+        """Declare a step as skipped.
+
+        Args:
+            reason (str, optional): Reason why the step was skipped.
+
+        Returns:
+            StepResult: A skipped step result.
+        """
+        return StepResult(self, StepStatus.SKIPPED, stdout=reason)
+
+    def get_step_status_from_exit_code(
+        self,
+        exit_code: int,
+    ) -> StepStatus:
+        """Map an exit code to a step status.
+
+        Args:
+            exit_code (int): A process exit code.
+
+        Raises:
+            ValueError: Raised if the exit code is not mapped to a step status.
+
+        Returns:
+            StepStatus: The step status inferred from the exit code.
+        """
+        if exit_code == self.success_exit_code:
+            return StepStatus.SUCCESS
+        elif self.skipped_exit_code is not None and exit_code == self.skipped_exit_code:
+            return StepStatus.SKIPPED
+        else:
+            return StepStatus.FAILURE
+
+    async def get_step_result(self, container: Container) -> StepResult:
+        """Concurrent retrieval of exit code, stdout and stderr of a container.
+
+        Create a StepResult object from these objects.
+
+        Args:
+            container (Container): The container from which we want to infer a step result.
+
+        Returns:
+            StepResult: Failure or success with stdout and stderr.
+        """
+        exit_code, stdout, stderr = await get_exec_result(container)
+        return StepResult(
+            self,
+            self.get_step_status_from_exit_code(exit_code),
+            stderr=stderr,
+            stdout=stdout,
+            output_artifact=container,
+        )
+
+    def _get_timed_out_step_result(self) -> StepResult:
+        return StepResult(
+            self,
+            StepStatus.FAILURE,
+            stdout=f"Timed out after the max duration of {format_duration(self.max_duration)}.
 Please check out the Dagger logs to see what happened.",
+        )
+
+
+class NoOpStep(Step):
+    """A step that does nothing."""
+
+    title = "No Op"
+    should_log = False
+
+    def __init__(self, context: PipelineContext, step_status: StepStatus) -> None:
+        super().__init__(context)
+        self.step_status = step_status
+
+    async def _run(self, *args, **kwargs) -> StepResult:
+        return StepResult(self, self.step_status)
+
+
+@dataclass(frozen=True)
+class StepResult:
+    """A dataclass to capture the result of a step."""
+
+    step: Step
+    status: StepStatus
+    created_at: datetime = field(default_factory=datetime.utcnow)
+    stderr: Optional[str] = None
+    stdout: Optional[str] = None
+    output_artifact: Any = None
+    exc_info: Optional[Exception] = None
+
+    def __repr__(self) -> str:  # noqa D105
+        return f"{self.step.title}: {self.status.value}"
+
+    def __str__(self) -> str:  # noqa D105
+        return f"{self.step.title}: {self.status.value}\n\nSTDOUT:\n{self.stdout}\n\nSTDERR:\n{self.stderr}"
+
+    def __post_init__(self):
+        if self.stderr:
+            super().__setattr__("stderr", self.redact_secrets_from_string(self.stderr))
+        if self.stdout:
+            super().__setattr__("stdout", self.redact_secrets_from_string(self.stdout))
+
+    def redact_secrets_from_string(self, value: str) -> str:
+        for secret in self.step.context.secrets_to_mask:
+            value = value.replace(secret, "********")
+        return value
+
+
+@dataclass(frozen=True)
+class ConnectorReport(Report):
+    """A dataclass to build connector test reports to share pipelines executions results with the user."""
+
+    @property
+    def report_output_prefix(self) -> str:  # noqa D102
+        return f"{self.pipeline_context.report_output_prefix}/{self.pipeline_context.connector.technical_name}/{self.pipeline_context.connector.version}"
+
+    @property
+    def html_report_file_name(self) -> str:  # noqa D102
+        return self.filename + ".html"
+
+    @property
+    def html_report_remote_storage_key(self) -> str:  # noqa D102
+        return f"{self.report_output_prefix}/{self.html_report_file_name}"
+
+    @property
+    def html_report_url(self) -> str:  # noqa D102
+        return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.html_report_remote_storage_key}"
+
+    @property
+    def should_be_commented_on_pr(self) -> bool:  # noqa D102
+        return (
+            self.pipeline_context.should_save_report
+            and self.pipeline_context.is_ci
+            and self.pipeline_context.pull_request
+            and self.pipeline_context.PRODUCTION
+        )
+
+    def to_json(self) -> str:
+        """Create a JSON representation of the connector test report.
+
+        Returns:
+            str: The JSON representation of the report.
+ """ + return json.dumps( + { + "connector_technical_name": self.pipeline_context.connector.technical_name, + "connector_version": self.pipeline_context.connector.version, + "run_timestamp": self.created_at.isoformat(), + "run_duration": self.run_duration.total_seconds(), + "success": self.success, + "failed_steps": [s.step.__class__.__name__ for s in self.failed_steps], + "successful_steps": [s.step.__class__.__name__ for s in self.successful_steps], + "skipped_steps": [s.step.__class__.__name__ for s in self.skipped_steps], + "gha_workflow_run_url": self.pipeline_context.gha_workflow_run_url, + "pipeline_start_timestamp": self.pipeline_context.pipeline_start_timestamp, + "pipeline_end_timestamp": round(self.created_at.timestamp()), + "pipeline_duration": round(self.created_at.timestamp()) - self.pipeline_context.pipeline_start_timestamp, + "git_branch": self.pipeline_context.git_branch, + "git_revision": self.pipeline_context.git_revision, + "ci_context": self.pipeline_context.ci_context, + "cdk_version": self.pipeline_context.cdk_version, + "html_report_url": self.html_report_url, + "dagger_cloud_url": self.pipeline_context.dagger_cloud_url, + } + ) + + def post_comment_on_pr(self) -> None: + icon_url = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg" + global_status_emoji = "✅" if self.success else "❌" + commit_url = f"{self.pipeline_context.pull_request.html_url}/commits/{self.pipeline_context.git_revision}" + markdown_comment = f'## {self.pipeline_context.connector.technical_name} test report (commit [`{self.pipeline_context.git_revision[:10]}`]({commit_url})) - {global_status_emoji}\n\n' + markdown_comment += f"⏲️ Total pipeline duration: {format_duration(self.run_duration)} \n\n" + report_data = [ + [step_result.step.title, step_result.status.get_emoji()] + for step_result in self.steps_results + if step_result.status is not StepStatus.SKIPPED + ] + markdown_comment += tabulate(report_data, headers=["Step", "Result"], tablefmt="pipe") + "\n\n" + markdown_comment += f"🔗 [View the logs here]({self.html_report_url})\n\n" + + if self.pipeline_context.dagger_cloud_url: + markdown_comment += f"☁️ [View runs for commit in Dagger Cloud]({self.pipeline_context.dagger_cloud_url})\n\n" + + markdown_comment += "*Please note that tests are only run on PR ready for review. 
Please set your PR to draft mode to not flood the CI engine and upstream service on following commits.*\n" + markdown_comment += "**You can run the same pipeline locally on this branch with the [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) tool with the following command**\n" + markdown_comment += f"```bash\nairbyte-ci connectors --name={self.pipeline_context.connector.technical_name} test\n```\n\n" + self.pipeline_context.pull_request.create_issue_comment(markdown_comment) + + async def to_html(self) -> str: + env = Environment(loader=PackageLoader("pipelines.tests"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True) + template = env.get_template("test_report.html.j2") + template.globals["StepStatus"] = StepStatus + template.globals["format_duration"] = format_duration + local_icon_path = await Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve() + template_context = { + "connector_name": self.pipeline_context.connector.technical_name, + "step_results": self.steps_results, + "run_duration": self.run_duration, + "created_at": self.created_at.isoformat(), + "connector_version": self.pipeline_context.connector.version, + "gha_workflow_run_url": None, + "dagger_logs_url": None, + "git_branch": self.pipeline_context.git_branch, + "git_revision": self.pipeline_context.git_revision, + "commit_url": None, + "icon_url": local_icon_path.as_uri(), + } + + if self.pipeline_context.is_ci: + template_context["commit_url"] = f"https://github.com/airbytehq/airbyte/commit/{self.pipeline_context.git_revision}" + template_context["gha_workflow_run_url"] = self.pipeline_context.gha_workflow_run_url + template_context["dagger_logs_url"] = self.pipeline_context.dagger_logs_url + template_context["dagger_cloud_url"] = self.pipeline_context.dagger_cloud_url + template_context[ + "icon_url" + ] = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg" + return template.render(template_context) + + async def save(self) -> None: + local_html_path = await self.save_local(self.html_report_file_name, await self.to_html()) + absolute_path = await local_html_path.resolve() + if self.pipeline_context.open_report_in_browser: + self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}") + if self.pipeline_context.open_report_in_browser: + self.pipeline_context.logger.info("Opening HTML report in browser.") + webbrowser.open(absolute_path.as_uri()) + if self.remote_storage_enabled: + await self.save_remote(local_html_path, self.html_report_remote_storage_key, "text/html") + self.pipeline_context.logger.info(f"HTML report uploaded to {self.html_report_url}") + await super().save() + + def print(self): + """Print the test report to the console in a nice way.""" + connector_name = self.pipeline_context.connector.technical_name + main_panel_title = Text(f"{connector_name.upper()} - {self.name}") + main_panel_title.stylize(Style(color="blue", bold=True)) + duration_subtitle = Text(f"⏲️ Total pipeline duration for {connector_name}: {format_duration(self.run_duration)}") + step_results_table = Table(title="Steps results") + step_results_table.add_column("Step") + step_results_table.add_column("Result") + step_results_table.add_column("Duration") + + for step_result in self.steps_results: + step = Text(step_result.step.title) + step.stylize(step_result.status.get_rich_style()) + result = 
Text(step_result.status.value)
+            result.stylize(step_result.status.get_rich_style())
+            step_results_table.add_row(step, result, format_duration(step_result.step.run_duration))
+
+        details_instructions = Text("ℹ️ You can find more details with step executions logs in the saved HTML report.")
+        to_render = [step_results_table, details_instructions]
+
+        if self.pipeline_context.dagger_cloud_url:
+            self.pipeline_context.logger.info(f"🔗 View runs for commit in Dagger Cloud: {self.pipeline_context.dagger_cloud_url}")
+
+        main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle)
+        console.print(main_panel)
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py
index d08d7b84978ae..b33b46fb973f9 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py
@@ -17,7 +17,7 @@
 from github import PullRequest
 from pipelines import hacks
 from pipelines.helpers.gcs import sanitize_gcs_credentials
-from pipelines.models.bases import CIContext, Report
+from pipelines.models.reports import Report
 from pipelines.helpers.github import update_commit_status_check
 from pipelines.helpers.slack import send_message_to_webhook
 from pipelines.helpers.utils import AIRBYTE_REPO_URL
@@ -33,6 +33,18 @@ class ContextState(Enum):
     FAILURE = {"github_state": "failure", "description": "Pipeline failed."}
 
 
+class CIContext(str, Enum):
+    """An enum for CI context values which can be ["manual", "pull_request", "nightly_builds", "master"]."""
+
+    MANUAL = "manual"
+    PULL_REQUEST = "pull_request"
+    NIGHTLY_BUILDS = "nightly_builds"
+    MASTER = "master"
+
+    def __str__(self) -> str:
+        return self.value
+
+
 class PipelineContext:
     """The pipeline context is used to store configuration for a specific pipeline run."""
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/reports.py b/airbyte-ci/connectors/pipelines/pipelines/models/reports.py
index 4c28527fc9929..80fcf4386af41 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/models/reports.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/reports.py
@@ -7,6 +7,7 @@
 from __future__ import annotations
 
 import anyio
+import typing
 import json
 
 from dataclasses import dataclass, field
@@ -18,7 +19,6 @@
 from anyio import Path
 from connector_ops.utils import console
 from pipelines.consts import GCS_PUBLIC_DOMAIN, LOCAL_REPORTS_PATH_ROOT
-from pipelines.models.contexts import PipelineContext
 from pipelines.dagger.actions import remote_storage
 from pipelines.models.steps import StepResult, StepStatus
 from pipelines.helpers.utils import format_duration
@@ -28,11 +28,14 @@
 from rich.table import Table
 from rich.text import Text
 
+if typing.TYPE_CHECKING:
+    from pipelines.models.contexts import PipelineContext
+
 
 @dataclass(frozen=True)
 class Report:
     """A dataclass to build reports to share pipelines executions results with the user."""
 
-    pipeline_context: PipelineContext
+    pipeline_context: "PipelineContext"
     steps_results: List[StepResult]
     created_at: datetime = field(default_factory=datetime.utcnow)
     name: str = "REPORT"
diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
index ed7294d2aef4d..1acfb0d86875c 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py
@@ -6,8 +6,9 @@
 from abc import abstractmethod
 from dataclasses import dataclass
 from 
datetime import timedelta +from enum import Enum from pathlib import Path -from typing import List, Optional, Any, Enum +from typing import List, Optional, Any import anyio import asyncer From 7a9e590b2585558ed12c1e6702b1d934d0931ca2 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 16:12:14 -0500 Subject: [PATCH 04/38] fix some more imports --- airbyte-ci/connectors/pipelines/.gitignore | 3 ++- .../pipelines/dagger/actions/python/common.py | 15 ++++++++++++++- .../pipelines/dagger/actions/python/poetry.py | 14 -------------- .../pipelines/pipeline/connectors/context.py | 2 +- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/.gitignore b/airbyte-ci/connectors/pipelines/.gitignore index a93f5bbc51aca..6878ec3ac2919 100644 --- a/airbyte-ci/connectors/pipelines/.gitignore +++ b/airbyte-ci/connectors/pipelines/.gitignore @@ -1 +1,2 @@ -pipeline_reports \ No newline at end of file +pipeline_reports +.venv \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py index bf870f4d3275e..6e1f605ad30ab 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py @@ -3,7 +3,6 @@ # from pathlib import Path import re -from pipelines.dagger.actions.python.poetry import _install_python_dependencies_from_poetry from pipelines.dagger.containers.python import with_python_base, with_testing_dependencies from pipelines.helpers.utils import check_path_in_workdir, get_file_contents from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext @@ -157,6 +156,20 @@ def _install_python_dependencies_from_requirements_txt(container: Container) -> return container.with_exec(install_requirements_cmd) +def _install_python_dependencies_from_poetry( + container: Container, + additional_dependency_groups: Optional[List] = None, +) -> Container: + pip_install_poetry_cmd = ["pip", "install", "poetry"] + poetry_disable_virtual_env_cmd = ["poetry", "config", "virtualenvs.create", "false"] + poetry_install_no_venv_cmd = ["poetry", "install"] + if additional_dependency_groups: + for group in additional_dependency_groups: + poetry_install_no_venv_cmd += ["--with", group] + + return container.with_exec(pip_install_poetry_cmd).with_exec(poetry_disable_virtual_env_cmd).with_exec(poetry_install_no_venv_cmd) + + async def with_installed_python_package( context: PipelineContext, python_environment: Container, diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py index 35838b1937d71..c5ad862a824a4 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py @@ -53,20 +53,6 @@ async def find_local_dependencies_in_pyproject_toml( return local_dependency_paths -def _install_python_dependencies_from_poetry( - container: Container, - additional_dependency_groups: Optional[List] = None, -) -> Container: - pip_install_poetry_cmd = ["pip", "install", "poetry"] - poetry_disable_virtual_env_cmd = ["poetry", "config", "virtualenvs.create", "false"] - poetry_install_no_venv_cmd = ["poetry", "install"] - if additional_dependency_groups: - for group in additional_dependency_groups: - poetry_install_no_venv_cmd += 
["--with", group] - - return container.with_exec(pip_install_poetry_cmd).with_exec(poetry_disable_virtual_env_cmd).with_exec(poetry_install_no_venv_cmd) - - def with_poetry(context: PipelineContext) -> Container: """Install poetry in a python environment. diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py index c01465a319ef4..d357aa307783c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py @@ -15,7 +15,7 @@ from github import PullRequest from pipelines.dagger.actions import secrets from pipelines.models.bases import ConnectorReport, ConnectorWithModifiedFiles -from pipelines.models.context import PipelineContext +from pipelines.models.contexts import PipelineContext from pipelines.helpers.github import update_commit_status_check from pipelines.helpers.slack import send_message_to_webhook from pipelines.helpers.utils import METADATA_FILE_NAME From f9cbac0505a55b7221db4bd328b7a806b8b7d65e Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 16:29:55 -0500 Subject: [PATCH 05/38] move internal tools containers to containers. fix some more --- .../pipelines/pipelines/dagger/actions/secrets.py | 4 ++-- .../dagger/{actions => containers}/internal_tools.py | 0 airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py | 6 +----- 3 files changed, 3 insertions(+), 7 deletions(-) rename airbyte-ci/connectors/pipelines/pipelines/dagger/{actions => containers}/internal_tools.py (100%) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py index a97ad7bf4c637..2b0f77a39b0f4 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, Callable from dagger import Container, Secret -from pipelines.dagger.actions import internal_tools +from pipelines.dagger.containers.internal_tools import with_ci_credentials from pipelines.helpers.utils import get_file_contents, get_secret_host_variable from pipelines.pipeline.connectors.context import PipelineContext @@ -45,7 +45,7 @@ async def download(context: ConnectorContext, gcp_gsm_env_variable_name: str = " """ gsm_secret = get_secret_host_variable(context.dagger_client, gcp_gsm_env_variable_name) secrets_path = f"/{context.connector.code_directory}/secrets" - ci_credentials = await internal_tools.with_ci_credentials(context, gsm_secret) + ci_credentials = await with_ci_credentials(context, gsm_secret) with_downloaded_secrets = ( ci_credentials.with_exec(["mkdir", "-p", secrets_path]) .with_env_variable( diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/dagger/actions/internal_tools.py rename to airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py index 9c10f7689e8c5..cda317fda8a0b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py @@ -18,12 +18,8 @@ import anyio 
import asyncer -import click -from connector_ops.utils import get_changed_connectors -from dagger import Client, Config, Container, DaggerError, ExecError, File, ImageLayerCompression, QueryError, Secret +from dagger import Client, Config, Container, ExecError, File, ImageLayerCompression, QueryError, Secret from more_itertools import chunked -from pipelines import consts -from pipelines.helpers import sentry_utils if TYPE_CHECKING: from pipelines.pipeline.connectors.context import ConnectorContext From d9c3c73af950ef70fb7929b757bba80c84902246 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 16:54:13 -0500 Subject: [PATCH 06/38] bit of a hacky fix for circular imports on this one - come back to this --- .../pipelines/pipelines/dagger/actions/secrets.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py index 2b0f77a39b0f4..7f52ded0c69e2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py @@ -9,7 +9,6 @@ from typing import TYPE_CHECKING, Callable from dagger import Container, Secret -from pipelines.dagger.containers.internal_tools import with_ci_credentials from pipelines.helpers.utils import get_file_contents, get_secret_host_variable from pipelines.pipeline.connectors.context import PipelineContext @@ -43,6 +42,9 @@ async def download(context: ConnectorContext, gcp_gsm_env_variable_name: str = " Returns: Directory: A directory with the downloaded secrets. """ + # temp - fix circular import + from pipelines.dagger.containers.internal_tools import with_ci_credentials + gsm_secret = get_secret_host_variable(context.dagger_client, gcp_gsm_env_variable_name) secrets_path = f"/{context.connector.code_directory}/secrets" ci_credentials = await with_ci_credentials(context, gsm_secret) @@ -79,10 +81,13 @@ async def upload(context: ConnectorContext, gcp_gsm_env_variable_name: str = "GC Raises: ExecError: If the command returns a non-zero exit code. 
""" + # temp - fix circular import + from pipelines.dagger.containers.internal_tools import with_ci_credentials + gsm_secret = get_secret_host_variable(context.dagger_client, gcp_gsm_env_variable_name) secrets_path = f"/{context.connector.code_directory}/secrets" - ci_credentials = await internal_tools.with_ci_credentials(context, gsm_secret) + ci_credentials = await with_ci_credentials(context, gsm_secret) return await ci_credentials.with_directory(secrets_path, context.updated_secrets_dir).with_exec( ["ci_credentials", context.connector.technical_name, "update-secrets"] From a54dc060043a8c2141c7a0843703fab409d69ee2 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 16:58:06 -0500 Subject: [PATCH 07/38] move lots of stuff - some steps to pipelines/steps, internal tool containers to internal tools, reports to own module and connector report to pipelines/connectors --- .../pipelines/dagger/actions/secrets.py | 2 +- .../pipelines/pipelines/helpers/github.py | 2 +- .../pipelines/pipelines/models/bases.py | 173 +---------- .../pipelines/pipelines/models/steps.py | 294 +----------------- .../pipelines/pipeline/connectors/context.py | 3 +- .../migrate_to_base_image/pipeline.py | 5 +- .../pipelines/pipeline/connectors/reports.py | 0 .../pipelines/pipeline/metadata/pipeline.py | 6 +- .../pipelines/pipeline/steps/docker.py | 97 ++++++ .../pipelines/pipeline/steps/gradle.py | 161 ++++++++++ .../pipelines/pipeline/steps/no_op.py | 0 .../pipelines/pipeline/steps/poetry.py | 25 ++ .../test_steps/test_simple_docker_step.py | 3 +- 13 files changed, 304 insertions(+), 467 deletions(-) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py index 7f52ded0c69e2..049db7c56a98c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py @@ -10,11 +10,11 @@ from dagger import Container, Secret from pipelines.helpers.utils import get_file_contents, get_secret_host_variable -from pipelines.pipeline.connectors.context import PipelineContext if TYPE_CHECKING: from dagger import Container from pipelines.pipeline.connectors.context import ConnectorContext + from pipelines.pipeline.connectors.context import PipelineContext async def get_secrets_to_mask(ci_credentials_with_downloaded_secrets: Container) -> list[str]: diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py index d04996da8d2bd..34a7932987e93 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py @@ -11,7 +11,7 @@ from connector_ops.utils import console from pipelines import main_logger -from pipelines.models.bases import CIContext +from pipelines.models.contexts import CIContext if TYPE_CHECKING: from logging import Logger diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/bases.py b/airbyte-ci/connectors/pipelines/pipelines/models/bases.py index 
72d311cafc483..b3730ea4bebd2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/bases.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/bases.py @@ -6,9 +6,7 @@ from __future__ import annotations -import json import logging -import webbrowser from abc import ABC, abstractmethod from dataclasses import dataclass, field from datetime import datetime, timedelta @@ -18,20 +16,12 @@ import anyio import asyncer from anyio import Path -from connector_ops.utils import Connector, console +from connector_ops.utils import Connector from dagger import Container, DaggerError -from jinja2 import Environment, PackageLoader, select_autoescape from pipelines import sentry_utils -from pipelines.consts import GCS_PUBLIC_DOMAIN from pipelines.helpers.utils import METADATA_FILE_NAME, format_duration, get_exec_result -from rich.console import Group -from rich.panel import Panel from rich.style import Style -from rich.table import Table -from rich.text import Text -from tabulate import tabulate -from pipelines.models.reports import Report if TYPE_CHECKING: from pipelines.models.contexts import PipelineContext @@ -46,18 +36,6 @@ def has_metadata_change(self) -> bool: return any(path.name == METADATA_FILE_NAME for path in self.modified_files) -class CIContext(str, Enum): - """An enum for Ci context values which can be ["manual", "pull_request", "nightly_builds"].""" - - MANUAL = "manual" - PULL_REQUEST = "pull_request" - NIGHTLY_BUILDS = "nightly_builds" - MASTER = "master" - - def __str__(self) -> str: - return self.value - - class StepStatus(Enum): """An Enum to characterize the success, failure or skipping of a Step.""" @@ -321,152 +299,3 @@ def redact_secrets_from_string(self, value: str) -> str: return value -@dataclass(frozen=True) -class ConnectorReport(Report): - """A dataclass to build connector test reports to share pipelines executions results with the user.""" - - @property - def report_output_prefix(self) -> str: # noqa D102 - return f"{self.pipeline_context.report_output_prefix}/{self.pipeline_context.connector.technical_name}/{self.pipeline_context.connector.version}" - - @property - def html_report_file_name(self) -> str: # noqa D102 - return self.filename + ".html" - - @property - def html_report_remote_storage_key(self) -> str: # noqa D102 - return f"{self.report_output_prefix}/{self.html_report_file_name}" - - @property - def html_report_url(self) -> str: # noqa D102 - return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.html_report_remote_storage_key}" - - @property - def should_be_commented_on_pr(self) -> bool: # noqa D102 - return ( - self.pipeline_context.should_save_report - and self.pipeline_context.is_ci - and self.pipeline_context.pull_request - and self.pipeline_context.PRODUCTION - ) - - def to_json(self) -> str: - """Create a JSON representation of the connector test report. - - Returns: - str: The JSON representation of the report. 
- """ - return json.dumps( - { - "connector_technical_name": self.pipeline_context.connector.technical_name, - "connector_version": self.pipeline_context.connector.version, - "run_timestamp": self.created_at.isoformat(), - "run_duration": self.run_duration.total_seconds(), - "success": self.success, - "failed_steps": [s.step.__class__.__name__ for s in self.failed_steps], - "successful_steps": [s.step.__class__.__name__ for s in self.successful_steps], - "skipped_steps": [s.step.__class__.__name__ for s in self.skipped_steps], - "gha_workflow_run_url": self.pipeline_context.gha_workflow_run_url, - "pipeline_start_timestamp": self.pipeline_context.pipeline_start_timestamp, - "pipeline_end_timestamp": round(self.created_at.timestamp()), - "pipeline_duration": round(self.created_at.timestamp()) - self.pipeline_context.pipeline_start_timestamp, - "git_branch": self.pipeline_context.git_branch, - "git_revision": self.pipeline_context.git_revision, - "ci_context": self.pipeline_context.ci_context, - "cdk_version": self.pipeline_context.cdk_version, - "html_report_url": self.html_report_url, - "dagger_cloud_url": self.pipeline_context.dagger_cloud_url, - } - ) - - def post_comment_on_pr(self) -> None: - icon_url = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg" - global_status_emoji = "✅" if self.success else "❌" - commit_url = f"{self.pipeline_context.pull_request.html_url}/commits/{self.pipeline_context.git_revision}" - markdown_comment = f'## {self.pipeline_context.connector.technical_name} test report (commit [`{self.pipeline_context.git_revision[:10]}`]({commit_url})) - {global_status_emoji}\n\n' - markdown_comment += f"⏲️ Total pipeline duration: {format_duration(self.run_duration)} \n\n" - report_data = [ - [step_result.step.title, step_result.status.get_emoji()] - for step_result in self.steps_results - if step_result.status is not StepStatus.SKIPPED - ] - markdown_comment += tabulate(report_data, headers=["Step", "Result"], tablefmt="pipe") + "\n\n" - markdown_comment += f"🔗 [View the logs here]({self.html_report_url})\n\n" - - if self.pipeline_context.dagger_cloud_url: - markdown_comment += f"☁️ [View runs for commit in Dagger Cloud]({self.pipeline_context.dagger_cloud_url})\n\n" - - markdown_comment += "*Please note that tests are only run on PR ready for review. 
Please set your PR to draft mode to not flood the CI engine and upstream service on following commits.*\n" - markdown_comment += "**You can run the same pipeline locally on this branch with the [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) tool with the following command**\n" - markdown_comment += f"```bash\nairbyte-ci connectors --name={self.pipeline_context.connector.technical_name} test\n```\n\n" - self.pipeline_context.pull_request.create_issue_comment(markdown_comment) - - async def to_html(self) -> str: - env = Environment(loader=PackageLoader("pipelines.tests"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True) - template = env.get_template("test_report.html.j2") - template.globals["StepStatus"] = StepStatus - template.globals["format_duration"] = format_duration - local_icon_path = await Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve() - template_context = { - "connector_name": self.pipeline_context.connector.technical_name, - "step_results": self.steps_results, - "run_duration": self.run_duration, - "created_at": self.created_at.isoformat(), - "connector_version": self.pipeline_context.connector.version, - "gha_workflow_run_url": None, - "dagger_logs_url": None, - "git_branch": self.pipeline_context.git_branch, - "git_revision": self.pipeline_context.git_revision, - "commit_url": None, - "icon_url": local_icon_path.as_uri(), - } - - if self.pipeline_context.is_ci: - template_context["commit_url"] = f"https://github.com/airbytehq/airbyte/commit/{self.pipeline_context.git_revision}" - template_context["gha_workflow_run_url"] = self.pipeline_context.gha_workflow_run_url - template_context["dagger_logs_url"] = self.pipeline_context.dagger_logs_url - template_context["dagger_cloud_url"] = self.pipeline_context.dagger_cloud_url - template_context[ - "icon_url" - ] = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg" - return template.render(template_context) - - async def save(self) -> None: - local_html_path = await self.save_local(self.html_report_file_name, await self.to_html()) - absolute_path = await local_html_path.resolve() - if self.pipeline_context.open_report_in_browser: - self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}") - if self.pipeline_context.open_report_in_browser: - self.pipeline_context.logger.info("Opening HTML report in browser.") - webbrowser.open(absolute_path.as_uri()) - if self.remote_storage_enabled: - await self.save_remote(local_html_path, self.html_report_remote_storage_key, "text/html") - self.pipeline_context.logger.info(f"HTML report uploaded to {self.html_report_url}") - await super().save() - - def print(self): - """Print the test report to the console in a nice way.""" - connector_name = self.pipeline_context.connector.technical_name - main_panel_title = Text(f"{connector_name.upper()} - {self.name}") - main_panel_title.stylize(Style(color="blue", bold=True)) - duration_subtitle = Text(f"⏲️ Total pipeline duration for {connector_name}: {format_duration(self.run_duration)}") - step_results_table = Table(title="Steps results") - step_results_table.add_column("Step") - step_results_table.add_column("Result") - step_results_table.add_column("Duration") - - for step_result in self.steps_results: - step = Text(step_result.step.title) - step.stylize(step_result.status.get_rich_style()) - result = 
Text(step_result.status.value) - result.stylize(step_result.status.get_rich_style()) - step_results_table.add_row(step, result, format_duration(step_result.step.run_duration)) - - details_instructions = Text("ℹ️ You can find more details with step executions logs in the saved HTML report.") - to_render = [step_results_table, details_instructions] - - if self.pipeline_context.dagger_cloud_url: - self.pipeline_context.logger.info(f"🔗 View runs for commit in Dagger Cloud: {self.pipeline_context.dagger_cloud_url}") - - main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle) - console.print(main_panel) diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py index 1acfb0d86875c..e1fdc1fb73dfd 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py @@ -4,39 +4,32 @@ from __future__ import annotations from abc import abstractmethod -from dataclasses import dataclass +from dataclasses import dataclass, field from datetime import timedelta from enum import Enum from pathlib import Path -from typing import List, Optional, Any +import typing +from typing import Optional, Any import anyio import asyncer -import dagger -import datetime +from datetime import datetime import logging from dagger import DaggerError, Container -from pipelines.dagger.actions.python.poetry import with_poetry_module -import pipelines.dagger.actions.system.docker -from pipelines.dagger.containers.python import with_python_base -from pipelines.models.steps import Step, StepResult +from pipelines.helpers import sentry_utils from pipelines import main_logger -from pipelines.dagger.actions.python.pipx import with_installed_pipx_package from pipelines.helpers.utils import format_duration, get_exec_result -from pipelines.models.contexts import PipelineContext + +if typing.TYPE_CHECKING: + from pipelines.models.contexts import PipelineContext from abc import ABC -from typing import ClassVar, List +from typing import ClassVar from rich.style import Style -from dagger import CacheSharingMode, CacheVolume -from pipelines import hacks -from pipelines.dagger.actions import secrets -from pipelines.consts import AMAZONCORRETTO_IMAGE -from pipelines.helpers.utils import sh_dash_c @dataclass @@ -315,272 +308,3 @@ def _get_timed_out_step_result(self) -> StepResult: StepStatus.FAILURE, stdout=f"Timed out after the max duration of {format_duration(self.max_duration)}. Please checkout the Dagger logs to see what happened.", ) - - -class NoOpStep(Step): - """A step that does nothing.""" - - title = "No Op" - should_log = False - - def __init__(self, context: PipelineContext, step_status: StepStatus) -> None: - super().__init__(context) - self.step_status = step_status - - async def _run(self, *args, **kwargs) -> StepResult: - return StepResult(self, self.step_status) - - -class SimpleDockerStep(Step): - def __init__( - self, - title: str, - context: PipelineContext, - paths_to_mount: List[MountPath] = [], - internal_tools: List[MountPath] = [], - secrets: dict[str, dagger.Secret] = {}, - env_variables: dict[str, str] = {}, - working_directory: str = "/", - command: Optional[List[str]] = None, - ): - """A simple step that runs a given command in a container. - - Args: - title (str): name of the step - context (PipelineContext): context of the step - paths_to_mount (List[MountPath], optional): directory paths to mount. Defaults to []. 
- internal_tools (List[MountPath], optional): internal tools to install. Defaults to []. - secrets (dict[str, dagger.Secret], optional): secrets to add to container. Defaults to {}. - env_variables (dict[str, str], optional): env variables to set in container. Defaults to {}. - working_directory (str, optional): working directory to run the command in. Defaults to "/". - command (Optional[List[str]], optional): The default command to run. Defaults to None. - """ - self.title = title - super().__init__(context) - - self.paths_to_mount = paths_to_mount - self.working_directory = working_directory - self.internal_tools = internal_tools - self.secrets = secrets - self.env_variables = env_variables - self.command = command - - def _mount_paths(self, container: dagger.Container) -> dagger.Container: - for path_to_mount in self.paths_to_mount: - if path_to_mount.optional and not path_to_mount.path.exists(): - continue - - path_string = str(path_to_mount) - destination_path = f"/{path_string}" - if path_to_mount.is_file: - file_to_load = self.context.get_repo_file(path_string) - container = container.with_mounted_file(destination_path, file_to_load) - else: - container = container.with_mounted_directory(destination_path, self.context.get_repo_dir(path_string)) - return container - - async def _install_internal_tools(self, container: dagger.Container) -> dagger.Container: - for internal_tool in self.internal_tools: - container = await with_installed_pipx_package(self.context, container, str(internal_tool)) - return container - - def _set_workdir(self, container: dagger.Container) -> dagger.Container: - return container.with_workdir(self.working_directory) - - def _set_env_variables(self, container: dagger.Container) -> dagger.Container: - for key, value in self.env_variables.items(): - container = container.with_env_variable(key, value) - return container - - def _set_secrets(self, container: dagger.Container) -> dagger.Container: - for key, value in self.secrets.items(): - container = container.with_secret_variable(key, value) - return container - - async def init_container(self) -> dagger.Container: - # TODO (ben): Replace with python base container when available - container = with_python_base(self.context) - - container = self._mount_paths(container) - container = self._set_env_variables(container) - container = self._set_secrets(container) - container = await self._install_internal_tools(container) - container = self._set_workdir(container) - - return container - - async def _run(self, command=None) -> StepResult: - command_to_run = command or self.command - if not command_to_run: - raise ValueError(f"No command given to the {self.title} step") - - container_to_run = await self.init_container() - return await self.get_step_result(container_to_run.with_exec(command_to_run)) - -class PoetryRunStep(Step): - def __init__(self, context: PipelineContext, title: str, parent_dir_path: str, module_path: str): - """A simple step that runs a given command inside a poetry project. 
- - Args: - context (PipelineContext): context of the step - title (str): name of the step - parent_dir_path (str): The path to the parent directory of the poetry project - module_path (str): The path to the poetry project - """ - self.title = title - super().__init__(context) - - parent_dir = self.context.get_repo_dir(parent_dir_path) - module_path = module_path - self.poetry_run_container = with_poetry_module(self.context, parent_dir, module_path).with_entrypoint(["poetry", "run"]) - - async def _run(self, poetry_run_args: list) -> StepResult: - poetry_run_exec = self.poetry_run_container.with_exec(poetry_run_args) - return await self.get_step_result(poetry_run_exec) - - -class GradleTask(Step, ABC): - """ - A step to run a Gradle task. - - Attributes: - title (str): The step title. - gradle_task_name (str): The Gradle task name to run. - bind_to_docker_host (bool): Whether to install the docker client and bind it to the host. - mount_connector_secrets (bool): Whether to mount connector secrets. - """ - - DEFAULT_GRADLE_TASK_OPTIONS = ("--no-daemon", "--scan", "--build-cache", "--console=plain") - - gradle_task_name: ClassVar[str] - bind_to_docker_host: ClassVar[bool] = False - mount_connector_secrets: ClassVar[bool] = False - - def __init__(self, context: PipelineContext) -> None: - super().__init__(context) - - @property - def connector_java_build_cache(self) -> CacheVolume: - # TODO: remove this once we finish the project to boost source-postgres CI performance. - # We should use a static gradle-cache volume name. - cache_volume_name = hacks.get_gradle_cache_volume_name(self.context, self.logger) - return self.context.dagger_client.cache_volume(cache_volume_name) - - @property - def build_include(self) -> List[str]: - """Retrieve the list of source code directory required to run a Java connector Gradle task. - - The list is different according to the connector type. - - Returns: - List[str]: List of directories or files to be mounted to the container to run a Java connector Gradle task. - """ - return [ - str(dependency_directory) - for dependency_directory in self.context.connector.get_local_dependency_paths(with_test_dependencies=True) - ] - - def _get_gradle_command(self, task: str) -> List[str]: - return sh_dash_c( - [ - # The gradle command is chained in between a couple of rsyncs which load from- and store to the cache volume. - "(rsync -a --stats /root/gradle-cache/ /root/.gradle || true)", - f"./gradlew {' '.join(self.DEFAULT_GRADLE_TASK_OPTIONS)} {task}", - "(rsync -a --stats /root/.gradle/ /root/gradle-cache || true)", - ] - ) - - async def _run(self) -> StepResult: - include = [ - ".root", - ".env", - "build.gradle", - "deps.toml", - "gradle.properties", - "gradle", - "gradlew", - "LICENSE_SHORT", - "settings.gradle", - "build.gradle", - "tools/gradle", - "spotbugs-exclude-filter-file.xml", - "buildSrc", - "tools/bin/build_image.sh", - "tools/lib/lib.sh", - "tools/gradle/codestyle", - "pyproject.toml", - "airbyte-cdk/java/airbyte-cdk/**", - ] + self.build_include - - yum_packages_to_install = [ - "docker", # required by :integrationTestJava. - "findutils", # gradle requires xargs, which is shipped in findutils. - "jq", # required by :airbyte-connector-test-harnesses:acceptance-test-harness to inspect docker images. - "npm", # required by :format. - "python3.11-pip", # required by :format. - "rsync", # required for gradle cache synchronization. - ] - - # Define a gradle container which will be cached and re-used for all tasks. 
- # We should do our best to cram any generic & expensive layers in here. - gradle_container = ( - self.dagger_client.container() - # Use a linux+jdk base image with long-term support, such as amazoncorretto. - .from_(AMAZONCORRETTO_IMAGE) - # Install a bunch of packages as early as possible. - .with_exec( - sh_dash_c( - [ - # Update first, but in the same .with_exec step as the package installation. - # Otherwise, we risk caching stale package URLs. - "yum update -y", - f"yum install -y {' '.join(yum_packages_to_install)}", - # Remove any dangly bits. - "yum clean all", - # Deliberately soft-remove docker, so that the `docker` CLI is unavailable by default. - # This is a defensive choice to enforce the expectation that, as a general rule, gradle tasks do not rely on docker. - "yum remove -y --noautoremove docker", # remove docker package but not its dependencies - "yum install -y --downloadonly docker", # have docker package in place for quick install - ] - ) - ) - # Set GRADLE_HOME and GRADLE_USER_HOME to the directory which will be rsync-ed with the gradle cache volume. - .with_env_variable("GRADLE_HOME", "/root/.gradle") - .with_env_variable("GRADLE_USER_HOME", "/root/.gradle") - # Set RUN_IN_AIRBYTE_CI to tell gradle how to configure its build cache. - # This is consumed by settings.gradle in the repo root. - .with_env_variable("RUN_IN_AIRBYTE_CI", "1") - # TODO: remove this once we finish the project to boost source-postgres CI performance. - .with_env_variable("CACHEBUSTER", hacks.get_cachebuster(self.context, self.logger)) - # Mount the gradle cache volume. - # We deliberately don't mount it at $GRADLE_HOME, instead we load it there and store it from there using rsync. - # This is because the volume is accessed concurrently by all GradleTask instances. - # Hence, why we synchronize the writes by setting the `sharing` parameter to LOCKED. - .with_mounted_cache("/root/gradle-cache", self.connector_java_build_cache, sharing=CacheSharingMode.LOCKED) - # Mount the parts of the repo which interest us in /airbyte. - .with_workdir("/airbyte") - .with_mounted_directory("/airbyte", self.context.get_repo_dir(".", include=include)) - .with_mounted_directory(str(self.context.connector.code_directory), await self.context.get_connector_dir()) - # Disable the Ryuk container because it needs privileged docker access that does not work: - .with_env_variable("TESTCONTAINERS_RYUK_DISABLED", "true") - # Run gradle once to populate the container's local maven repository. - # This step is useful also to serve as a basic sanity check and to warm the gradle cache. - # This will download gradle itself, a bunch of poms and jars, compile the gradle plugins, configure tasks, etc. - .with_exec(self._get_gradle_command(":airbyte-cdk:java:airbyte-cdk:publishSnapshotIfNeeded")) - ) - - # From this point on, we add layers which are task-dependent. - if self.mount_connector_secrets: - gradle_container = gradle_container.with_( - await secrets.mounted_connector_secrets(self.context, f"{self.context.connector.code_directory}/secrets") - ) - if self.bind_to_docker_host: - # If this GradleTask subclass needs docker, then install it and bind it to the existing global docker host container. - gradle_container = pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, gradle_container) - # This installation should be cheap, as the package has already been downloaded, and its dependencies are already installed. 
- gradle_container = gradle_container.with_exec(["yum", "install", "-y", "docker"]) - - # Run the gradle task that we actually care about. - connector_task = f":airbyte-integrations:connectors:{self.context.connector.technical_name}:{self.gradle_task_name}" - gradle_container = gradle_container.with_exec(self._get_gradle_command(connector_task)) - return await self.get_step_result(gradle_container) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py index d357aa307783c..f35c6c01e9b5d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py @@ -14,11 +14,12 @@ from dagger import Directory from github import PullRequest from pipelines.dagger.actions import secrets -from pipelines.models.bases import ConnectorReport, ConnectorWithModifiedFiles +from pipelines.models.bases import ConnectorWithModifiedFiles from pipelines.models.contexts import PipelineContext from pipelines.helpers.github import update_commit_status_check from pipelines.helpers.slack import send_message_to_webhook from pipelines.helpers.utils import METADATA_FILE_NAME +from pipelines.pipeline.connectors.reports import ConnectorReport class ConnectorContext(PipelineContext): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py index 258a937867262..b0451afa49bd8 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py @@ -10,12 +10,13 @@ from dagger import Directory from jinja2 import Template from pipelines import consts -from pipelines.models.bases import ConnectorReport, Step, StepResult, StepStatus +from pipelines.models.bases import Step, StepResult, StepStatus from pipelines.pipeline.connectors.bump_version.pipeline import BumpDockerImageTagInMetadata from pipelines.pipeline.connectors.bump_version.pipeline import AddChangelogEntry, get_bumped_version from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext +from pipelines.pipeline.connectors.reports import ConnectorReport -from ....helpers.connectors import metadata_change_helpers +from pipelines.helpers import metadata_change_helpers class UpgradeBaseImageMetadata(Step): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py index 367afab001f90..8c2d1d58edd4c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py @@ -3,19 +3,17 @@ from typing import Optional import dagger -from pipelines.dagger.actions.python.common import with_pip_packages -from pipelines.dagger.containers.python import with_python_base +from pipelines.dagger.containers.python import with_python_base, with_pip_packages from pipelines.models.bases import Report, Step, StepResult from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH from 
pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext from pipelines.helpers.steps import run_steps from pipelines.models.steps import PoetryRunStep, MountPath, SimpleDockerStep -from pipelines.tools.internal import INTERNAL_TOOL_PATHS from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.models.steps import MountPath, SimpleDockerStep -from pipelines.tools.internal import INTERNAL_TOOL_PATHS +from pipelines.internal_tools import INTERNAL_TOOL_PATHS # STEPS diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py new file mode 100644 index 0000000000000..0d5251fd545fd --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py @@ -0,0 +1,97 @@ +from pipelines.dagger.actions.python.pipx import with_installed_pipx_package +from pipelines.dagger.containers.python import with_python_base +from pipelines.models.contexts import PipelineContext +from pipelines.models.steps import MountPath, Step, StepResult + + +import dagger + + +from typing import List, Optional + + +class SimpleDockerStep(Step): + def __init__( + self, + title: str, + context: PipelineContext, + paths_to_mount: List[MountPath] = [], + internal_tools: List[MountPath] = [], + secrets: dict[str, dagger.Secret] = {}, + env_variables: dict[str, str] = {}, + working_directory: str = "/", + command: Optional[List[str]] = None, + ): + """A simple step that runs a given command in a container. + + Args: + title (str): name of the step + context (PipelineContext): context of the step + paths_to_mount (List[MountPath], optional): directory paths to mount. Defaults to []. + internal_tools (List[MountPath], optional): internal tools to install. Defaults to []. + secrets (dict[str, dagger.Secret], optional): secrets to add to container. Defaults to {}. + env_variables (dict[str, str], optional): env variables to set in container. Defaults to {}. + working_directory (str, optional): working directory to run the command in. Defaults to "/". + command (Optional[List[str]], optional): The default command to run. Defaults to None. 
+ """ + self.title = title + super().__init__(context) + + self.paths_to_mount = paths_to_mount + self.working_directory = working_directory + self.internal_tools = internal_tools + self.secrets = secrets + self.env_variables = env_variables + self.command = command + + def _mount_paths(self, container: dagger.Container) -> dagger.Container: + for path_to_mount in self.paths_to_mount: + if path_to_mount.optional and not path_to_mount.path.exists(): + continue + + path_string = str(path_to_mount) + destination_path = f"/{path_string}" + if path_to_mount.is_file: + file_to_load = self.context.get_repo_file(path_string) + container = container.with_mounted_file(destination_path, file_to_load) + else: + container = container.with_mounted_directory(destination_path, self.context.get_repo_dir(path_string)) + return container + + async def _install_internal_tools(self, container: dagger.Container) -> dagger.Container: + for internal_tool in self.internal_tools: + container = await with_installed_pipx_package(self.context, container, str(internal_tool)) + return container + + def _set_workdir(self, container: dagger.Container) -> dagger.Container: + return container.with_workdir(self.working_directory) + + def _set_env_variables(self, container: dagger.Container) -> dagger.Container: + for key, value in self.env_variables.items(): + container = container.with_env_variable(key, value) + return container + + def _set_secrets(self, container: dagger.Container) -> dagger.Container: + for key, value in self.secrets.items(): + container = container.with_secret_variable(key, value) + return container + + async def init_container(self) -> dagger.Container: + # TODO (ben): Replace with python base container when available + container = with_python_base(self.context) + + container = self._mount_paths(container) + container = self._set_env_variables(container) + container = self._set_secrets(container) + container = await self._install_internal_tools(container) + container = self._set_workdir(container) + + return container + + async def _run(self, command=None) -> StepResult: + command_to_run = command or self.command + if not command_to_run: + raise ValueError(f"No command given to the {self.title} step") + + container_to_run = await self.init_container() + return await self.get_step_result(container_to_run.with_exec(command_to_run)) \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py new file mode 100644 index 0000000000000..41b9c5f2272f7 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py @@ -0,0 +1,161 @@ +import pipelines.dagger.actions.system.docker +from pipelines import hacks +from pipelines.consts import AMAZONCORRETTO_IMAGE +from pipelines.dagger.actions import secrets +from pipelines.helpers.utils import sh_dash_c +from pipelines.models.contexts import PipelineContext +from pipelines.models.steps import Step, StepResult + + +from dagger import CacheSharingMode, CacheVolume + + +from abc import ABC +from typing import ClassVar, List + + +class GradleTask(Step, ABC): + """ + A step to run a Gradle task. + + Attributes: + title (str): The step title. + gradle_task_name (str): The Gradle task name to run. + bind_to_docker_host (bool): Whether to install the docker client and bind it to the host. + mount_connector_secrets (bool): Whether to mount connector secrets. 
+ """ + + DEFAULT_GRADLE_TASK_OPTIONS = ("--no-daemon", "--scan", "--build-cache", "--console=plain") + + gradle_task_name: ClassVar[str] + bind_to_docker_host: ClassVar[bool] = False + mount_connector_secrets: ClassVar[bool] = False + + def __init__(self, context: PipelineContext) -> None: + super().__init__(context) + + @property + def connector_java_build_cache(self) -> CacheVolume: + # TODO: remove this once we finish the project to boost source-postgres CI performance. + # We should use a static gradle-cache volume name. + cache_volume_name = hacks.get_gradle_cache_volume_name(self.context, self.logger) + return self.context.dagger_client.cache_volume(cache_volume_name) + + @property + def build_include(self) -> List[str]: + """Retrieve the list of source code directory required to run a Java connector Gradle task. + + The list is different according to the connector type. + + Returns: + List[str]: List of directories or files to be mounted to the container to run a Java connector Gradle task. + """ + return [ + str(dependency_directory) + for dependency_directory in self.context.connector.get_local_dependency_paths(with_test_dependencies=True) + ] + + def _get_gradle_command(self, task: str) -> List[str]: + return sh_dash_c( + [ + # The gradle command is chained in between a couple of rsyncs which load from- and store to the cache volume. + "(rsync -a --stats /root/gradle-cache/ /root/.gradle || true)", + f"./gradlew {' '.join(self.DEFAULT_GRADLE_TASK_OPTIONS)} {task}", + "(rsync -a --stats /root/.gradle/ /root/gradle-cache || true)", + ] + ) + + async def _run(self) -> StepResult: + include = [ + ".root", + ".env", + "build.gradle", + "deps.toml", + "gradle.properties", + "gradle", + "gradlew", + "LICENSE_SHORT", + "settings.gradle", + "build.gradle", + "tools/gradle", + "spotbugs-exclude-filter-file.xml", + "buildSrc", + "tools/bin/build_image.sh", + "tools/lib/lib.sh", + "tools/gradle/codestyle", + "pyproject.toml", + "airbyte-cdk/java/airbyte-cdk/**", + ] + self.build_include + + yum_packages_to_install = [ + "docker", # required by :integrationTestJava. + "findutils", # gradle requires xargs, which is shipped in findutils. + "jq", # required by :airbyte-connector-test-harnesses:acceptance-test-harness to inspect docker images. + "npm", # required by :format. + "python3.11-pip", # required by :format. + "rsync", # required for gradle cache synchronization. + ] + + # Define a gradle container which will be cached and re-used for all tasks. + # We should do our best to cram any generic & expensive layers in here. + gradle_container = ( + self.dagger_client.container() + # Use a linux+jdk base image with long-term support, such as amazoncorretto. + .from_(AMAZONCORRETTO_IMAGE) + # Install a bunch of packages as early as possible. + .with_exec( + sh_dash_c( + [ + # Update first, but in the same .with_exec step as the package installation. + # Otherwise, we risk caching stale package URLs. + "yum update -y", + f"yum install -y {' '.join(yum_packages_to_install)}", + # Remove any dangly bits. + "yum clean all", + # Deliberately soft-remove docker, so that the `docker` CLI is unavailable by default. + # This is a defensive choice to enforce the expectation that, as a general rule, gradle tasks do not rely on docker. 
+ "yum remove -y --noautoremove docker", # remove docker package but not its dependencies + "yum install -y --downloadonly docker", # have docker package in place for quick install + ] + ) + ) + # Set GRADLE_HOME and GRADLE_USER_HOME to the directory which will be rsync-ed with the gradle cache volume. + .with_env_variable("GRADLE_HOME", "/root/.gradle") + .with_env_variable("GRADLE_USER_HOME", "/root/.gradle") + # Set RUN_IN_AIRBYTE_CI to tell gradle how to configure its build cache. + # This is consumed by settings.gradle in the repo root. + .with_env_variable("RUN_IN_AIRBYTE_CI", "1") + # TODO: remove this once we finish the project to boost source-postgres CI performance. + .with_env_variable("CACHEBUSTER", hacks.get_cachebuster(self.context, self.logger)) + # Mount the gradle cache volume. + # We deliberately don't mount it at $GRADLE_HOME, instead we load it there and store it from there using rsync. + # This is because the volume is accessed concurrently by all GradleTask instances. + # Hence, why we synchronize the writes by setting the `sharing` parameter to LOCKED. + .with_mounted_cache("/root/gradle-cache", self.connector_java_build_cache, sharing=CacheSharingMode.LOCKED) + # Mount the parts of the repo which interest us in /airbyte. + .with_workdir("/airbyte") + .with_mounted_directory("/airbyte", self.context.get_repo_dir(".", include=include)) + .with_mounted_directory(str(self.context.connector.code_directory), await self.context.get_connector_dir()) + # Disable the Ryuk container because it needs privileged docker access that does not work: + .with_env_variable("TESTCONTAINERS_RYUK_DISABLED", "true") + # Run gradle once to populate the container's local maven repository. + # This step is useful also to serve as a basic sanity check and to warm the gradle cache. + # This will download gradle itself, a bunch of poms and jars, compile the gradle plugins, configure tasks, etc. + .with_exec(self._get_gradle_command(":airbyte-cdk:java:airbyte-cdk:publishSnapshotIfNeeded")) + ) + + # From this point on, we add layers which are task-dependent. + if self.mount_connector_secrets: + gradle_container = gradle_container.with_( + await secrets.mounted_connector_secrets(self.context, f"{self.context.connector.code_directory}/secrets") + ) + if self.bind_to_docker_host: + # If this GradleTask subclass needs docker, then install it and bind it to the existing global docker host container. + gradle_container = pipelines.dagger.actions.system.docker.with_bound_docker_host(self.context, gradle_container) + # This installation should be cheap, as the package has already been downloaded, and its dependencies are already installed. + gradle_container = gradle_container.with_exec(["yum", "install", "-y", "docker"]) + + # Run the gradle task that we actually care about. 
+ connector_task = f":airbyte-integrations:connectors:{self.context.connector.technical_name}:{self.gradle_task_name}" + gradle_container = gradle_container.with_exec(self._get_gradle_command(connector_task)) + return await self.get_step_result(gradle_container) \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py new file mode 100644 index 0000000000000..1003ba9c619c8 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py @@ -0,0 +1,25 @@ +from pipelines.dagger.actions.python.poetry import with_poetry_module +from pipelines.models.contexts import PipelineContext +from pipelines.models.steps import Step, StepResult + + +class PoetryRunStep(Step): + def __init__(self, context: PipelineContext, title: str, parent_dir_path: str, module_path: str): + """A simple step that runs a given command inside a poetry project. + + Args: + context (PipelineContext): context of the step + title (str): name of the step + parent_dir_path (str): The path to the parent directory of the poetry project + module_path (str): The path to the poetry project + """ + self.title = title + super().__init__(context) + + parent_dir = self.context.get_repo_dir(parent_dir_path) + module_path = module_path + self.poetry_run_container = with_poetry_module(self.context, parent_dir, module_path).with_entrypoint(["poetry", "run"]) + + async def _run(self, poetry_run_args: list) -> StepResult: + poetry_run_exec = self.poetry_run_container.with_exec(poetry_run_args) + return await self.get_step_result(poetry_run_exec) \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py index 857ca596b51d6..849d577c27dfb 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py +++ b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py @@ -6,8 +6,9 @@ import pytest from pipelines.models.contexts import PipelineContext -from pipelines.models.steps import MountPath, SimpleDockerStep +from pipelines.models.steps import MountPath from pipelines.helpers.utils import get_exec_result +from pipelines.pipeline.steps.docker import SimpleDockerStep pytestmark = [ pytest.mark.anyio, From b8ff63a29093a5b7504c130e5ac7bc7f686cd478 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 17:12:06 -0500 Subject: [PATCH 08/38] fixes --- .../pipelines/pipelines/cli/airbyte_ci.py | 2 +- .../connectors/pipelines/pipelines/consts.py | 23 +++ .../pipelines/helpers/connectors/modifed.py | 2 +- .../pipelines/pipelines/helpers/github.py | 3 +- .../pipelines/pipelines/models/contexts.py | 25 +-- .../connectors/builds/steps/__init__.py | 3 +- .../connectors/bump_version/pipeline.py | 3 +- .../pipelines/pipeline/connectors/commands.py | 2 - .../pipeline/connectors/publish/context.py | 2 +- .../pipelines/pipeline/connectors/reports.py | 171 ++++++++++++++++++ .../pipeline/connectors/test/steps/common.py | 3 +- .../pipelines/pipeline/metadata/commands.py | 2 +- 12 files changed, 208 insertions(+), 33 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py 
b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
index 44c88148314cf..ad8afda43afb1 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
@@ -12,7 +12,7 @@ from pipelines import main_logger
 from pipelines.helpers import github
 from pipelines.helpers.git import get_current_git_branch, get_current_git_revision, get_modified_files_in_branch, get_modified_files_in_commit, get_modified_files_in_pull_request
-from pipelines.models.bases import CIContext
+from pipelines.consts import CIContext
 from pipelines.consts import LOCAL_PIPELINE_PACKAGE_PATH
 from pipelines.cli.telemetry import track_command
 from pipelines.helpers.utils import (
diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py b/airbyte-ci/connectors/pipelines/pipelines/consts.py
index 0b71859085f8d..509b27eba5ad1 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/consts.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py
@@ -2,6 +2,7 @@
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
 #
+from enum import Enum
 import platform
 import git
@@ -41,3 +42,25 @@
 REPO = git.Repo(search_parent_directories=True)
 REPO_PATH = REPO.working_tree_dir
 STATIC_REPORT_PREFIX = "airbyte-ci"
+
+
+class CIContext(str, Enum):
+    """An enum for CI context values, which can be "manual", "pull_request", "nightly_builds" or "master"."""
+
+    MANUAL = "manual"
+    PULL_REQUEST = "pull_request"
+    NIGHTLY_BUILDS = "nightly_builds"
+    MASTER = "master"
+
+    def __str__(self) -> str:
+        return self.value
+
+
+class ContextState(Enum):
+    """Enum to characterize the current context state, values are used for external representation on GitHub commit checks."""
+
+    INITIALIZED = {"github_state": "pending", "description": "Pipelines are being initialized..."}
+    RUNNING = {"github_state": "pending", "description": "Pipelines are running..."}
+    ERROR = {"github_state": "error", "description": "Something went wrong while running the Pipelines."}
+    SUCCESSFUL = {"github_state": "success", "description": "All Pipelines ran successfully."}
+    FAILURE = {"github_state": "failure", "description": "Pipeline failed."}
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py
index 3ecdf3d4fa2cc..cedbe298f595e 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py
@@ -3,7 +3,7 @@
 from pathlib import Path
 from typing import FrozenSet, Set, Union
-from pipelines import main_logger, Union
+from pipelines import main_logger
 from pipelines.helpers.utils import IGNORED_FILE_EXTENSIONS
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py
index 34a7932987e93..3bde8c61f9d2f 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/github.py
@@ -11,7 +11,7 @@
 from connector_ops.utils import console
 from pipelines import main_logger
-from pipelines.models.contexts import CIContext
+from pipelines.consts import CIContext
 if TYPE_CHECKING:
     from logging import Logger
@@ -92,6 +92,7 @@ def get_pull_request(pull_request_number: int, github_access_token: str) -> Pull
 def update_global_commit_status_check_for_tests(click_context: dict, github_state: str, logger: Logger = None):
+
click_context["git_revision"], github_state, diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py index b33b46fb973f9..b347c9c0b8b4f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py @@ -7,7 +7,6 @@ import logging import os from datetime import datetime -from enum import Enum from glob import glob from types import TracebackType from typing import List, Optional @@ -16,6 +15,8 @@ from dagger import Client, Directory, File, Secret from github import PullRequest from pipelines import hacks +from pipelines.consts import CIContext +from pipelines.consts import ContextState from pipelines.helpers.gcs import sanitize_gcs_credentials from pipelines.models.reports import Report from pipelines.helpers.github import update_commit_status_check @@ -23,28 +24,6 @@ from pipelines.helpers.utils import AIRBYTE_REPO_URL -class ContextState(Enum): - """Enum to characterize the current context state, values are used for external representation on GitHub commit checks.""" - - INITIALIZED = {"github_state": "pending", "description": "Pipelines are being initialized..."} - RUNNING = {"github_state": "pending", "description": "Pipelines are running..."} - ERROR = {"github_state": "error", "description": "Something went wrong while running the Pipelines."} - SUCCESSFUL = {"github_state": "success", "description": "All Pipelines ran successfully."} - FAILURE = {"github_state": "failure", "description": "Pipeline failed."} - - -class CIContext(str, Enum): - """An enum for Ci context values which can be ["manual", "pull_request", "nightly_builds"].""" - - MANUAL = "manual" - PULL_REQUEST = "pull_request" - NIGHTLY_BUILDS = "nightly_builds" - MASTER = "master" - - def __str__(self) -> str: - return self.value - - class PipelineContext: """The pipeline context is used to store configuration for a specific pipeline run.""" diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py index 430c5e2182175..21d10eb74f74d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py @@ -9,12 +9,13 @@ import anyio from connector_ops.utils import ConnectorLanguage -from pipelines.models.bases import ConnectorReport, StepResult +from pipelines.models.bases import StepResult from pipelines.pipeline.connectors.builds.steps import python_connectors from pipelines.pipeline.connectors.builds.steps.common import LoadContainerToLocalDockerHost, StepStatus from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.pipeline.connectors.builds.steps import java_connectors from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.reports import ConnectorReport class NoBuildStepForLanguageError(Exception): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py index 4d522beb5e518..6cc4376234129 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py @@ -4,8 +4,9 @@ import semver from pipelines 
import consts
 from pipelines.helpers.connectors import metadata_change_helpers
-from pipelines.models.bases import ConnectorReport, Step, StepResult, StepStatus
+from pipelines.models.bases import Step, StepResult, StepStatus
 from pipelines.pipeline.connectors.context import ConnectorContext
+from pipelines.pipeline.connectors.reports import ConnectorReport
 def get_bumped_version(version: str, bump_type: str) -> str:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py
index b7836a7dd3069..d56ef18b728a7 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py
@@ -1,8 +1,6 @@
 import os
 from pathlib import Path
 from pipelines import main_logger
-from pipelines.commands.groups.connectors import ALL_CONNECTORS
-
 import click
 from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py
index 9cd339b17f2d9..f18ea26b7ccd2 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py
@@ -12,7 +12,7 @@
 from pipelines.models.bases import ConnectorWithModifiedFiles
 from pipelines.helpers.utils import format_duration
 from pipelines.pipeline.connectors import ConnectorContext
-from pipelines.models.contexts import ContextState
+from pipelines.consts import ContextState
 class PublishConnectorContext(ConnectorContext):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py
index e69de29bb2d1d..cd2f1a1d47af5 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py
@@ -0,0 +1,171 @@
+from pipelines.consts import GCS_PUBLIC_DOMAIN
+from pipelines.helpers.utils import format_duration
+from pipelines.models.bases import StepStatus
+from pipelines.models.reports import Report
+
+
+from anyio import Path
+from connector_ops.utils import console
+from jinja2 import Environment, PackageLoader, select_autoescape
+from rich.console import Group
+from rich.panel import Panel
+from rich.style import Style
+from rich.table import Table
+from rich.text import Text
+from tabulate import tabulate
+
+
+import json
+import webbrowser
+from dataclasses import dataclass
+
+
+@dataclass(frozen=True)
+class ConnectorReport(Report):
+    """A dataclass to build connector test reports and share pipeline execution results with the user."""
+
+    @property
+    def report_output_prefix(self) -> str:  # noqa D102
+        return f"{self.pipeline_context.report_output_prefix}/{self.pipeline_context.connector.technical_name}/{self.pipeline_context.connector.version}"
+
+    @property
+    def html_report_file_name(self) -> str:  # noqa D102
+        return self.filename + ".html"
+
+    @property
+    def html_report_remote_storage_key(self) -> str:  # noqa D102
+        return f"{self.report_output_prefix}/{self.html_report_file_name}"
+
+    @property
+    def html_report_url(self) -> str:  # noqa D102
+        return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.html_report_remote_storage_key}"
+
+    @property
+    def
should_be_commented_on_pr(self) -> bool:  # noqa D102
+        return (
+            self.pipeline_context.should_save_report
+            and self.pipeline_context.is_ci
+            and self.pipeline_context.pull_request
+            and self.pipeline_context.PRODUCTION
+        )
+
+    def to_json(self) -> str:
+        """Create a JSON representation of the connector test report.
+
+        Returns:
+            str: The JSON representation of the report.
+        """
+        return json.dumps(
+            {
+                "connector_technical_name": self.pipeline_context.connector.technical_name,
+                "connector_version": self.pipeline_context.connector.version,
+                "run_timestamp": self.created_at.isoformat(),
+                "run_duration": self.run_duration.total_seconds(),
+                "success": self.success,
+                "failed_steps": [s.step.__class__.__name__ for s in self.failed_steps],
+                "successful_steps": [s.step.__class__.__name__ for s in self.successful_steps],
+                "skipped_steps": [s.step.__class__.__name__ for s in self.skipped_steps],
+                "gha_workflow_run_url": self.pipeline_context.gha_workflow_run_url,
+                "pipeline_start_timestamp": self.pipeline_context.pipeline_start_timestamp,
+                "pipeline_end_timestamp": round(self.created_at.timestamp()),
+                "pipeline_duration": round(self.created_at.timestamp()) - self.pipeline_context.pipeline_start_timestamp,
+                "git_branch": self.pipeline_context.git_branch,
+                "git_revision": self.pipeline_context.git_revision,
+                "ci_context": self.pipeline_context.ci_context,
+                "cdk_version": self.pipeline_context.cdk_version,
+                "html_report_url": self.html_report_url,
+                "dagger_cloud_url": self.pipeline_context.dagger_cloud_url,
+            }
+        )
+
+    def post_comment_on_pr(self) -> None:
+        icon_url = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg"
+        global_status_emoji = "✅" if self.success else "❌"
+        commit_url = f"{self.pipeline_context.pull_request.html_url}/commits/{self.pipeline_context.git_revision}"
+        markdown_comment = f'## {self.pipeline_context.connector.technical_name} test report (commit [`{self.pipeline_context.git_revision[:10]}`]({commit_url})) - {global_status_emoji}\n\n'
+        markdown_comment += f"⏲️ Total pipeline duration: {format_duration(self.run_duration)} \n\n"
+        report_data = [
+            [step_result.step.title, step_result.status.get_emoji()]
+            for step_result in self.steps_results
+            if step_result.status is not StepStatus.SKIPPED
+        ]
+        markdown_comment += tabulate(report_data, headers=["Step", "Result"], tablefmt="pipe") + "\n\n"
+        markdown_comment += f"🔗 [View the logs here]({self.html_report_url})\n\n"
+
+        if self.pipeline_context.dagger_cloud_url:
+            markdown_comment += f"☁️ [View runs for commit in Dagger Cloud]({self.pipeline_context.dagger_cloud_url})\n\n"
+
+        markdown_comment += "*Please note that tests are only run on PRs that are ready for review. Please set your PR to draft mode to avoid flooding the CI engine and upstream services on subsequent commits.*\n"
+        markdown_comment += "**You can run the same pipeline locally on this branch with the [airbyte-ci](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) tool with the following command**\n"
+        markdown_comment += f"```bash\nairbyte-ci connectors --name={self.pipeline_context.connector.technical_name} test\n```\n\n"
+        self.pipeline_context.pull_request.create_issue_comment(markdown_comment)
+
+    async def to_html(self) -> str:
+        env = Environment(loader=PackageLoader("pipelines.tests"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True)
+        template = env.get_template("test_report.html.j2")
+        template.globals["StepStatus"] = StepStatus
+        template.globals["format_duration"] = format_duration
+        local_icon_path = await Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
+        template_context = {
+            "connector_name": self.pipeline_context.connector.technical_name,
+            "step_results": self.steps_results,
+            "run_duration": self.run_duration,
+            "created_at": self.created_at.isoformat(),
+            "connector_version": self.pipeline_context.connector.version,
+            "gha_workflow_run_url": None,
+            "dagger_logs_url": None,
+            "git_branch": self.pipeline_context.git_branch,
+            "git_revision": self.pipeline_context.git_revision,
+            "commit_url": None,
+            "icon_url": local_icon_path.as_uri(),
+        }
+
+        if self.pipeline_context.is_ci:
+            template_context["commit_url"] = f"https://github.com/airbytehq/airbyte/commit/{self.pipeline_context.git_revision}"
+            template_context["gha_workflow_run_url"] = self.pipeline_context.gha_workflow_run_url
+            template_context["dagger_logs_url"] = self.pipeline_context.dagger_logs_url
+            template_context["dagger_cloud_url"] = self.pipeline_context.dagger_cloud_url
+            template_context[
+                "icon_url"
+            ] = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg"
+        return template.render(template_context)
+
+    async def save(self) -> None:
+        local_html_path = await self.save_local(self.html_report_file_name, await self.to_html())
+        absolute_path = await local_html_path.resolve()
+        if self.pipeline_context.open_report_in_browser:
+            self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}")
+            if self.pipeline_context.open_report_in_browser:
+                self.pipeline_context.logger.info("Opening HTML report in browser.")
+                webbrowser.open(absolute_path.as_uri())
+        if self.remote_storage_enabled:
+            await self.save_remote(local_html_path, self.html_report_remote_storage_key, "text/html")
+            self.pipeline_context.logger.info(f"HTML report uploaded to {self.html_report_url}")
+        await super().save()
+
+    def print(self):
+        """Print the test report to the console in a nice way."""
+        connector_name = self.pipeline_context.connector.technical_name
+        main_panel_title = Text(f"{connector_name.upper()} - {self.name}")
+        main_panel_title.stylize(Style(color="blue", bold=True))
+        duration_subtitle = Text(f"⏲️ Total pipeline duration for {connector_name}: {format_duration(self.run_duration)}")
+        step_results_table = Table(title="Steps results")
+        step_results_table.add_column("Step")
+        step_results_table.add_column("Result")
+        step_results_table.add_column("Duration")
+
+        for step_result in self.steps_results:
+            step = Text(step_result.step.title)
+            step.stylize(step_result.status.get_rich_style())
+            result =
Text(step_result.status.value)
+            result.stylize(step_result.status.get_rich_style())
+            step_results_table.add_row(step, result, format_duration(step_result.step.run_duration))
+
+        details_instructions = Text("ℹ️ You can find more details, including step execution logs, in the saved HTML report.")
+        to_render = [step_results_table, details_instructions]
+
+        if self.pipeline_context.dagger_cloud_url:
+            self.pipeline_context.logger.info(f"🔗 View runs for commit in Dagger Cloud: {self.pipeline_context.dagger_cloud_url}")
+
+        main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle)
+        console.print(main_panel)
\ No newline at end of file
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py
index f56fc880aa9e3..0222bd8b693bf 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py
@@ -18,8 +18,9 @@
 from pipelines import hacks
 from pipelines.dagger.actions import secrets
 import pipelines.dagger.actions.internal_tools
-from pipelines.models.bases import CIContext, Step, StepResult, StepStatus
+from pipelines.models.bases import Step, StepResult, StepStatus
 from pipelines.helpers.utils import METADATA_FILE_NAME
+from pipelines.consts import CIContext
 class VersionCheck(Step, ABC):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py
index a27814fa3dc52..5ec47eea2be4a 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py
@@ -1,4 +1,4 @@
-from pipelines.pipelines.metadata import run_metadata_orchestrator_deploy_pipeline
+from pipelines.pipeline.metadata.pipeline import run_metadata_orchestrator_deploy_pipeline
 from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand

From 586372cf33deec9d43f549ec928d0254ecb0e44a Mon Sep 17 00:00:00 2001
From: erohmensing
Date: Tue, 17 Oct 2023 17:21:48 -0500
Subject: [PATCH 09/38] airbyte ci help finally works :D

---
 .../pipelines/cli/dagger_pipeline_command.py  |   3 +-
 .../pipelines/pipelines/helpers/steps.py      |   2 +-
 .../pipelines/pipelines/models/bases.py       | 282 +-----------------
 .../connectors/builds/steps/__init__.py       |   2 +-
 .../connectors/bump_version/pipeline.py       |   2 +-
 .../migrate_to_base_image/pipeline.py         |   2 +-
 .../pipeline/connectors/publish/pipeline.py   |   3 +-
 .../pipelines/pipeline/connectors/reports.py  |   2 +-
 .../connectors/test/steps/__init__.py         |   3 +-
 .../pipeline/connectors/test/steps/common.py  |   6 +-
 .../connectors/test/steps/java_connectors.py  |   2 +-
 .../pipelines/pipeline/metadata/pipeline.py   |  14 +-
 .../pipelines/pipeline/steps/no_op.py         |  16 +
 .../test_builds/test_python_connectors.py     |   2 +-
 .../pipelines/tests/test_publish.py           |   2 +-
 15 files changed, 44 insertions(+), 299 deletions(-)

diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py b/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py
index 0b989d5a3945c..2c17304cdf0c0 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py
@@ -15,7 +15,8 @@
 from pipelines import consts, main_logger
 from pipelines.consts import GCS_PUBLIC_DOMAIN
 from
pipelines.helpers import sentry_utils -from pipelines.helpers.utils import slugify, upload_to_gcs +from pipelines.helpers.utils import slugify +from pipelines.helpers.gcs import upload_to_gcs from pipelines.consts import STATIC_REPORT_PREFIX class DaggerPipelineCommand(click.Command): diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py index 4b2bf10d56489..95ad77c0b9969 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/steps.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, List, Tuple, Union import asyncer -from pipelines.models.bases import Step, StepStatus +from pipelines.models.steps import Step, StepStatus if TYPE_CHECKING: from pipelines.models.steps import StepResult diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/bases.py b/airbyte-ci/connectors/pipelines/pipelines/models/bases.py index b3730ea4bebd2..01e769018327a 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/bases.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/bases.py @@ -6,25 +6,12 @@ from __future__ import annotations -import logging -from abc import ABC, abstractmethod from dataclasses import dataclass, field -from datetime import datetime, timedelta -from enum import Enum -from typing import TYPE_CHECKING, Any, ClassVar, List, Optional, Set +from typing import Set -import anyio -import asyncer from anyio import Path from connector_ops.utils import Connector -from dagger import Container, DaggerError -from pipelines import sentry_utils -from pipelines.helpers.utils import METADATA_FILE_NAME, format_duration, get_exec_result -from rich.style import Style - - -if TYPE_CHECKING: - from pipelines.models.contexts import PipelineContext +from pipelines.helpers.utils import METADATA_FILE_NAME @dataclass(frozen=True) @@ -34,268 +21,3 @@ class ConnectorWithModifiedFiles(Connector): @property def has_metadata_change(self) -> bool: return any(path.name == METADATA_FILE_NAME for path in self.modified_files) - - -class StepStatus(Enum): - """An Enum to characterize the success, failure or skipping of a Step.""" - - SUCCESS = "Successful" - FAILURE = "Failed" - SKIPPED = "Skipped" - - def get_rich_style(self) -> Style: - """Match color used in the console output to the step status.""" - if self is StepStatus.SUCCESS: - return Style(color="green") - if self is StepStatus.FAILURE: - return Style(color="red", bold=True) - if self is StepStatus.SKIPPED: - return Style(color="yellow") - - def get_emoji(self) -> str: - """Match emoji used in the console output to the step status.""" - if self is StepStatus.SUCCESS: - return "✅" - if self is StepStatus.FAILURE: - return "❌" - if self is StepStatus.SKIPPED: - return "🟡" - - def __str__(self) -> str: # noqa D105 - return self.value - - -class Step(ABC): - """An abstract class to declare and run pipeline step.""" - - title: ClassVar[str] - max_retries: ClassVar[int] = 0 - max_dagger_error_retries: ClassVar[int] = 3 - should_log: ClassVar[bool] = True - success_exit_code: ClassVar[int] = 0 - skipped_exit_code: ClassVar[int] = None - # The max duration of a step run. If the step run for more than this duration it will be considered as timed out. - # The default of 5 hours is arbitrary and can be changed if needed. 
- max_duration: ClassVar[timedelta] = timedelta(hours=5) - - retry_delay = timedelta(seconds=10) - - def __init__(self, context: PipelineContext) -> None: # noqa D107 - self.context = context - self.retry_count = 0 - self.started_at = None - self.stopped_at = None - - @property - def run_duration(self) -> timedelta: - if self.started_at and self.stopped_at: - return self.stopped_at - self.started_at - else: - return timedelta(seconds=0) - - @property - def logger(self) -> logging.Logger: - if self.should_log: - return logging.getLogger(f"{self.context.pipeline_name} - {self.title}") - else: - disabled_logger = logging.getLogger() - disabled_logger.disabled = True - return disabled_logger - - @property - def dagger_client(self) -> Container: - return self.context.dagger_client.pipeline(self.title) - - async def log_progress(self, completion_event: anyio.Event) -> None: - """Log the step progress every 30 seconds until the step is done.""" - while not completion_event.is_set(): - duration = datetime.utcnow() - self.started_at - elapsed_seconds = duration.total_seconds() - if elapsed_seconds > 30 and round(elapsed_seconds) % 30 == 0: - self.logger.info(f"⏳ Still running... (duration: {format_duration(duration)})") - await anyio.sleep(1) - - async def run_with_completion(self, completion_event: anyio.Event, *args, **kwargs) -> StepResult: - """Run the step with a timeout and set the completion event when the step is done.""" - try: - with anyio.fail_after(self.max_duration.total_seconds()): - result = await self._run(*args, **kwargs) - completion_event.set() - return result - except TimeoutError: - self.retry_count = self.max_retries + 1 - self.logger.error(f"🚨 {self.title} timed out after {self.max_duration}. No additional retry will happen.") - completion_event.set() - return self._get_timed_out_step_result() - - @sentry_utils.with_step_context - async def run(self, *args, **kwargs) -> StepResult: - """Public method to run the step. It output a step result. - - If an unexpected dagger error happens it outputs a failed step result with the exception payload. - - Returns: - StepResult: The step result following the step run. - """ - self.logger.info(f"🚀 Start {self.title}") - self.started_at = datetime.utcnow() - completion_event = anyio.Event() - try: - async with asyncer.create_task_group() as task_group: - soon_result = task_group.soonify(self.run_with_completion)(completion_event, *args, **kwargs) - task_group.soonify(self.log_progress)(completion_event) - step_result = soon_result.value - except DaggerError as e: - self.logger.error("Step failed with an unexpected dagger error", exc_info=e) - step_result = StepResult(self, StepStatus.FAILURE, stderr=str(e), exc_info=e) - - self.stopped_at = datetime.utcnow() - self.log_step_result(step_result) - - lets_retry = self.should_retry(step_result) - step_result = await self.retry(step_result, *args, **kwargs) if lets_retry else step_result - return step_result - - def should_retry(self, step_result: StepResult) -> bool: - """Return True if the step should be retried.""" - if step_result.status is not StepStatus.FAILURE: - return False - max_retries = self.max_dagger_error_retries if step_result.exc_info else self.max_retries - return self.retry_count < max_retries and max_retries > 0 - - async def retry(self, step_result, *args, **kwargs) -> StepResult: - self.retry_count += 1 - self.logger.warn( - f"Failed with error: {step_result.stderr}.\nRetry #{self.retry_count} in {self.retry_delay.total_seconds()} seconds..." 
- ) - await anyio.sleep(self.retry_delay.total_seconds()) - return await self.run(*args, **kwargs) - - def log_step_result(self, result: StepResult) -> None: - """Log the step result. - - Args: - result (StepResult): The step result to log. - """ - duration = format_duration(self.run_duration) - if result.status is StepStatus.FAILURE: - self.logger.info(f"{result.status.get_emoji()} failed (duration: {duration})") - if result.status is StepStatus.SKIPPED: - self.logger.info(f"{result.status.get_emoji()} was skipped (duration: {duration})") - if result.status is StepStatus.SUCCESS: - self.logger.info(f"{result.status.get_emoji()} was successful (duration: {duration})") - - @abstractmethod - async def _run(self, *args, **kwargs) -> StepResult: - """Implement the execution of the step and return a step result. - - Returns: - StepResult: The result of the step run. - """ - raise NotImplementedError("Steps must define a '_run' attribute.") - - def skip(self, reason: str = None) -> StepResult: - """Declare a step as skipped. - - Args: - reason (str, optional): Reason why the step was skipped. - - Returns: - StepResult: A skipped step result. - """ - return StepResult(self, StepStatus.SKIPPED, stdout=reason) - - def get_step_status_from_exit_code( - self, - exit_code: int, - ) -> StepStatus: - """Map an exit code to a step status. - - Args: - exit_code (int): A process exit code. - - Raises: - ValueError: Raised if the exit code is not mapped to a step status. - - Returns: - StepStatus: The step status inferred from the exit code. - """ - if exit_code == self.success_exit_code: - return StepStatus.SUCCESS - elif self.skipped_exit_code is not None and exit_code == self.skipped_exit_code: - return StepStatus.SKIPPED - else: - return StepStatus.FAILURE - - async def get_step_result(self, container: Container) -> StepResult: - """Concurrent retrieval of exit code, stdout and stdout of a container. - - Create a StepResult object from these objects. - - Args: - container (Container): The container from which we want to infer a step result/ - - Returns: - StepResult: Failure or success with stdout and stderr. - """ - exit_code, stdout, stderr = await get_exec_result(container) - return StepResult( - self, - self.get_step_status_from_exit_code(exit_code), - stderr=stderr, - stdout=stdout, - output_artifact=container, - ) - - def _get_timed_out_step_result(self) -> StepResult: - return StepResult( - self, - StepStatus.FAILURE, - stdout=f"Timed out after the max duration of {format_duration(self.max_duration)}. 
Please checkout the Dagger logs to see what happened.", - ) - - -class NoOpStep(Step): - """A step that does nothing.""" - - title = "No Op" - should_log = False - - def __init__(self, context: PipelineContext, step_status: StepStatus) -> None: - super().__init__(context) - self.step_status = step_status - - async def _run(self, *args, **kwargs) -> StepResult: - return StepResult(self, self.step_status) - - -@dataclass(frozen=True) -class StepResult: - """A dataclass to capture the result of a step.""" - - step: Step - status: StepStatus - created_at: datetime = field(default_factory=datetime.utcnow) - stderr: Optional[str] = None - stdout: Optional[str] = None - output_artifact: Any = None - exc_info: Optional[Exception] = None - - def __repr__(self) -> str: # noqa D105 - return f"{self.step.title}: {self.status.value}" - - def __str__(self) -> str: # noqa D105 - return f"{self.step.title}: {self.status.value}\n\nSTDOUT:\n{self.stdout}\n\nSTDERR:\n{self.stderr}" - - def __post_init__(self): - if self.stderr: - super().__setattr__("stderr", self.redact_secrets_from_string(self.stderr)) - if self.stdout: - super().__setattr__("stdout", self.redact_secrets_from_string(self.stdout)) - - def redact_secrets_from_string(self, value: str) -> str: - for secret in self.step.context.secrets_to_mask: - value = value.replace(secret, "********") - return value - - diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py index 21d10eb74f74d..a0e0d82a687f5 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py @@ -9,7 +9,7 @@ import anyio from connector_ops.utils import ConnectorLanguage -from pipelines.models.bases import StepResult +from pipelines.models.steps import StepResult from pipelines.pipeline.connectors.builds.steps import python_connectors from pipelines.pipeline.connectors.builds.steps.common import LoadContainerToLocalDockerHost, StepStatus from pipelines.consts import LOCAL_BUILD_PLATFORM diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py index 6cc4376234129..c09d2a94e018d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py @@ -4,7 +4,7 @@ import semver from pipelines import consts from pipelines.helpers.connectors import metadata_change_helpers -from pipelines.models.bases import Step, StepResult, StepStatus +from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.reports import ConnectorReport diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py index b0451afa49bd8..31c68f8b709e4 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py @@ -10,7 +10,7 @@ from dagger import Directory from jinja2 import Template from pipelines import consts 
-from pipelines.models.bases import Step, StepResult, StepStatus +from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.pipeline.connectors.bump_version.pipeline import BumpDockerImageTagInMetadata from pipelines.pipeline.connectors.bump_version.pipeline import AddChangelogEntry, get_bumped_version from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py index 26daeeb1ee7be..d0a9b91f439fe 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py @@ -12,9 +12,10 @@ from pipelines import consts from pipelines.dagger.actions.system import docker from pipelines.dagger.actions.remote_storage import upload_to_gcs -from pipelines.models.bases import ConnectorReport, Step, StepResult, StepStatus +from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.pipeline.connectors.builds import steps from pipelines.pipeline.connectors.publish.context import PublishConnectorContext +from pipelines.pipeline.connectors.reports import ConnectorReport from pipelines.pipeline.metadata.pipeline import MetadataValidation, MetadataUpload from pydantic import ValidationError diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py index cd2f1a1d47af5..03393a4756c19 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py @@ -1,6 +1,6 @@ from pipelines.consts import GCS_PUBLIC_DOMAIN from pipelines.helpers.utils import format_duration -from pipelines.models.bases import StepStatus +from pipelines.models.steps import StepStatus from pipelines.models.reports import Report diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py index 8fa15c0d8d0e5..baf8381bc7cc1 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py @@ -9,8 +9,9 @@ import anyio import asyncer from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage -from pipelines.models.bases import ConnectorReport, StepResult +from pipelines.models.steps import StepResult from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.reports import ConnectorReport from pipelines.pipeline.connectors.test.steps import java_connectors from pipelines.pipeline.metadata.pipeline import MetadataValidation from pipelines.pipeline.connectors.test.steps import python_connectors diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py index 0222bd8b693bf..3c7051cc4c321 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py @@ -17,8 +17,8 @@ from dagger import Container, Directory, File from pipelines import hacks from 
pipelines.dagger.actions import secrets -import pipelines.dagger.actions.internal_tools -from pipelines.models.bases import Step, StepResult, StepStatus +from pipelines.dagger.containers import internal_tools +from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.helpers.utils import METADATA_FILE_NAME from pipelines.consts import CIContext @@ -145,7 +145,7 @@ async def _run(self) -> StepResult: Returns: StepResult: Failure or success of the QA checks with stdout and stderr. """ - connector_ops = await pipelines.dagger.actions.internal_tools.with_connector_ops(self.context) + connector_ops = await internal_tools.with_connector_ops(self.context) include = [ str(self.context.connector.code_directory), str(self.context.connector.documentation_file_path), diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py index 4505d9c2ee198..82ec89c94ccfa 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py @@ -15,7 +15,7 @@ from pipelines.pipeline.connectors.builds.steps.normalization import BuildOrPullNormalization from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipelines.models.steps import GradleTask +from pipelines.pipeline.steps import GradleTask from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests from pipelines.helpers.utils import export_container_to_tarball diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py index 8c2d1d58edd4c..4daaa31aad540 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py @@ -3,17 +3,21 @@ from typing import Optional import dagger -from pipelines.dagger.containers.python import with_python_base, with_pip_packages -from pipelines.models.bases import Report, Step, StepResult +from pipelines.dagger.actions.python.common import with_pip_packages +from pipelines.dagger.containers.python import with_python_base +from pipelines.models.steps import Step, StepResult +from pipelines.models.reports import Report from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext from pipelines.helpers.steps import run_steps -from pipelines.models.steps import PoetryRunStep, MountPath, SimpleDockerStep +from pipelines.models.steps import MountPath from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.models.steps import MountPath, SimpleDockerStep -from pipelines.internal_tools import INTERNAL_TOOL_PATHS +from pipelines.models.steps import MountPath +from pipelines.internal_tools.internal import INTERNAL_TOOL_PATHS +from pipelines.pipeline.steps.docker import SimpleDockerStep +from pipelines.pipeline.steps.poetry import PoetryRunStep # STEPS diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py index e69de29bb2d1d..31b9824e52b31 100644 
--- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py @@ -0,0 +1,16 @@ +from pipelines.models.contexts import PipelineContext +from pipelines.models.steps import Step, StepResult, StepStatus + + +class NoOpStep(Step): + """A step that does nothing.""" + + title = "No Op" + should_log = False + + def __init__(self, context: PipelineContext, step_status: StepStatus) -> None: + super().__init__(context) + self.step_status = step_status + + async def _run(self, *args, **kwargs) -> StepResult: + return StepResult(self, self.step_status) \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py index 3358083f8213c..19a4a94783b9a 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py @@ -5,7 +5,7 @@ from pathlib import Path import pytest -from pipelines.models.bases import StepStatus +from pipelines.models.steps import StepStatus from pipelines.pipeline.connectors.builds.steps import python_connectors from pipelines.pipeline.connectors.builds.steps import build_customization from pipelines.pipeline.connectors.context import ConnectorContext diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 7dd1ee3d9bcfa..bc7456594c439 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -9,7 +9,7 @@ import anyio import pytest from pipelines import publish -from pipelines.models.bases import StepStatus +from pipelines.models.steps import StepStatus import pipelines.pipeline.metadata.pipeline pytestmark = [ From 01340fe673ea5cbd2b72d046c79fcd45981ff01a Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 17:42:39 -0500 Subject: [PATCH 10/38] run formatter --- .../pipelines/pipelines/cli/airbyte_ci.py | 17 +++++++++-------- .../pipelines/cli/dagger_pipeline_command.py | 6 +++--- .../connectors/pipelines/pipelines/consts.py | 2 +- .../dagger/actions/connector/hooks.py | 7 ++----- .../dagger/actions/connector/normalization.py | 3 +-- .../pipelines/dagger/actions/python/common.py | 11 ++++------- .../pipelines/dagger/actions/python/pipx.py | 9 +++------ .../pipelines/dagger/actions/python/poetry.py | 13 +++++-------- .../pipelines/dagger/actions/secrets.py | 3 +-- .../pipelines/dagger/actions/system/common.py | 5 ++--- .../pipelines/dagger/actions/system/docker.py | 8 +++----- .../dagger/containers/internal_tools.py | 4 +--- .../pipelines/dagger/containers/java.py | 6 ++---- .../pipelines/dagger/containers/python.py | 4 +--- .../pipelines/helpers/connectors/modifed.py | 6 ++---- .../pipelines/pipelines/helpers/gcs.py | 13 +++++-------- .../pipelines/pipelines/helpers/git.py | 14 +++----------- .../pipelines/pipelines/helpers/utils.py | 1 - .../pipelines/pipelines/models/contexts.py | 5 ++--- .../pipelines/pipelines/models/reports.py | 11 ++++------- .../pipelines/pipelines/models/steps.py | 16 ++++++---------- .../pipeline/connectors/builds/commands.py | 8 +++----- .../connectors/builds/steps/common.py | 4 ++-- .../builds/steps/java_connectors.py | 2 +- .../builds/steps/python_connectors.py | 3 +-- .../connectors/bump_version/commands.py | 8 +++----- .../connectors/bump_version/pipeline.py | 5 +++-- 
.../pipelines/pipeline/connectors/commands.py | 10 +++------- .../pipelines/pipeline/connectors/context.py | 5 ++--- .../pipeline/connectors/list/commands.py | 6 ++---- .../migrate_to_base_image/commands.py | 10 ++++------ .../migrate_to_base_image/pipeline.py | 6 ++---- .../pipelines/pipeline/connectors/pipeline.py | 4 ++-- .../pipeline/connectors/publish/commands.py | 10 ++++------ .../pipeline/connectors/publish/context.py | 4 ++-- .../pipeline/connectors/publish/pipeline.py | 5 +++-- .../pipelines/pipeline/connectors/reports.py | 19 ++++++++----------- .../pipeline/connectors/test/commands.py | 13 +++++-------- .../pipeline/connectors/test/steps/common.py | 4 ++-- .../connectors/test/steps/java_connectors.py | 12 ++++++++---- .../test/steps/python_connectors.py | 8 ++++---- .../connectors/upgrade_base_image/commands.py | 10 ++++------ .../pipelines/pipeline/metadata/commands.py | 7 ++----- .../pipelines/pipeline/metadata/pipeline.py | 15 ++++++--------- .../pipelines/pipeline/steps/docker.py | 11 ++++------- .../pipelines/pipelines/pipeline/steps/git.py | 2 +- .../pipelines/pipeline/steps/gradle.py | 13 +++++-------- .../pipelines/pipeline/steps/no_op.py | 2 +- .../pipelines/pipeline/steps/poetry.py | 2 +- .../pipelines/pipeline/test/commands.py | 4 +--- .../pipelines/pipeline/test/pipeline.py | 11 ++++------- .../test_builds/test_python_connectors.py | 3 +-- .../test_groups/test_connectors.py | 10 +++++----- .../pipelines/tests/test_publish.py | 2 +- .../test_steps/test_simple_docker_step.py | 2 +- .../connectors/pipelines/tests/test_utils.py | 2 +- 56 files changed, 162 insertions(+), 244 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py index ad8afda43afb1..f9e9376c07bae 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py +++ b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py @@ -10,16 +10,17 @@ import click from github import PullRequest from pipelines import main_logger -from pipelines.helpers import github -from pipelines.helpers.git import get_current_git_branch, get_current_git_revision, get_modified_files_in_branch, get_modified_files_in_commit, get_modified_files_in_pull_request -from pipelines.consts import CIContext -from pipelines.consts import LOCAL_PIPELINE_PACKAGE_PATH from pipelines.cli.telemetry import track_command -from pipelines.helpers.utils import ( - get_current_epoch_time, - transform_strs_to_paths, +from pipelines.consts import LOCAL_PIPELINE_PACKAGE_PATH, CIContext +from pipelines.helpers import github +from pipelines.helpers.git import ( + get_current_git_branch, + get_current_git_revision, + get_modified_files_in_branch, + get_modified_files_in_commit, + get_modified_files_in_pull_request, ) - +from pipelines.helpers.utils import get_current_epoch_time, transform_strs_to_paths from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.metadata.commands import metadata from pipelines.pipeline.test.commands import test diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py b/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py index 2c17304cdf0c0..efbb19a428132 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py +++ b/airbyte-ci/connectors/pipelines/pipelines/cli/dagger_pipeline_command.py @@ -13,11 +13,11 @@ import click from dagger import DaggerError from pipelines import consts, main_logger -from pipelines.consts 
import GCS_PUBLIC_DOMAIN +from pipelines.consts import GCS_PUBLIC_DOMAIN, STATIC_REPORT_PREFIX from pipelines.helpers import sentry_utils -from pipelines.helpers.utils import slugify from pipelines.helpers.gcs import upload_to_gcs -from pipelines.consts import STATIC_REPORT_PREFIX +from pipelines.helpers.utils import slugify + class DaggerPipelineCommand(click.Command): @sentry_utils.with_command_context diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py b/airbyte-ci/connectors/pipelines/pipelines/consts.py index 509b27eba5ad1..94cf572e2829a 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/consts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py @@ -2,8 +2,8 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from enum import Enum import platform +from enum import Enum import git from dagger import Platform diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py index 756d5f5c518a5..d987d8202a650 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py @@ -1,11 +1,8 @@ -from pipelines.pipeline.connectors.context import ConnectorContext - +import importlib.util from dagger import Container from dagger.engine._version import CLI_VERSION as dagger_engine_version - - -import importlib.util +from pipelines.pipeline.connectors.context import ConnectorContext async def finalize_build(context: ConnectorContext, connector_container: Container) -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py index ccc79acfb96d6..cf1e97965c2b2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py @@ -1,7 +1,6 @@ +from dagger import Container, Platform from pipelines.pipeline.connectors.context import ConnectorContext - -from dagger import Container, Platform BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = { "destination-clickhouse": { "dockerfile": "clickhouse.Dockerfile", diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py index 6e1f605ad30ab..c468efbd4d3f8 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py @@ -1,19 +1,16 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from pathlib import Path import re +from pathlib import Path +from typing import List, Optional + +from dagger import Container, Directory from pipelines.dagger.containers.python import with_python_base, with_testing_dependencies from pipelines.helpers.utils import check_path_in_workdir, get_file_contents from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext -from dagger import Container, Directory - - -from typing import List, Optional - - def with_python_package( context: PipelineContext, python_environment: Container, diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py index 21cbf0751a62d..ccc06414549df 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py @@ -1,14 +1,11 @@ +from typing import List, Optional + +from dagger import Container from pipelines.dagger.actions.python.common import with_pip_packages, with_python_package from pipelines.dagger.actions.python.poetry import find_local_dependencies_in_pyproject_toml from pipelines.pipeline.connectors.context import PipelineContext -from dagger import Container - - -from typing import List, Optional - - def with_pipx(base_python_container: Container) -> Container: """Installs pipx in a python container. diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py index c5ad862a824a4..99558a778c67f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py @@ -1,4 +1,9 @@ import uuid +from pathlib import Path +from typing import List, Optional + +import toml +from dagger import Container, Directory from pipelines.dagger.actions.python.common import with_pip_packages, with_python_package from pipelines.dagger.actions.system.common import with_debian_packages from pipelines.dagger.containers.python import with_python_base @@ -6,14 +11,6 @@ from pipelines.pipeline.connectors.context import PipelineContext -import toml -from dagger import Container, Directory - - -from pathlib import Path -from typing import List, Optional - - async def find_local_dependencies_in_pyproject_toml( context: PipelineContext, base_container: Container, diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py index 049db7c56a98c..ec401b3506b22 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py @@ -13,8 +13,7 @@ if TYPE_CHECKING: from dagger import Container - from pipelines.pipeline.connectors.context import ConnectorContext - from pipelines.pipeline.connectors.context import PipelineContext + from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext async def get_secrets_to_mask(ci_credentials_with_downloaded_secrets: Container) -> list[str]: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py index 7d3b3835cb602..7c822981a35b2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py +++ 
b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py @@ -1,11 +1,10 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from dagger import Container - - from typing import List +from dagger import Container + def with_debian_packages(base_container: Container, packages_to_install: List[str]) -> Container: """Installs packages using apt-get. diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py index 073c789063854..bbd160d14353f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py @@ -1,13 +1,11 @@ import json -from typing import Callable import uuid +from typing import Callable + +from dagger import Client, Container, File from pipelines import consts from pipelines.consts import DOCKER_HOST_NAME, DOCKER_HOST_PORT, DOCKER_TMP_VOLUME_NAME from pipelines.helpers.utils import sh_dash_c - - -from dagger import Client, Container, File - from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py index 3aa661f20bbe2..9941c850073b4 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py @@ -1,12 +1,10 @@ +from dagger import Container, Secret from pipelines.dagger.actions.python.pipx import with_installed_pipx_package from pipelines.dagger.containers.python import with_python_base from pipelines.internal_tools.internal import INTERNAL_TOOL_PATHS from pipelines.pipeline.connectors.context import PipelineContext -from dagger import Container, Secret - - async def with_ci_credentials(context: PipelineContext, gsm_secret: Secret) -> Container: """Install the ci_credentials package in a python environment. diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py index 9fb064232f4cc..cf849be7dadb9 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py @@ -1,16 +1,14 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from pipelines.dagger.actions.connector.hooks import finalize_build +from dagger import CacheVolume, Container, File, Platform from pipelines.consts import AMAZONCORRETTO_IMAGE +from pipelines.dagger.actions.connector.hooks import finalize_build from pipelines.dagger.actions.connector.normalization import DESTINATION_NORMALIZATION_BUILD_CONFIGURATION, with_normalization from pipelines.helpers.utils import sh_dash_c from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext -from dagger import CacheVolume, Container, File, Platform - - def with_integration_base(context: PipelineContext, build_platform: Platform) -> Container: return ( context.dagger_client.container(platform=build_platform) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py index 288f361a39b6a..07143e3dfeb13 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py @@ -1,11 +1,9 @@ +from dagger import CacheVolume, Container from pipelines.consts import CONNECTOR_TESTING_REQUIREMENTS, LICENSE_SHORT_FILE_PATH, PYPROJECT_TOML_FILE_PATH from pipelines.helpers.utils import sh_dash_c from pipelines.pipeline.connectors.context import PipelineContext -from dagger import CacheVolume, Container - - def with_python_base(context: PipelineContext, python_version: str = "3.10") -> Container: """Build a Python container with a cache volume for pip cache. diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py index cedbe298f595e..e52a5fcc29340 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py @@ -1,13 +1,11 @@ -from connector_ops.utils import Connector - - from pathlib import Path from typing import FrozenSet, Set, Union + +from connector_ops.utils import Connector from pipelines import main_logger from pipelines.helpers.utils import IGNORED_FILE_EXTENSIONS - def get_connector_modified_files(connector: Connector, all_modified_files: Set[Path]) -> FrozenSet[Path]: connector_modified_files = set() for modified_file in all_modified_files: diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py index fb936f177b8c7..fa19df28ce21f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py @@ -1,15 +1,12 @@ -from pipelines import main_logger -from pipelines.consts import GCS_PUBLIC_DOMAIN - - -from google.cloud import storage -from google.oauth2 import service_account - - import json from pathlib import Path from typing import Optional, Tuple +from google.cloud import storage +from google.oauth2 import service_account +from pipelines import main_logger +from pipelines.consts import GCS_PUBLIC_DOMAIN + def upload_to_gcs(file_path: Path, bucket_name: str, object_name: str, credentials: str) -> Tuple[str, str]: """Upload a file to a GCS bucket. 
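For context on the helper being reorganized here: upload_to_gcs takes a local file path, a bucket name, an object name, and service-account credentials serialized as a JSON string, and returns a Tuple[str, str]. A minimal sketch of a function with that signature, using only the google-cloud-storage and google-oauth2 calls imported above, could look like the following. The body is illustrative rather than the patched module's actual implementation: the sketch hardcodes the storage.googleapis.com host where the real module builds URLs from GCS_PUBLIC_DOMAIN, and the exact pair of returned strings is an assumption.

    import json
    from pathlib import Path
    from typing import Tuple

    from google.cloud import storage
    from google.oauth2 import service_account


    def upload_to_gcs_sketch(file_path: Path, bucket_name: str, object_name: str, credentials: str) -> Tuple[str, str]:
        # Build an authenticated client from the service-account JSON string.
        info = json.loads(credentials)
        sa_credentials = service_account.Credentials.from_service_account_info(info)
        client = storage.Client(credentials=sa_credentials, project=info["project_id"])

        # Upload the local file to gs://<bucket_name>/<object_name>.
        blob = client.bucket(bucket_name).blob(object_name)
        blob.upload_from_filename(str(file_path))

        # Return the gs:// URI and a public-style URL for the object (assumed return values).
        gcs_uri = f"gs://{bucket_name}/{object_name}"
        public_url = f"https://storage.googleapis.com/{bucket_name}/{object_name}"
        return gcs_uri, public_url
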
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py index e82244932b15f..53613799c228e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py @@ -1,9 +1,9 @@ -import anyio -from dagger import Connection from typing import List, Set + +import anyio import git +from dagger import Connection from github import PullRequest - from pipelines.helpers.utils import AIRBYTE_REPO_URL, DAGGER_CONFIG, DIFF_FILTER @@ -110,11 +110,3 @@ def get_modified_files_in_commit(current_git_branch: str, current_git_revision: def get_modified_files_in_pull_request(pull_request: PullRequest) -> List[str]: """Retrieve the list of modified files in a pull request.""" return [f.filename for f in pull_request.get_files()] - - - - - - - - diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py index cda317fda8a0b..cab5d2f63ec58 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py @@ -32,7 +32,6 @@ IGNORED_FILE_EXTENSIONS = [".md"] - # This utils will probably be redundant once https://github.com/dagger/dagger/issues/3764 is implemented async def check_path_in_workdir(container: Container, path: str) -> bool: """Check if a local path is mounted to the working directory of a container. diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py index b347c9c0b8b4f..69a47f540f1c2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/contexts.py @@ -15,13 +15,12 @@ from dagger import Client, Directory, File, Secret from github import PullRequest from pipelines import hacks -from pipelines.consts import CIContext -from pipelines.consts import ContextState +from pipelines.consts import CIContext, ContextState from pipelines.helpers.gcs import sanitize_gcs_credentials -from pipelines.models.reports import Report from pipelines.helpers.github import update_commit_status_check from pipelines.helpers.slack import send_message_to_webhook from pipelines.helpers.utils import AIRBYTE_REPO_URL +from pipelines.models.reports import Report class PipelineContext: diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/reports.py b/airbyte-ci/connectors/pipelines/pipelines/models/reports.py index 80fcf4386af41..8e1ecfec6c342 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/reports.py @@ -6,22 +6,19 @@ from __future__ import annotations -import anyio -import typing import json - +import typing from dataclasses import dataclass, field from datetime import datetime, timedelta - - from typing import List +import anyio from anyio import Path from connector_ops.utils import console from pipelines.consts import GCS_PUBLIC_DOMAIN, LOCAL_REPORTS_PATH_ROOT from pipelines.dagger.actions import remote_storage -from pipelines.models.steps import StepResult, StepStatus from pipelines.helpers.utils import format_duration +from pipelines.models.steps import StepResult, StepStatus from rich.console import Group from rich.panel import Panel from rich.style import Style @@ -31,6 +28,7 @@ if typing.TYPE_CHECKING: from pipelines.models.steps import PipelineContext + @dataclass(frozen=True) class Report: """A dataclass 
to build reports to share pipelines executions results with the user.""" @@ -182,4 +180,3 @@ def print(self): main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle) console.print(main_panel) - diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py index e1fdc1fb73dfd..25238a3363d22 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py @@ -3,35 +3,31 @@ # from __future__ import annotations +import logging +import typing from abc import abstractmethod from dataclasses import dataclass, field -from datetime import timedelta +from datetime import datetime, timedelta from enum import Enum from pathlib import Path -import typing -from typing import Optional, Any +from typing import Any, Optional import anyio import asyncer -from datetime import datetime -import logging -from dagger import DaggerError, Container -from pipelines.helpers import sentry_utils +from dagger import Container, DaggerError from pipelines import main_logger +from pipelines.helpers import sentry_utils from pipelines.helpers.utils import format_duration, get_exec_result if typing.TYPE_CHECKING: from pipelines.models.contexts import PipelineContext - - from abc import ABC from typing import ClassVar from rich.style import Style - @dataclass class MountPath: path: Path diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py index 058a15a7a9d45..11bd1f48c395e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py @@ -1,12 +1,10 @@ +import anyio +import click +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.pipeline.connectors.builds.steps import run_connector_build_pipeline from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - -import anyio -import click @connectors.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py index 7f424fe1a6f71..bfb045a372e1b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py @@ -7,10 +7,10 @@ import docker from dagger import Container, ExecError, Platform, QueryError -from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.consts import BUILD_PLATFORMS -from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.helpers.utils import export_container_to_tarball +from pipelines.models.steps import Step, StepResult, StepStatus +from pipelines.pipeline.connectors.context import ConnectorContext class BuildConnectorImagesBase(Step, ABC): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py 
b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py index 7af361e035e41..6e638ee915350 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py @@ -5,10 +5,10 @@ from typing import List, Optional, Tuple, Union from dagger import Container, Directory, ExecError, File, Host, Platform, QueryError +from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.dagger.containers import java from pipelines.models.steps import StepResult, StepStatus from pipelines.pipeline.connectors.builds.steps.common import BuildConnectorImagesBase -from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipelines.models.steps import GradleTask diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py index 9b6e4cf51e1fc..b5ea33d95c00a 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py @@ -4,8 +4,7 @@ from dagger import Container, Platform -from pipelines.dagger.actions.python.common import with_python_connector_installed -from pipelines.dagger.actions.python.common import apply_python_development_overrides +from pipelines.dagger.actions.python.common import apply_python_development_overrides, with_python_connector_installed from pipelines.models.steps import StepResult from pipelines.pipeline.connectors.builds.steps import build_customization from pipelines.pipeline.connectors.builds.steps.common import BuildConnectorImagesBase diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py index 89b8fdc270a91..814c20f93afee 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py @@ -1,12 +1,10 @@ +import anyio +import click +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.pipeline.connectors.bump_version.pipeline import run_connector_version_bump_pipeline from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - -import anyio -import click @connectors.command(cls=DaggerPipelineCommand, help="Bump a connector version: update metadata.yaml and changelog.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py index c09d2a94e018d..023a641df14f9 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py @@ -1,7 +1,8 @@ -from copy import deepcopy -from dagger import Container import datetime +from copy import deepcopy + import semver +from dagger import Container 
from pipelines import consts from pipelines.helpers.connectors import metadata_change_helpers from pipelines.models.steps import Step, StepResult, StepStatus diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index d56ef18b728a7..93f4f5604b8ca 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -1,16 +1,12 @@ import os from pathlib import Path -from pipelines import main_logger +from typing import List, Set, Tuple import click from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo - - -from typing import List, Set, Tuple -from pipelines.helpers.connectors.modifed import get_modified_connectors - +from pipelines import main_logger +from pipelines.helpers.connectors.modifed import get_connector_modified_files, get_modified_connectors from pipelines.models.bases import ConnectorWithModifiedFiles -from pipelines.helpers.connectors.modifed import get_connector_modified_files ALL_CONNECTORS = get_all_connectors_in_repo() diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py index f35c6c01e9b5d..5b098b969c70e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py @@ -14,11 +14,11 @@ from dagger import Directory from github import PullRequest from pipelines.dagger.actions import secrets -from pipelines.models.bases import ConnectorWithModifiedFiles -from pipelines.models.contexts import PipelineContext from pipelines.helpers.github import update_commit_status_check from pipelines.helpers.slack import send_message_to_webhook from pipelines.helpers.utils import METADATA_FILE_NAME +from pipelines.models.bases import ConnectorWithModifiedFiles +from pipelines.models.contexts import PipelineContext from pipelines.pipeline.connectors.reports import ConnectorReport @@ -232,4 +232,3 @@ async def __aexit__( def create_slack_message(self) -> str: raise NotImplementedError - diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py index 2e75d7142b2cf..0d7d7aa7ebb4a 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py @@ -1,9 +1,7 @@ -from pipelines.pipeline.connectors.commands import connectors -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - import click from connector_ops.utils import console +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand +from pipelines.pipeline.connectors.commands import connectors from rich.table import Table from rich.text import Text diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py index 8cb33f5b07dac..8daeb04ae2625 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py @@ -1,12 +1,10 @@ -from 
pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline +import anyio +import click +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - -import anyio -import click @connectors.command( diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py index 31c68f8b709e4..d6a9adcc4aa18 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py @@ -10,14 +10,12 @@ from dagger import Directory from jinja2 import Template from pipelines import consts +from pipelines.helpers import metadata_change_helpers from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.pipeline.connectors.bump_version.pipeline import BumpDockerImageTagInMetadata -from pipelines.pipeline.connectors.bump_version.pipeline import AddChangelogEntry, get_bumped_version +from pipelines.pipeline.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext from pipelines.pipeline.connectors.reports import ConnectorReport -from pipelines.helpers import metadata_change_helpers - class UpgradeBaseImageMetadata(Step): title = "Upgrade the base image to the latest version in metadata.yaml" diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py index eb933636f004f..58ffeb1e2b41f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py @@ -12,11 +12,11 @@ import dagger from connector_ops.utils import ConnectorLanguage from dagger import Config +from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT from pipelines.dagger.actions.system import docker +from pipelines.helpers.utils import create_and_open_file from pipelines.models.bases import NoOpStep, Report, StepResult, StepStatus -from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT from pipelines.pipeline.connectors.context import ConnectorContext, ContextState -from pipelines.helpers.utils import create_and_open_file GITHUB_GLOBAL_CONTEXT = "[POC please ignore] Connectors CI" GITHUB_GLOBAL_DESCRIPTION = "Running connectors tests" diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py index 677bbaebd7d0d..b46e34319a614 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py @@ -1,14 +1,12 @@ +import anyio +import click from pipelines import main_logger 
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ContextState -from pipelines.pipeline.connectors.publish.context import PublishConnectorContext from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines +from pipelines.pipeline.connectors.publish.context import PublishConnectorContext from pipelines.publish import reorder_contexts, run_connector_publish_pipeline -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - -import anyio -import click @connectors.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py index f18ea26b7ccd2..de7fe25a169f9 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py @@ -8,11 +8,11 @@ from dagger import Secret from github import PullRequest +from pipelines.consts import ContextState from pipelines.helpers.gcs import sanitize_gcs_credentials -from pipelines.models.bases import ConnectorWithModifiedFiles from pipelines.helpers.utils import format_duration +from pipelines.models.bases import ConnectorWithModifiedFiles from pipelines.pipeline.connectors import ConnectorContext -from pipelines.consts import ContextState class PublishConnectorContext(ConnectorContext): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py index d0a9b91f439fe..bb0f79e61772c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py @@ -10,13 +10,13 @@ from airbyte_protocol.models.airbyte_protocol import ConnectorSpecification from dagger import Container, ExecError, File, ImageLayerCompression, QueryError from pipelines import consts -from pipelines.dagger.actions.system import docker from pipelines.dagger.actions.remote_storage import upload_to_gcs +from pipelines.dagger.actions.system import docker from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.pipeline.connectors.builds import steps from pipelines.pipeline.connectors.publish.context import PublishConnectorContext from pipelines.pipeline.connectors.reports import ConnectorReport -from pipelines.pipeline.metadata.pipeline import MetadataValidation, MetadataUpload +from pipelines.pipeline.metadata.pipeline import MetadataUpload, MetadataValidation from pydantic import ValidationError @@ -207,6 +207,7 @@ async def _run(self, built_connector: Container) -> StepResult: ## Pipeline + async def run_connector_publish_pipeline(context: PublishConnectorContext, semaphore: anyio.Semaphore) -> ConnectorReport: """Run a publish pipeline for a single connector. 
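The entry point whose signature appears just above follows the same concurrency shape as the other per-connector pipelines in this package: a semaphore caps how many connectors run at once, and the async context manager around the context handles setup and report finalization. A runnable toy showing only that shape (DummyContext and the string report are placeholders, not the real PublishConnectorContext or ConnectorReport):

    import anyio


    class DummyContext:
        """Stand-in for a pipeline context: acquire resources on enter, finalize on exit."""

        report = None

        async def __aenter__(self):
            return self

        async def __aexit__(self, *exc_info):
            return False  # never swallow exceptions


    async def run_one(context: DummyContext, semaphore: anyio.Semaphore) -> None:
        async with semaphore:  # cap pipeline concurrency
            async with context:  # open/close the pipeline context
                context.report = "report for this connector"


    async def main() -> None:
        semaphore = anyio.Semaphore(2)  # at most two pipelines in flight
        contexts = [DummyContext() for _ in range(5)]
        async with anyio.create_task_group() as tg:
            for ctx in contexts:
                tg.start_soon(run_one, ctx, semaphore)


    anyio.run(main)

This is consistent with why each run_* entry point here accepts an anyio.Semaphore argument instead of deciding its own concurrency.
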
diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py index 03393a4756c19..4cd6b98e39863 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py @@ -1,12 +1,14 @@ -from pipelines.consts import GCS_PUBLIC_DOMAIN -from pipelines.helpers.utils import format_duration -from pipelines.models.steps import StepStatus -from pipelines.models.reports import Report - +import json +import webbrowser +from dataclasses import dataclass from anyio import Path from connector_ops.utils import console from jinja2 import Environment, PackageLoader, select_autoescape +from pipelines.consts import GCS_PUBLIC_DOMAIN +from pipelines.helpers.utils import format_duration +from pipelines.models.reports import Report +from pipelines.models.steps import StepStatus from rich.console import Group from rich.panel import Panel from rich.style import Style @@ -15,11 +17,6 @@ from tabulate import tabulate -import json -import webbrowser -from dataclasses import dataclass - - @dataclass(frozen=True) class ConnectorReport(Report): """A dataclass to build connector test reports to share pipelines executions results with the user.""" @@ -168,4 +165,4 @@ def print(self): self.pipeline_context.logger.info(f"🔗 View runs for commit in Dagger Cloud: {self.pipeline_context.dagger_cloud_url}") main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle) - console.print(main_panel) \ No newline at end of file + console.print(main_panel) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py index 051c70d5a3797..ab96c627d8fe0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py @@ -1,17 +1,14 @@ +import sys + +import anyio +import click from pipelines import main_logger +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.helpers.github import update_global_commit_status_check_for_tests from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext, ContextState from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines from pipelines.pipeline.connectors.test.steps import run_connector_test_pipeline -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - -import anyio -import click - - -import sys @connectors.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py index 3c7051cc4c321..309fed265a731 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py @@ -16,11 +16,11 @@ from connector_ops.utils import Connector from dagger import Container, Directory, File from pipelines import hacks +from pipelines.consts import CIContext from pipelines.dagger.actions import secrets from pipelines.dagger.containers import internal_tools -from pipelines.models.steps import Step, StepResult, StepStatus from 
pipelines.helpers.utils import METADATA_FILE_NAME -from pipelines.consts import CIContext +from pipelines.models.steps import Step, StepResult, StepStatus class VersionCheck(Step, ABC): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py index 82ec89c94ccfa..e751746c2077e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py @@ -9,15 +9,19 @@ import anyio import asyncer from dagger import Directory, File, QueryError +from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.dagger.actions.system import docker, secrets +from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import StepResult, StepStatus -from pipelines.pipeline.connectors.builds.steps.java_connectors import BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path +from pipelines.pipeline.connectors.builds.steps.java_connectors import ( + BuildConnectorDistributionTar, + BuildConnectorImages, + dist_tar_directory_path, +) from pipelines.pipeline.connectors.builds.steps.normalization import BuildOrPullNormalization -from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.steps import GradleTask from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests -from pipelines.helpers.utils import export_container_to_tarball +from pipelines.pipeline.steps import GradleTask class IntegrationTests(GradleTask): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py index 4fa6b73c81686..14f024037ed34 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py @@ -8,17 +8,17 @@ from typing import Callable, Iterable, List, Tuple import asyncer +import pipelines.dagger.actions.python.common +import pipelines.dagger.actions.system.docker from dagger import Container, File from pipelines.actions import environments -import pipelines.dagger.actions.python.common +from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.dagger.actions import secrets -import pipelines.dagger.actions.system.docker +from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.pipeline.connectors.builds.steps.python_connectors import BuildConnectorImages -from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests -from pipelines.helpers.utils import export_container_to_tarball class CodeFormatChecks(Step): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py index c80cd83897e2d..0226311dbca40 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py +++ 
b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py @@ -1,12 +1,10 @@ -from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_base_image_upgrade_pipeline +import anyio +import click +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_base_image_upgrade_pipeline from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - -import anyio -import click @connectors.command(cls=DaggerPipelineCommand, help="Upgrades the base image version used by the selected connectors..") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py index 5ec47eea2be4a..38dcad7174c07 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py @@ -1,10 +1,7 @@ -from pipelines.pipeline.metadata.pipeline import run_metadata_orchestrator_deploy_pipeline -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand - - import anyio import click - +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand +from pipelines.pipeline.metadata.pipeline import run_metadata_orchestrator_deploy_pipeline # MAIN GROUP diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py index 4daaa31aad540..92078654088ab 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py @@ -1,26 +1,22 @@ - import uuid from typing import Optional import dagger +from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH from pipelines.dagger.actions.python.common import with_pip_packages from pipelines.dagger.containers.python import with_python_base -from pipelines.models.steps import Step, StepResult -from pipelines.models.reports import Report -from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH -from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext from pipelines.helpers.steps import run_steps -from pipelines.models.steps import MountPath from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable -from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.models.steps import MountPath from pipelines.internal_tools.internal import INTERNAL_TOOL_PATHS +from pipelines.models.reports import Report +from pipelines.models.steps import MountPath, Step, StepResult +from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext from pipelines.pipeline.steps.docker import SimpleDockerStep from pipelines.pipeline.steps.poetry import PoetryRunStep # STEPS + class MetadataValidation(SimpleDockerStep): def __init__(self, context: ConnectorContext): super().__init__( @@ -42,6 +38,7 @@ def __init__(self, context: ConnectorContext): ], ) + class MetadataUpload(SimpleDockerStep): # When the metadata service exits with this code, it means the metadata is valid but the upload was skipped because the 
metadata is already uploaded skipped_exit_code = 5 diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py index 0d5251fd545fd..0a4fdd2da5168 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py @@ -1,15 +1,12 @@ +from typing import List, Optional + +import dagger from pipelines.dagger.actions.python.pipx import with_installed_pipx_package from pipelines.dagger.containers.python import with_python_base from pipelines.models.contexts import PipelineContext from pipelines.models.steps import MountPath, Step, StepResult -import dagger - - -from typing import List, Optional - - class SimpleDockerStep(Step): def __init__( self, @@ -94,4 +91,4 @@ async def _run(self, command=None) -> StepResult: raise ValueError(f"No command given to the {self.title} step") container_to_run = await self.init_container() - return await self.get_step_result(container_to_run.with_exec(command_to_run)) \ No newline at end of file + return await self.get_step_result(container_to_run.with_exec(command_to_run)) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py index 5f8dad9b7a821..973d87afe4d4f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py @@ -3,9 +3,9 @@ # from dagger import Client, Container, Directory, Secret -from pipelines.models.steps import Step, StepResult from pipelines.helpers.github import AIRBYTE_GITHUB_REPO from pipelines.helpers.utils import sh_dash_c +from pipelines.models.steps import Step, StepResult def with_git(dagger_client, ci_git_user: str = "octavia") -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py index 41b9c5f2272f7..1625425ed49df 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py @@ -1,4 +1,8 @@ +from abc import ABC +from typing import ClassVar, List + import pipelines.dagger.actions.system.docker +from dagger import CacheSharingMode, CacheVolume from pipelines import hacks from pipelines.consts import AMAZONCORRETTO_IMAGE from pipelines.dagger.actions import secrets @@ -7,13 +11,6 @@ from pipelines.models.steps import Step, StepResult -from dagger import CacheSharingMode, CacheVolume - - -from abc import ABC -from typing import ClassVar, List - - class GradleTask(Step, ABC): """ A step to run a Gradle task. @@ -158,4 +155,4 @@ async def _run(self) -> StepResult: # Run the gradle task that we actually care about. 
connector_task = f":airbyte-integrations:connectors:{self.context.connector.technical_name}:{self.gradle_task_name}" gradle_container = gradle_container.with_exec(self._get_gradle_command(connector_task)) - return await self.get_step_result(gradle_container) \ No newline at end of file + return await self.get_step_result(gradle_container) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py index 31b9824e52b31..9d077cc155dc3 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py @@ -13,4 +13,4 @@ def __init__(self, context: PipelineContext, step_status: StepStatus) -> None: self.step_status = step_status async def _run(self, *args, **kwargs) -> StepResult: - return StepResult(self, self.step_status) \ No newline at end of file + return StepResult(self, self.step_status) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py index 1003ba9c619c8..a9c203692c676 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py @@ -22,4 +22,4 @@ def __init__(self, context: PipelineContext, title: str, parent_dir_path: str, m async def _run(self, poetry_run_args: list) -> StepResult: poetry_run_exec = self.poetry_run_container.with_exec(poetry_run_args) - return await self.get_step_result(poetry_run_exec) \ No newline at end of file + return await self.get_step_result(poetry_run_exec) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py index 3149b0511a64f..05b5fc1c0ce6b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py @@ -1,8 +1,6 @@ -from pipelines.pipeline.test.pipeline import run_test - - import anyio import click +from pipelines.pipeline.test.pipeline import run_test @click.command() diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py index d5ac46aaccc03..b381bf5d043e8 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py @@ -1,14 +1,11 @@ -from pipelines.consts import DOCKER_VERSION -from pipelines.helpers.utils import sh_dash_c - - -import dagger - - import logging import os import sys +import dagger +from pipelines.consts import DOCKER_VERSION +from pipelines.helpers.utils import sh_dash_c + async def run_test(poetry_package_path: str, test_directory: str) -> bool: """Runs the tests for the given airbyte-ci package in a Dagger container. 
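The run_test coroutine whose signature appears just above executes a package's test suite inside a Dagger container. Below is a minimal sketch of that pattern, assuming the era's dagger-io Python SDK in which Container.sync() raises dagger.ExecError when a with_exec fails (the same ExecError handled elsewhere in this patch); the real function also imports DOCKER_VERSION and sh_dash_c, which this sketch omits, and its container setup is not shown here.

    import sys

    import dagger


    async def run_test_sketch(poetry_package_path: str, test_directory: str) -> bool:
        async with dagger.Connection(dagger.Config(log_output=sys.stderr)) as dagger_client:
            try:
                await (
                    dagger_client.container()
                    .from_("python:3.10-slim")
                    .with_exec(["pip", "install", "poetry"])
                    # Mount the package to test from the host and run its suite.
                    .with_directory("/src", dagger_client.host().directory(poetry_package_path))
                    .with_workdir("/src")
                    .with_exec(["poetry", "install"])
                    .with_exec(["poetry", "run", "pytest", test_directory])
                    .sync()
                )
            except dagger.ExecError:
                return False
        return True

Something like anyio.run(run_test_sketch, "airbyte-ci/connectors/pipelines", "tests") would drive it, matching the anyio entry points used by the commands in this package.
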
diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py index 19a4a94783b9a..3043c218aa3c6 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py @@ -6,8 +6,7 @@ import pytest from pipelines.models.steps import StepStatus -from pipelines.pipeline.connectors.builds.steps import python_connectors -from pipelines.pipeline.connectors.builds.steps import build_customization +from pipelines.pipeline.connectors.builds.steps import build_customization, python_connectors from pipelines.pipeline.connectors.context import ConnectorContext pytestmark = [ diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index 10e5f58336ec4..ce469e8cb39e2 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -4,16 +4,16 @@ from typing import Callable -import pytest -from click.testing import CliRunner -from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage -from pipelines.models.bases import ConnectorWithModifiedFiles -from pipelines.commands.groups import connectors import pipelines.pipeline.connectors.builds.commands import pipelines.pipeline.connectors.commands import pipelines.pipeline.connectors.format.commands import pipelines.pipeline.connectors.publish.commands import pipelines.pipeline.connectors.test.commands +import pytest +from click.testing import CliRunner +from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage +from pipelines.commands.groups import connectors +from pipelines.models.bases import ConnectorWithModifiedFiles from tests.utils import pick_a_random_connector diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index bc7456594c439..f1072dad1c951 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -7,10 +7,10 @@ from typing import List import anyio +import pipelines.pipeline.metadata.pipeline import pytest from pipelines import publish from pipelines.models.steps import StepStatus -import pipelines.pipeline.metadata.pipeline pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py index 849d577c27dfb..5a2fda1680999 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py +++ b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py @@ -5,9 +5,9 @@ from pathlib import Path import pytest +from pipelines.helpers.utils import get_exec_result from pipelines.models.contexts import PipelineContext from pipelines.models.steps import MountPath -from pipelines.helpers.utils import get_exec_result from pipelines.pipeline.steps.docker import SimpleDockerStep pytestmark = [ diff --git a/airbyte-ci/connectors/pipelines/tests/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_utils.py index e58c164857068..9480d72e7643f 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_utils.py +++ b/airbyte-ci/connectors/pipelines/tests/test_utils.py @@ -5,10 +5,10 
@@ from pathlib import Path from unittest import mock +import pipelines.helpers.git import pytest from connector_ops.utils import Connector, ConnectorLanguage from pipelines.helpers import utils -import pipelines.helpers.git from tests.utils import pick_a_random_connector From cd5aef3a48f73c7e92ab13f6ad16f1a6143e15c5 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 17:55:20 -0500 Subject: [PATCH 11/38] fix extra numbers --- airbyte-ci/connectors/pipelines/pipelines/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/__init__.py index 00914e3c459dd..44a71b8f1b494 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/__init__.py @@ -28,5 +28,3 @@ ) main_logger = logging.getLogger(__name__) -0 -322+60 From 41ee5e24554d0b3c10dd58998890dbf8a4ce7a2a Mon Sep 17 00:00:00 2001 From: erohmensing Date: Tue, 17 Oct 2023 18:27:19 -0500 Subject: [PATCH 12/38] add python licenses to airbyte ci --- .../connectors/metadata_service/lib/tests/test_spec_cache.py | 4 ++++ airbyte-ci/connectors/pipelines/pipelines/cli/telemetry.py | 4 ++++ .../pipelines/pipelines/dagger/actions/connector/hooks.py | 4 ++++ .../pipelines/dagger/actions/connector/normalization.py | 4 ++++ .../pipelines/pipelines/dagger/actions/python/common.py | 1 + .../pipelines/pipelines/dagger/actions/python/pipx.py | 4 ++++ .../pipelines/pipelines/dagger/actions/python/poetry.py | 4 ++++ .../pipelines/pipelines/dagger/actions/system/common.py | 1 + .../pipelines/pipelines/dagger/actions/system/docker.py | 4 ++++ .../pipelines/pipelines/dagger/containers/internal_tools.py | 4 ++++ .../connectors/pipelines/pipelines/dagger/containers/java.py | 1 + .../pipelines/pipelines/dagger/containers/python.py | 4 ++++ .../pipelines/helpers/connectors/metadata_change_helpers.py | 1 + .../pipelines/pipelines/helpers/connectors/modifed.py | 4 ++++ airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py | 4 ++++ airbyte-ci/connectors/pipelines/pipelines/helpers/git.py | 4 ++++ airbyte-ci/connectors/pipelines/pipelines/models/steps.py | 1 + .../pipelines/pipeline/connectors/builds/commands.py | 4 ++++ .../pipelines/pipeline/connectors/builds/pipeline.py | 4 ++++ .../pipeline/connectors/builds/steps/build_customization.py | 1 + .../pipelines/pipeline/connectors/bump_version/commands.py | 4 ++++ .../pipelines/pipeline/connectors/bump_version/pipeline.py | 4 ++++ .../pipelines/pipelines/pipeline/connectors/commands.py | 4 ++++ .../pipelines/pipelines/pipeline/connectors/list/commands.py | 4 ++++ .../pipelines/pipelines/pipeline/connectors/list/pipeline.py | 4 ++++ .../pipeline/connectors/migrate_to_base_image/commands.py | 4 ++++ .../pipeline/connectors/migrate_to_base_image/pipeline.py | 1 + .../pipelines/pipeline/connectors/publish/commands.py | 4 ++++ .../pipelines/pipelines/pipeline/connectors/reports.py | 4 ++++ .../pipelines/pipelines/pipeline/connectors/test/commands.py | 4 ++++ .../pipelines/pipelines/pipeline/connectors/test/pipeline.py | 4 ++++ .../pipeline/connectors/upgrade_base_image/commands.py | 4 ++++ .../pipeline/connectors/upgrade_base_image/pipeline.py | 4 ++++ .../pipelines/pipelines/pipeline/metadata/commands.py | 4 ++++ .../pipelines/pipelines/pipeline/metadata/pipeline.py | 4 ++++ .../connectors/pipelines/pipelines/pipeline/steps/docker.py | 4 ++++ .../connectors/pipelines/pipelines/pipeline/steps/gradle.py | 4 ++++ 
.../connectors/pipelines/pipelines/pipeline/steps/no_op.py | 4 ++++ .../connectors/pipelines/pipelines/pipeline/steps/poetry.py | 4 ++++ .../connectors/pipelines/pipelines/pipeline/test/commands.py | 4 ++++ .../connectors/pipelines/pipelines/pipeline/test/pipeline.py | 4 ++++ .../pipelines/tests/test_builds/dummy_build_customization.py | 1 + .../pipelines/tests/test_tests/test_python_connectors.py | 1 + 43 files changed, 145 insertions(+) diff --git a/airbyte-ci/connectors/metadata_service/lib/tests/test_spec_cache.py b/airbyte-ci/connectors/metadata_service/lib/tests/test_spec_cache.py index d68887c7087cb..9ce15092fcb7a 100644 --- a/airbyte-ci/connectors/metadata_service/lib/tests/test_spec_cache.py +++ b/airbyte-ci/connectors/metadata_service/lib/tests/test_spec_cache.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from unittest.mock import patch import pytest diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/telemetry.py b/airbyte-ci/connectors/pipelines/pipelines/cli/telemetry.py index 7149e4858ea83..e3debded28259 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/cli/telemetry.py +++ b/airbyte-ci/connectors/pipelines/pipelines/cli/telemetry.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import getpass import hashlib import os diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py index d987d8202a650..f785daa1e4731 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import importlib.util from dagger import Container diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py index cf1e97965c2b2..411ccd64abdcb 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from dagger import Container, Platform from pipelines.pipeline.connectors.context import ConnectorContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py index c468efbd4d3f8..64d4ac14397ea 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + import re from pathlib import Path from typing import List, Optional diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py index ccc06414549df..14720a20020e5 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + from typing import List, Optional from dagger import Container diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py index 99558a778c67f..4a504e9684182 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import uuid from pathlib import Path from typing import List, Optional diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py index 7c822981a35b2..940d56a742608 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/common.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + from typing import List from dagger import Container diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py index bbd160d14353f..767d6d35ffe44 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import json import uuid from typing import Callable diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py index 9941c850073b4..861104ecf5a75 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from dagger import Container, Secret from pipelines.dagger.actions.python.pipx import with_installed_pipx_package from pipelines.dagger.containers.python import with_python_base diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py index cf849be7dadb9..aa75462f7ea4b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + from dagger import CacheVolume, Container, File, Platform from pipelines.consts import AMAZONCORRETTO_IMAGE from pipelines.dagger.actions.connector.hooks import finalize_build diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py index 07143e3dfeb13..bb8c75a2f922e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + from dagger import CacheVolume, Container from pipelines.consts import CONNECTOR_TESTING_REQUIREMENTS, LICENSE_SHORT_FILE_PATH, PYPROJECT_TOML_FILE_PATH from pipelines.helpers.utils import sh_dash_c diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/metadata_change_helpers.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/metadata_change_helpers.py index 179cfe4a6e7fb..c70bd4e19a0db 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/metadata_change_helpers.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/metadata_change_helpers.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + from pathlib import Path import yaml diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py index e52a5fcc29340..754033efcf648 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from pathlib import Path from typing import FrozenSet, Set, Union diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py index fa19df28ce21f..17e4c016d263f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/gcs.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import json from pathlib import Path from typing import Optional, Tuple diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py index 53613799c228e..0318f2e44d6ef 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/git.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from typing import List, Set import anyio diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py index 25238a3363d22..4c5f9a523da5d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/models/steps.py +++ b/airbyte-ci/connectors/pipelines/pipelines/models/steps.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + from __future__ import annotations import logging diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py index 11bd1f48c395e..58920b0e7d0fe 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py index e69de29bb2d1d..c4a035ec7f7db 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py @@ -0,0 +1,4 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py index 03d6f13f9757c..9ab4c063538e1 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + import importlib from logging import Logger from types import ModuleType diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py index 814c20f93afee..76a89cd62eca0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py index 023a641df14f9..0d98337c9c5b0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import datetime from copy import deepcopy diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index 93f4f5604b8ca..6b97caa00a6f5 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import os from pathlib import Path from typing import List, Set, Tuple diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py index 0d7d7aa7ebb4a..2cf89490f9248 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + import click from connector_ops.utils import console from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py index e69de29bb2d1d..c4a035ec7f7db 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py @@ -0,0 +1,4 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py index 8daeb04ae2625..3ad6c2ce95d25 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py index d6a9adcc4aa18..0feb60116a67c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + import textwrap from copy import deepcopy from typing import Optional diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py index b46e34319a614..2573cd435ecff 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import anyio import click from pipelines import main_logger diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py index 4cd6b98e39863..034b3b3b96c06 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import json import webbrowser from dataclasses import dataclass diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py index ab96c627d8fe0..5d4eb82508471 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + import sys import anyio diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py index e69de29bb2d1d..c4a035ec7f7db 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py @@ -0,0 +1,4 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py index 0226311dbca40..5b1d83d7907ad 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py index e69de29bb2d1d..c4a035ec7f7db 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py @@ -0,0 +1,4 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py index 38dcad7174c07..0c719b8aab76b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py index 92078654088ab..bc87a163f462d 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import uuid from typing import Optional diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py index 0a4fdd2da5168..1dd5c76c2af26 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from typing import List, Optional import dagger diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py index 1625425ed49df..924fc8807fcfb 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + from abc import ABC from typing import ClassVar, List diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py index 9d077cc155dc3..2a054c4df3dca 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from pipelines.models.contexts import PipelineContext from pipelines.models.steps import Step, StepResult, StepStatus diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py index a9c203692c676..3acaa28faff61 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + from pipelines.dagger.actions.python.poetry import with_poetry_module from pipelines.models.contexts import PipelineContext from pipelines.models.steps import Step, StepResult diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py index 05b5fc1c0ce6b..d73b4270b31b3 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import anyio import click from pipelines.pipeline.test.pipeline import run_test diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py index b381bf5d043e8..68e9dbb1386d2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py @@ -1,3 +1,7 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + import logging import os import sys diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py b/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py index c583f12a86935..6bc6ce686adc8 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # + from __future__ import annotations from typing import TYPE_CHECKING diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py index 3d2c25cf0512d..c3307083aa8bd 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py @@ -1,6 +1,7 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
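Patch 12/38 applies the same four-line copyright header to 43 files by hand. A minimal standalone checker for keeping such headers consistent is sketched below; it is not part of this patch series, and the HEADER constant and directory root are assumptions rather than the repo's actual tooling.

```python
from pathlib import Path

# The header these patches prepend to each Python file (copied from the hunks
# above; adjust if the canonical header ever changes).
HEADER = "#\n# Copyright (c) 2023 Airbyte, Inc., all rights reserved.\n#\n"

def files_missing_header(root: str) -> list[Path]:
    """Return every non-empty .py file under `root` that does not start with HEADER."""
    return [
        path
        for path in Path(root).rglob("*.py")
        if (text := path.read_text(encoding="utf-8")) and not text.startswith(HEADER)
    ]

if __name__ == "__main__":
    for path in files_missing_header("airbyte-ci/connectors/pipelines"):
        print(f"missing license header: {path}")
```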
# + import pytest from connector_ops.utils import Connector from pipelines.models.steps import StepResult From a66caba14719e49661ec8fa118ab0e3032e02a41 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Tue, 17 Oct 2023 16:42:25 -0700 Subject: [PATCH 13/38] Bump version --- airbyte-ci/connectors/pipelines/README.md | 5 +++-- airbyte-ci/connectors/pipelines/pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md index 24ec291f06d52..16a7bb63ed575 100644 --- a/airbyte-ci/connectors/pipelines/README.md +++ b/airbyte-ci/connectors/pipelines/README.md @@ -397,8 +397,9 @@ This command runs the Python tests for a airbyte-ci poetry package. ## Changelog | Version | PR | Description | -| ------- | ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | -| 2.0.2 | [#31533](https://github.com/airbytehq/airbyte/pull/31533) | Pip cache volume by python version. | +|---------| ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | +| 2.0.3 | [#31525](https://github.com/airbytehq/airbyte/pull/31525) | Refactor folder structure | +| 2.0.2 | [#31533](https://github.com/airbytehq/airbyte/pull/31533) | Pip cache volume by python version. | | 2.0.1 | [#31545](https://github.com/airbytehq/airbyte/pull/31545) | Reword the changelog entry when using `migrate-to-base-image`. | | 2.0.0 | [#31424](https://github.com/airbytehq/airbyte/pull/31424) | Remove `airbyte-ci connectors format` command. | | 1.9.4 | [#31478](https://github.com/airbytehq/airbyte/pull/31478) | Fix running tests for connector-ops package. | diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 4c018c04e4461..59fa5a0dc64a1 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "2.0.2" +version = "2.0.3" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "] From 86b2fece2d2f03e3eedf60a9581ad7ceb5f17d05 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Tue, 17 Oct 2023 17:47:28 -0700 Subject: [PATCH 14/38] Add more inits --- .../pipelines/pipelines/dagger/actions/connector/__init__.py | 3 +++ .../connectors/pipelines/pipelines/internal_tools/__init__.py | 3 +++ airbyte-ci/connectors/pipelines/pipelines/models/__init__.py | 3 +++ 3 files changed, 9 insertions(+) create mode 100644 airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py create mode 100644 airbyte-ci/connectors/pipelines/pipelines/models/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
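Patch 14/38 backfills `__init__.py` files for the new `dagger/actions/connector`, `internal_tools`, and `models` directories. Without them, those directories are importable only as implicit namespace packages, which some package-discovery tooling treats differently from regular packages. A quick way to check which kind a package is, assuming the repo's virtualenv since it imports `pipelines`:

```python
import importlib.util

# Regular packages resolve to a real __init__.py; implicit namespace packages
# report spec.origin as None on modern CPython.
spec = importlib.util.find_spec("pipelines.models")
kind = "regular package" if spec and spec.origin else "namespace package (or missing)"
print(kind)
```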
+# diff --git a/airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/models/__init__.py new file mode 100644 index 0000000000000..c941b30457953 --- /dev/null +++ b/airbyte-ci/connectors/pipelines/pipelines/models/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# From a42f2f2da5d9627565e4c56472da86c196602ca8 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 14:32:28 -0500 Subject: [PATCH 15/38] move list of internal tool paths to const file --- airbyte-ci/connectors/pipelines/pipelines/consts.py | 6 ++++++ .../pipelines/dagger/containers/internal_tools.py | 2 +- .../pipelines/pipelines/internal_tools/__init__.py | 3 --- .../pipelines/pipelines/internal_tools/internal.py | 11 ----------- .../pipelines/pipelines/pipeline/metadata/pipeline.py | 2 +- 5 files changed, 8 insertions(+), 16 deletions(-) delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/internal_tools/internal.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py b/airbyte-ci/connectors/pipelines/pipelines/consts.py index 94cf572e2829a..1e1b814222641 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/consts.py +++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py @@ -64,3 +64,9 @@ class ContextState(Enum): ERROR = {"github_state": "error", "description": "Something went wrong while running the Pipelines."} SUCCESSFUL = {"github_state": "success", "description": "All Pipelines ran successfully."} FAILURE = {"github_state": "failure", "description": "Pipeline failed."} + + +class INTERNAL_TOOL_PATHS(str, Enum): + CI_CREDENTIALS = "airbyte-ci/connectors/ci_credentials" + CONNECTOR_OPS = "airbyte-ci/connectors/connector_ops" + METADATA_SERVICE = "airbyte-ci/connectors/metadata_service/lib" diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py index 861104ecf5a75..c054b2c7c8177 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py @@ -5,7 +5,7 @@ from dagger import Container, Secret from pipelines.dagger.actions.python.pipx import with_installed_pipx_package from pipelines.dagger.containers.python import with_python_base -from pipelines.internal_tools.internal import INTERNAL_TOOL_PATHS +from pipelines.consts import INTERNAL_TOOL_PATHS from pipelines.pipeline.connectors.context import PipelineContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py deleted file mode 100644 index c941b30457953..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/internal_tools/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
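Patch 15/38 folds `INTERNAL_TOOL_PATHS` into the existing `consts.py` rather than keeping a one-enum `internal_tools.internal` module alive. Because the enum subclasses both `str` and `Enum`, its members already behave like the raw path strings wherever they are passed along. A self-contained sketch of that pattern:

```python
from enum import Enum

class INTERNAL_TOOL_PATHS(str, Enum):
    CI_CREDENTIALS = "airbyte-ci/connectors/ci_credentials"
    CONNECTOR_OPS = "airbyte-ci/connectors/connector_ops"
    METADATA_SERVICE = "airbyte-ci/connectors/metadata_service/lib"

# str subclassing makes members compare equal to plain strings, so call sites
# can accept either the member or its .value without conversion glue.
assert INTERNAL_TOOL_PATHS.CI_CREDENTIALS == "airbyte-ci/connectors/ci_credentials"
assert INTERNAL_TOOL_PATHS.METADATA_SERVICE.value.endswith("metadata_service/lib")
```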
-# diff --git a/airbyte-ci/connectors/pipelines/pipelines/internal_tools/internal.py b/airbyte-ci/connectors/pipelines/pipelines/internal_tools/internal.py deleted file mode 100644 index c95c98e9e8fc7..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/internal_tools/internal.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from enum import Enum - - -class INTERNAL_TOOL_PATHS(str, Enum): - CI_CREDENTIALS = "airbyte-ci/connectors/ci_credentials" - CONNECTOR_OPS = "airbyte-ci/connectors/connector_ops" - METADATA_SERVICE = "airbyte-ci/connectors/metadata_service/lib" diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py index bc87a163f462d..97c0c49856644 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py @@ -11,7 +11,7 @@ from pipelines.dagger.containers.python import with_python_base from pipelines.helpers.steps import run_steps from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable -from pipelines.internal_tools.internal import INTERNAL_TOOL_PATHS +from pipelines.consts import INTERNAL_TOOL_PATHS from pipelines.models.reports import Report from pipelines.models.steps import MountPath, Step, StepResult from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext From 9f4ca1eda9dd4616bbf02789eb0672db80680479 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 20:20:37 +0000 Subject: [PATCH 16/38] Automated Commit - Formatting Changes --- .../pipelines/pipelines/dagger/containers/internal_tools.py | 2 +- .../pipelines/pipelines/pipeline/connectors/builds/pipeline.py | 1 - .../pipelines/pipelines/pipeline/connectors/list/pipeline.py | 1 - .../pipelines/pipelines/pipeline/connectors/test/pipeline.py | 1 - .../pipeline/connectors/upgrade_base_image/pipeline.py | 1 - .../pipelines/pipelines/pipeline/metadata/pipeline.py | 3 +-- 6 files changed, 2 insertions(+), 7 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py index c054b2c7c8177..300a1f59a657f 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py @@ -3,9 +3,9 @@ # from dagger import Container, Secret +from pipelines.consts import INTERNAL_TOOL_PATHS from pipelines.dagger.actions.python.pipx import with_installed_pipx_package from pipelines.dagger.containers.python import with_python_base -from pipelines.consts import INTERNAL_TOOL_PATHS from pipelines.pipeline.connectors.context import PipelineContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py index c4a035ec7f7db..c941b30457953 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py @@ -1,4 +1,3 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# - diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py index c4a035ec7f7db..c941b30457953 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py @@ -1,4 +1,3 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py index c4a035ec7f7db..c941b30457953 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py @@ -1,4 +1,3 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py index c4a035ec7f7db..c941b30457953 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py @@ -1,4 +1,3 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py index 97c0c49856644..685127741c258 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py @@ -6,12 +6,11 @@ from typing import Optional import dagger -from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH +from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH, INTERNAL_TOOL_PATHS from pipelines.dagger.actions.python.common import with_pip_packages from pipelines.dagger.containers.python import with_python_base from pipelines.helpers.steps import run_steps from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable -from pipelines.consts import INTERNAL_TOOL_PATHS from pipelines.models.reports import Report from pipelines.models.steps import MountPath, Step, StepResult from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext From b4185a6858d100b22faf9e73deed6ccc685e093a Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 13:22:58 -0700 Subject: [PATCH 17/38] Update step imports --- .../pipelines/helpers/connectors/modifed.py | 13 +++- .../pipelines/pipelines/models/bases.py | 23 ------ .../pipelines/pipeline/connectors/commands.py | 2 +- .../pipelines/pipeline/connectors/context.py | 2 +- .../pipeline/connectors/publish/context.py | 2 +- .../connectors/pipelines/tests/test_bases.py | 48 ++++++------ .../test_groups/test_connectors.py | 2 +- .../connectors/pipelines/tests/test_gradle.py | 11 +-- .../pipelines/tests/test_publish.py | 78 +++++++++---------- .../pipelines/tests/test_tests/test_common.py | 3 +- 10 files changed, 87 insertions(+), 97 deletions(-) delete mode 100644 airbyte-ci/connectors/pipelines/pipelines/models/bases.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py index 754033efcf648..70209b1d6e19d 100644 --- 
a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py @@ -2,12 +2,14 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from dataclasses import dataclass, field from pathlib import Path from typing import FrozenSet, Set, Union +from anyio import Path from connector_ops.utils import Connector from pipelines import main_logger -from pipelines.helpers.utils import IGNORED_FILE_EXTENSIONS +from pipelines.helpers.utils import IGNORED_FILE_EXTENSIONS, METADATA_FILE_NAME def get_connector_modified_files(connector: Connector, all_modified_files: Set[Path]) -> FrozenSet[Path]: @@ -58,3 +60,12 @@ def get_modified_connectors(modified_files: Set[Path], all_connectors: Set[Conne if not _is_ignored_file(modified_file): modified_connectors.update(_find_modified_connectors(modified_file, all_connectors, dependency_scanning)) return modified_connectors + + +@dataclass(frozen=True) +class ConnectorWithModifiedFiles(Connector): + modified_files: Set[Path] = field(default_factory=frozenset) + + @property + def has_metadata_change(self) -> bool: + return any(path.name == METADATA_FILE_NAME for path in self.modified_files) diff --git a/airbyte-ci/connectors/pipelines/pipelines/models/bases.py b/airbyte-ci/connectors/pipelines/pipelines/models/bases.py deleted file mode 100644 index 01e769018327a..0000000000000 --- a/airbyte-ci/connectors/pipelines/pipelines/models/bases.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -"""This module declare base / abstract models to be reused in a pipeline lifecycle.""" - -from __future__ import annotations - -from dataclasses import dataclass, field -from typing import Set - -from anyio import Path -from connector_ops.utils import Connector -from pipelines.helpers.utils import METADATA_FILE_NAME - - -@dataclass(frozen=True) -class ConnectorWithModifiedFiles(Connector): - modified_files: Set[Path] = field(default_factory=frozenset) - - @property - def has_metadata_change(self) -> bool: - return any(path.name == METADATA_FILE_NAME for path in self.modified_files) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index 6b97caa00a6f5..a51fcee75837b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -10,7 +10,7 @@ from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo from pipelines import main_logger from pipelines.helpers.connectors.modifed import get_connector_modified_files, get_modified_connectors -from pipelines.models.bases import ConnectorWithModifiedFiles +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles ALL_CONNECTORS = get_all_connectors_in_repo() diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py index 5b098b969c70e..0bcc46db251cc 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py @@ -17,7 +17,7 @@ from pipelines.helpers.github import update_commit_status_check from pipelines.helpers.slack import send_message_to_webhook from pipelines.helpers.utils import METADATA_FILE_NAME -from 
pipelines.models.bases import ConnectorWithModifiedFiles +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.models.contexts import PipelineContext from pipelines.pipeline.connectors.reports import ConnectorReport diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py index de7fe25a169f9..996b9d2a9fd8e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py @@ -11,7 +11,7 @@ from pipelines.consts import ContextState from pipelines.helpers.gcs import sanitize_gcs_credentials from pipelines.helpers.utils import format_duration -from pipelines.models.bases import ConnectorWithModifiedFiles +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.pipeline.connectors import ConnectorContext diff --git a/airbyte-ci/connectors/pipelines/tests/test_bases.py b/airbyte-ci/connectors/pipelines/tests/test_bases.py index 870723e408df7..a960174c9faf4 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_bases.py +++ b/airbyte-ci/connectors/pipelines/tests/test_bases.py @@ -7,7 +7,7 @@ import anyio import pytest from dagger import DaggerError -from pipelines.models import bases +from pipelines.models import steps pytestmark = [ pytest.mark.anyio, @@ -15,14 +15,14 @@ class TestStep: - class DummyStep(bases.Step): + class DummyStep(steps.Step): title = "Dummy step" max_retries = 3 max_duration = timedelta(seconds=2) - async def _run(self, run_duration: timedelta) -> bases.StepResult: + async def _run(self, run_duration: timedelta) -> steps.StepResult: await anyio.sleep(run_duration.total_seconds()) - return bases.StepResult(self, bases.StepStatus.SUCCESS) + return steps.StepResult(self, steps.StepStatus.SUCCESS) @pytest.fixture def test_context(self, mocker): @@ -31,7 +31,7 @@ def test_context(self, mocker): async def test_run_with_timeout(self, test_context): step = self.DummyStep(test_context) step_result = await step.run(run_duration=step.max_duration - timedelta(seconds=1)) - assert step_result.status == bases.StepStatus.SUCCESS + assert step_result.status == steps.StepStatus.SUCCESS assert step.retry_count == 0 step_result = await step.run(run_duration=step.max_duration + timedelta(seconds=1)) @@ -45,19 +45,19 @@ async def test_run_with_timeout(self, test_context): @pytest.mark.parametrize( "step_status, exc_info, max_retries, max_dagger_error_retries, expect_retry", [ - (bases.StepStatus.SUCCESS, None, 0, 0, False), - (bases.StepStatus.SUCCESS, None, 3, 0, False), - (bases.StepStatus.SUCCESS, None, 0, 3, False), - (bases.StepStatus.SUCCESS, None, 3, 3, False), - (bases.StepStatus.SKIPPED, None, 0, 0, False), - (bases.StepStatus.SKIPPED, None, 3, 0, False), - (bases.StepStatus.SKIPPED, None, 0, 3, False), - (bases.StepStatus.SKIPPED, None, 3, 3, False), - (bases.StepStatus.FAILURE, DaggerError(), 0, 0, False), - (bases.StepStatus.FAILURE, DaggerError(), 0, 3, True), - (bases.StepStatus.FAILURE, None, 0, 0, False), - (bases.StepStatus.FAILURE, None, 0, 3, False), - (bases.StepStatus.FAILURE, None, 3, 0, True), + (steps.StepStatus.SUCCESS, None, 0, 0, False), + (steps.StepStatus.SUCCESS, None, 3, 0, False), + (steps.StepStatus.SUCCESS, None, 0, 3, False), + (steps.StepStatus.SUCCESS, None, 3, 3, False), + (steps.StepStatus.SKIPPED, None, 0, 0, False), + (steps.StepStatus.SKIPPED, None, 
3, 0, False), + (steps.StepStatus.SKIPPED, None, 0, 3, False), + (steps.StepStatus.SKIPPED, None, 3, 3, False), + (steps.StepStatus.FAILURE, DaggerError(), 0, 0, False), + (steps.StepStatus.FAILURE, DaggerError(), 0, 3, True), + (steps.StepStatus.FAILURE, None, 0, 0, False), + (steps.StepStatus.FAILURE, None, 0, 3, False), + (steps.StepStatus.FAILURE, None, 3, 0, True), ], ) async def test_run_with_retries(self, mocker, test_context, step_status, exc_info, max_retries, max_dagger_error_retries, expect_retry): @@ -67,7 +67,7 @@ async def test_run_with_retries(self, mocker, test_context, step_status, exc_inf step.max_duration = timedelta(seconds=60) step.retry_delay = timedelta(seconds=0) step._run = mocker.AsyncMock( - side_effect=[bases.StepResult(step, step_status, exc_info=exc_info)] * (max(max_dagger_error_retries, max_retries) + 1) + side_effect=[steps.StepResult(step, step_status, exc_info=exc_info)] * (max(max_dagger_error_retries, max_retries) + 1) ) step_result = await step.run() @@ -87,21 +87,21 @@ def test_context(self, mocker): def test_report_failed_if_it_has_no_step_result(self, test_context): report = bases.Report(test_context, []) assert not report.success - report = bases.Report(test_context, [bases.StepResult(None, bases.StepStatus.FAILURE)]) + report = bases.Report(test_context, [steps.StepResult(None, steps.StepStatus.FAILURE)]) assert not report.success report = bases.Report( - test_context, [bases.StepResult(None, bases.StepStatus.FAILURE), bases.StepResult(None, bases.StepStatus.SUCCESS)] + test_context, [steps.StepResult(None, steps.StepStatus.FAILURE), steps.StepResult(None, steps.StepStatus.SUCCESS)] ) assert not report.success - report = bases.Report(test_context, [bases.StepResult(None, bases.StepStatus.SUCCESS)]) + report = bases.Report(test_context, [steps.StepResult(None, steps.StepStatus.SUCCESS)]) assert report.success report = bases.Report( - test_context, [bases.StepResult(None, bases.StepStatus.SUCCESS), bases.StepResult(None, bases.StepStatus.SKIPPED)] + test_context, [steps.StepResult(None, steps.StepStatus.SUCCESS), steps.StepResult(None, steps.StepStatus.SKIPPED)] ) assert report.success - report = bases.Report(test_context, [bases.StepResult(None, bases.StepStatus.SKIPPED)]) + report = bases.Report(test_context, [steps.StepResult(None, steps.StepStatus.SKIPPED)]) assert report.success diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index ce469e8cb39e2..c7a576795acfd 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -13,7 +13,7 @@ from click.testing import CliRunner from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage from pipelines.commands.groups import connectors -from pipelines.models.bases import ConnectorWithModifiedFiles +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from tests.utils import pick_a_random_connector diff --git a/airbyte-ci/connectors/pipelines/tests/test_gradle.py b/airbyte-ci/connectors/pipelines/tests/test_gradle.py index 1fbbf2e1ce637..2af9655b20838 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_gradle.py +++ b/airbyte-ci/connectors/pipelines/tests/test_gradle.py @@ -5,8 +5,9 @@ from pathlib import Path import pytest -from pipelines import gradle -from pipelines.models import bases +import 
pipelines.helpers.connectors.modifed +from pipelines.models import steps +from pipelines.pipeline.steps import gradle pytestmark = [ pytest.mark.anyio, @@ -17,15 +18,15 @@ class TestGradleTask: class DummyStep(gradle.GradleTask): gradle_task_name = "dummyTask" - async def _run(self) -> bases.StepResult: - return bases.StepResult(self, bases.StepStatus.SUCCESS) + async def _run(self) -> steps.StepResult: + return steps.StepResult(self, steps.StepStatus.SUCCESS) @pytest.fixture def test_context(self, mocker, dagger_client): return mocker.Mock( secrets_to_mask=[], dagger_client=dagger_client, - connector=bases.ConnectorWithModifiedFiles( + connector=pipelines.helpers.connectors.modifed.ConnectorWithModifiedFiles( "source-postgres", frozenset({Path("airbyte-integrations/connectors/source-postgres/metadata.yaml")}) ), ) diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index f1072dad1c951..114e0596d4667 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -9,7 +9,7 @@ import anyio import pipelines.pipeline.metadata.pipeline import pytest -from pipelines import publish +from pipelines.pipeline.connectors import publish from pipelines.models.steps import StepStatus pytestmark = [ @@ -39,13 +39,13 @@ async def test_run_skipped_when_already_published(self, three_random_connectors_ """We pick three random connectors from the OSS registry. They should be published. We check that the step is skipped.""" for image_name in three_random_connectors_image_names: publish_context.docker_image = image_name - step = publish.CheckConnectorImageDoesNotExist(publish_context) + step = publish.pipeline.CheckConnectorImageDoesNotExist(publish_context) step_result = await step.run() assert step_result.status == StepStatus.SKIPPED async def test_run_success_when_already_published(self, publish_context): publish_context.docker_image = "airbyte/source-pokeapi:0.0.0" - step = publish.CheckConnectorImageDoesNotExist(publish_context) + step = publish.pipeline.CheckConnectorImageDoesNotExist(publish_context) step_result = await step.run() assert step_result.status == StepStatus.SUCCESS @@ -84,13 +84,13 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r ) if not valid_spec: mocker.patch.object( - publish.UploadSpecToCache, "_get_connector_spec", mocker.Mock(side_effect=publish.InvalidSpecOutputError("Invalid spec.")) + publish.pipeline.UploadSpecToCache, "_get_connector_spec", mocker.Mock(side_effect=publish.pipeline.InvalidSpecOutputError("Invalid spec.")) ) - step = publish.UploadSpecToCache(publish_context) + step = publish.pipeline.UploadSpecToCache(publish_context) step_result = await step.run(connector_container) if valid_spec: - publish.upload_to_gcs.assert_called_once_with( + publish.pipeline.upload_to_gcs.assert_called_once_with( publish_context.dagger_client, mocker.ANY, f"specs/{image_name.replace(':', '/')}/spec.json", @@ -99,7 +99,7 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r flags=['--cache-control="no-cache"'], ) - spec_file = publish.upload_to_gcs.call_args.args[1] + spec_file = publish.pipeline.upload_to_gcs.call_args.args[1] uploaded_content = await spec_file.contents() assert json.loads(uploaded_content) == expected_spec @@ -115,42 +115,42 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r assert step_result.status == StepStatus.FAILURE assert 
step_result.stderr == "Invalid spec." assert step_result.stdout is None - publish.upload_to_gcs.assert_not_called() + publish.pipeline.upload_to_gcs.assert_not_called() def test_parse_spec_output_valid(self, publish_context, random_connector): - step = publish.UploadSpecToCache(publish_context) + step = publish.pipeline.UploadSpecToCache(publish_context) correct_spec_message = json.dumps({"type": "SPEC", "spec": random_connector["spec"]}) spec_output = f'random_stuff\n{{"type": "RANDOM_MESSAGE"}}\n{correct_spec_message}' result = step._parse_spec_output(spec_output) assert json.loads(result) == random_connector["spec"] def test_parse_spec_output_invalid_json(self, publish_context): - step = publish.UploadSpecToCache(publish_context) + step = publish.pipeline.UploadSpecToCache(publish_context) spec_output = "Invalid JSON" - with pytest.raises(publish.InvalidSpecOutputError): + with pytest.raises(publish.pipeline.InvalidSpecOutputError): step._parse_spec_output(spec_output) def test_parse_spec_output_invalid_key(self, publish_context): - step = publish.UploadSpecToCache(publish_context) + step = publish.pipeline.UploadSpecToCache(publish_context) spec_output = '{"type": "SPEC", "spec": {"invalid_key": "value"}}' - with pytest.raises(publish.InvalidSpecOutputError): + with pytest.raises(publish.pipeline.InvalidSpecOutputError): step._parse_spec_output(spec_output) def test_parse_spec_output_no_spec(self, publish_context): - step = publish.UploadSpecToCache(publish_context) + step = publish.pipeline.UploadSpecToCache(publish_context) spec_output = '{"type": "OTHER"}' - with pytest.raises(publish.InvalidSpecOutputError): + with pytest.raises(publish.pipeline.InvalidSpecOutputError): step._parse_spec_output(spec_output) STEPS_TO_PATCH = [ - (publish.metadata, "MetadataValidation"), - (publish.metadata, "MetadataUpload"), + (publish.pipeline.metadata, "MetadataValidation"), + (publish.pipeline.metadata, "MetadataUpload"), (publish, "CheckConnectorImageDoesNotExist"), (publish, "UploadSpecToCache"), (publish, "PushConnectorImageToRegistry"), (publish, "PullConnectorImageFromRegistry"), - (publish.steps, "run_connector_build"), + (publish.pipeline.steps, "run_connector_build"), ] @@ -165,7 +165,7 @@ async def test_run_connector_publish_pipeline_when_failed_validation(mocker, pre context = mocker.MagicMock(pre_release=pre_release) semaphore = anyio.Semaphore(1) - report = await publish.run_connector_publish_pipeline(context, semaphore) + report = await publish.pipeline.run_connector_publish_pipeline(context, semaphore) run_metadata_validation.assert_called_once() # Check that nothing else is called @@ -200,16 +200,16 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker run_metadata_validation.return_value = mocker.Mock(status=StepStatus.SUCCESS) # ensure spec always succeeds - run_upload_spec_to_cache = publish.UploadSpecToCache.return_value.run + run_upload_spec_to_cache = publish.pipeline.UploadSpecToCache.return_value.run run_upload_spec_to_cache.return_value = mocker.Mock(status=StepStatus.SUCCESS) - run_check_connector_image_does_not_exist = publish.CheckConnectorImageDoesNotExist.return_value.run + run_check_connector_image_does_not_exist = publish.pipeline.CheckConnectorImageDoesNotExist.return_value.run run_check_connector_image_does_not_exist.return_value = mocker.Mock(status=check_image_exists_status) - run_metadata_upload = publish.metadata.MetadataUpload.return_value.run + run_metadata_upload = publish.pipeline.metadata.MetadataUpload.return_value.run 
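The `STEPS_TO_PATCH` table above drives pytest-mock patching so that each publish step can be replaced by a mock and asserted on in isolation. A reduced, self-contained sketch of the pattern follows; it requires the pytest-mock plugin for the `mocker` fixture, and the `fake_pipeline` module and step names are illustrative stand-ins, not the repo's real ones.

```python
import types

import pytest

# Stand-in module holding two "step" classes, mirroring the (module, name) pairs.
fake_pipeline = types.SimpleNamespace(CheckImageExists=object(), UploadSpec=object())
STEPS_TO_PATCH = [(fake_pipeline, "CheckImageExists"), (fake_pipeline, "UploadSpec")]

@pytest.fixture
def patched_steps(mocker):
    # mocker.patch.object swaps each step for a MagicMock and undoes it at teardown.
    return {name: mocker.patch.object(module, name) for module, name in STEPS_TO_PATCH}

def test_steps_are_mocked(patched_steps):
    patched_steps["UploadSpec"].run()
    patched_steps["UploadSpec"].run.assert_called_once()
    patched_steps["CheckImageExists"].run.assert_not_called()
```

The later loop of `assert_called_once` / `assert_not_called` checks in the test then verifies the same invariant the mocks make observable: publish steps run in order and everything after the first failing step is skipped.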
semaphore = anyio.Semaphore(1) - report = await publish.run_connector_publish_pipeline(publish_context, semaphore) + report = await publish.pipeline.run_connector_publish_pipeline(publish_context, semaphore) run_metadata_validation.assert_called_once() run_check_connector_image_does_not_exist.assert_called_once() @@ -270,7 +270,7 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run.return_value = mocker.Mock( name="metadata_validation_result", status=StepStatus.SUCCESS ) - publish.CheckConnectorImageDoesNotExist.return_value.run.return_value = mocker.Mock( + publish.pipeline.CheckConnectorImageDoesNotExist.return_value.run.return_value = mocker.Mock( name="check_connector_image_does_not_exist_result", status=StepStatus.SUCCESS ) @@ -278,22 +278,22 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( built_connector_platform = mocker.Mock() built_connector_platform.values.return_value = ["linux/amd64"] - publish.steps.run_connector_build.return_value = mocker.Mock( + publish.pipeline.steps.run_connector_build.return_value = mocker.Mock( name="build_connector_for_publish_result", status=build_step_status, output_artifact=built_connector_platform ) - publish.PushConnectorImageToRegistry.return_value.run.return_value = mocker.Mock( + publish.pipeline.PushConnectorImageToRegistry.return_value.run.return_value = mocker.Mock( name="push_connector_image_to_registry_result", status=push_step_status ) - publish.PullConnectorImageFromRegistry.return_value.run.return_value = mocker.Mock( + publish.pipeline.PullConnectorImageFromRegistry.return_value.run.return_value = mocker.Mock( name="pull_connector_image_from_registry_result", status=pull_step_status ) - publish.UploadSpecToCache.return_value.run.return_value = mocker.Mock( + publish.pipeline.UploadSpecToCache.return_value.run.return_value = mocker.Mock( name="upload_spec_to_cache_result", status=upload_to_spec_cache_step_status ) - publish.metadata.MetadataUpload.return_value.run.return_value = mocker.Mock( + publish.pipeline.metadata.MetadataUpload.return_value.run.return_value = mocker.Mock( name="metadata_upload_result", status=metadata_upload_step_status ) @@ -301,14 +301,14 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( pre_release=pre_release, ) semaphore = anyio.Semaphore(1) - report = await publish.run_connector_publish_pipeline(context, semaphore) + report = await publish.pipeline.run_connector_publish_pipeline(context, semaphore) steps_to_run = [ pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run, - publish.CheckConnectorImageDoesNotExist.return_value.run, - publish.steps.run_connector_build, - publish.PushConnectorImageToRegistry.return_value.run, - publish.PullConnectorImageFromRegistry.return_value.run, + publish.pipeline.CheckConnectorImageDoesNotExist.return_value.run, + publish.pipeline.steps.run_connector_build, + publish.pipeline.PushConnectorImageToRegistry.return_value.run, + publish.pipeline.PullConnectorImageFromRegistry.return_value.run, ] for i, step_to_run in enumerate(steps_to_run): @@ -325,9 +325,9 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( step_to_run.assert_not_called() break if build_step_status is StepStatus.SUCCESS: - publish.PushConnectorImageToRegistry.return_value.run.assert_called_once_with(["linux/amd64"]) + publish.pipeline.PushConnectorImageToRegistry.return_value.run.assert_called_once_with(["linux/amd64"]) 
else: - publish.PushConnectorImageToRegistry.return_value.run.assert_not_called() - publish.PullConnectorImageFromRegistry.return_value.run.assert_not_called() - publish.UploadSpecToCache.return_value.run.assert_not_called() - publish.metadata.MetadataUpload.return_value.run.assert_not_called() + publish.pipeline.PushConnectorImageToRegistry.return_value.run.assert_not_called() + publish.pipeline.PullConnectorImageFromRegistry.return_value.run.assert_not_called() + publish.pipeline.UploadSpecToCache.return_value.run.assert_not_called() + publish.pipeline.metadata.MetadataUpload.return_value.run.assert_not_called() diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py index 2d9ce079c9ded..57ec2cc8f8c5a 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py @@ -11,7 +11,8 @@ import pytest import yaml from freezegun import freeze_time -from pipelines.models.bases import ConnectorWithModifiedFiles, StepStatus +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles +from pipelines.models.bases import StepStatus from pipelines.pipeline.connectors.test.steps import common pytestmark = [ From c64a438b27d443ff9cc335b7efbc2e3c1a7b8f84 Mon Sep 17 00:00:00 2001 From: bnchrch Date: Wed, 18 Oct 2023 20:49:41 +0000 Subject: [PATCH 18/38] Automated Commit - Formatting Changes --- .../pipelines/pipelines/helpers/connectors/modifed.py | 2 +- .../pipelines/pipelines/pipeline/connectors/commands.py | 3 +-- .../pipelines/pipelines/pipeline/connectors/context.py | 2 +- .../pipelines/pipeline/connectors/publish/context.py | 2 +- airbyte-ci/connectors/pipelines/tests/test_gradle.py | 2 +- airbyte-ci/connectors/pipelines/tests/test_publish.py | 6 ++++-- 6 files changed, 9 insertions(+), 8 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py index 70209b1d6e19d..2cf33dd925ca5 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/connectors/modifed.py @@ -5,8 +5,8 @@ from dataclasses import dataclass, field from pathlib import Path from typing import FrozenSet, Set, Union -from anyio import Path +from anyio import Path from connector_ops.utils import Connector from pipelines import main_logger from pipelines.helpers.utils import IGNORED_FILE_EXTENSIONS, METADATA_FILE_NAME diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index a51fcee75837b..c38ad85f01123 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -9,8 +9,7 @@ import click from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo from pipelines import main_logger -from pipelines.helpers.connectors.modifed import get_connector_modified_files, get_modified_connectors -from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors ALL_CONNECTORS = get_all_connectors_in_repo() diff --git 
a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py index 0bcc46db251cc..08f88eb66d080 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py @@ -14,10 +14,10 @@ from dagger import Directory from github import PullRequest from pipelines.dagger.actions import secrets +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.helpers.github import update_commit_status_check from pipelines.helpers.slack import send_message_to_webhook from pipelines.helpers.utils import METADATA_FILE_NAME -from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.models.contexts import PipelineContext from pipelines.pipeline.connectors.reports import ConnectorReport diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py index 996b9d2a9fd8e..8d52cf2d02b50 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py @@ -9,9 +9,9 @@ from dagger import Secret from github import PullRequest from pipelines.consts import ContextState +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.helpers.gcs import sanitize_gcs_credentials from pipelines.helpers.utils import format_duration -from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.pipeline.connectors import ConnectorContext diff --git a/airbyte-ci/connectors/pipelines/tests/test_gradle.py b/airbyte-ci/connectors/pipelines/tests/test_gradle.py index 2af9655b20838..94bb1ea23f20e 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_gradle.py +++ b/airbyte-ci/connectors/pipelines/tests/test_gradle.py @@ -4,8 +4,8 @@ from pathlib import Path -import pytest import pipelines.helpers.connectors.modifed +import pytest from pipelines.models import steps from pipelines.pipeline.steps import gradle diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 114e0596d4667..039d5b1aea630 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -9,8 +9,8 @@ import anyio import pipelines.pipeline.metadata.pipeline import pytest -from pipelines.pipeline.connectors import publish from pipelines.models.steps import StepStatus +from pipelines.pipeline.connectors import publish pytestmark = [ pytest.mark.anyio, @@ -84,7 +84,9 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r ) if not valid_spec: mocker.patch.object( - publish.pipeline.UploadSpecToCache, "_get_connector_spec", mocker.Mock(side_effect=publish.pipeline.InvalidSpecOutputError("Invalid spec.")) + publish.pipeline.UploadSpecToCache, + "_get_connector_spec", + mocker.Mock(side_effect=publish.pipeline.InvalidSpecOutputError("Invalid spec.")), ) step = publish.pipeline.UploadSpecToCache(publish_context) From e4ba25d964051d30720828641cbc46759bcc11e7 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 14:07:16 -0700 Subject: [PATCH 19/38] Fix imports --- .../builds/steps/java_connectors.py | 2 +- .../pipeline/connectors/publish/context.py | 2 +- 
.../pipelines/tests/test_publish.py | 100 +++++++++--------- .../connectors/pipelines/tests/test_utils.py | 17 +-- 4 files changed, 61 insertions(+), 60 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py index 6e638ee915350..4e0376085138a 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py @@ -10,7 +10,7 @@ from pipelines.models.steps import StepResult, StepStatus from pipelines.pipeline.connectors.builds.steps.common import BuildConnectorImagesBase from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipelines.models.steps import GradleTask +from pipelines.pipeline.steps.gradle import GradleTask class BuildConnectorDistributionTar(GradleTask): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py index 8d52cf2d02b50..c5c839ef746e7 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py @@ -12,7 +12,7 @@ from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.helpers.gcs import sanitize_gcs_credentials from pipelines.helpers.utils import format_duration -from pipelines.pipeline.connectors import ConnectorContext +from pipelines.pipeline.connectors.context import ConnectorContext class PublishConnectorContext(ConnectorContext): diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 039d5b1aea630..d1978049dff7e 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -7,10 +7,10 @@ from typing import List import anyio -import pipelines.pipeline.metadata.pipeline import pytest from pipelines.models.steps import StepStatus -from pipelines.pipeline.connectors import publish +from pipelines.pipeline.connectors.publish import pipeline as publish_pipeline +from pipelines.pipeline.metadata import pipeline as metadata_pipeline pytestmark = [ pytest.mark.anyio, @@ -39,13 +39,13 @@ async def test_run_skipped_when_already_published(self, three_random_connectors_ """We pick three random connectors from the OSS registry. They should be published. 
We check that the step is skipped.""" for image_name in three_random_connectors_image_names: publish_context.docker_image = image_name - step = publish.pipeline.CheckConnectorImageDoesNotExist(publish_context) + step = publish_pipeline.CheckConnectorImageDoesNotExist(publish_context) step_result = await step.run() assert step_result.status == StepStatus.SKIPPED async def test_run_success_when_already_published(self, publish_context): publish_context.docker_image = "airbyte/source-pokeapi:0.0.0" - step = publish.pipeline.CheckConnectorImageDoesNotExist(publish_context) + step = publish_pipeline.CheckConnectorImageDoesNotExist(publish_context) step_result = await step.run() assert step_result.status == StepStatus.SUCCESS @@ -80,19 +80,19 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r upload_exit_code = 0 if successful_upload else 1 mocker.patch.object( - publish, "upload_to_gcs", mocker.AsyncMock(return_value=(upload_exit_code, "upload_to_gcs_stdout", "upload_to_gcs_stderr")) + publish_pipeline, "upload_to_gcs", mocker.AsyncMock(return_value=(upload_exit_code, "upload_to_gcs_stdout", "upload_to_gcs_stderr")) ) if not valid_spec: mocker.patch.object( - publish.pipeline.UploadSpecToCache, + publish_pipeline.UploadSpecToCache, "_get_connector_spec", - mocker.Mock(side_effect=publish.pipeline.InvalidSpecOutputError("Invalid spec.")), + mocker.Mock(side_effect=publish_pipeline.InvalidSpecOutputError("Invalid spec.")), ) - step = publish.pipeline.UploadSpecToCache(publish_context) + step = publish_pipeline.UploadSpecToCache(publish_context) step_result = await step.run(connector_container) if valid_spec: - publish.pipeline.upload_to_gcs.assert_called_once_with( + publish_pipeline.upload_to_gcs.assert_called_once_with( publish_context.dagger_client, mocker.ANY, f"specs/{image_name.replace(':', '/')}/spec.json", @@ -101,7 +101,7 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r flags=['--cache-control="no-cache"'], ) - spec_file = publish.pipeline.upload_to_gcs.call_args.args[1] + spec_file = publish_pipeline.upload_to_gcs.call_args.args[1] uploaded_content = await spec_file.contents() assert json.loads(uploaded_content) == expected_spec @@ -117,42 +117,42 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r assert step_result.status == StepStatus.FAILURE assert step_result.stderr == "Invalid spec." 
assert step_result.stdout is None - publish.pipeline.upload_to_gcs.assert_not_called() + publish_pipeline.upload_to_gcs.assert_not_called() def test_parse_spec_output_valid(self, publish_context, random_connector): - step = publish.pipeline.UploadSpecToCache(publish_context) + step = publish_pipeline.UploadSpecToCache(publish_context) correct_spec_message = json.dumps({"type": "SPEC", "spec": random_connector["spec"]}) spec_output = f'random_stuff\n{{"type": "RANDOM_MESSAGE"}}\n{correct_spec_message}' result = step._parse_spec_output(spec_output) assert json.loads(result) == random_connector["spec"] def test_parse_spec_output_invalid_json(self, publish_context): - step = publish.pipeline.UploadSpecToCache(publish_context) + step = publish_pipeline.UploadSpecToCache(publish_context) spec_output = "Invalid JSON" - with pytest.raises(publish.pipeline.InvalidSpecOutputError): + with pytest.raises(publish_pipeline.InvalidSpecOutputError): step._parse_spec_output(spec_output) def test_parse_spec_output_invalid_key(self, publish_context): - step = publish.pipeline.UploadSpecToCache(publish_context) + step = publish_pipeline.UploadSpecToCache(publish_context) spec_output = '{"type": "SPEC", "spec": {"invalid_key": "value"}}' - with pytest.raises(publish.pipeline.InvalidSpecOutputError): + with pytest.raises(publish_pipeline.InvalidSpecOutputError): step._parse_spec_output(spec_output) def test_parse_spec_output_no_spec(self, publish_context): - step = publish.pipeline.UploadSpecToCache(publish_context) + step = publish_pipeline.UploadSpecToCache(publish_context) spec_output = '{"type": "OTHER"}' - with pytest.raises(publish.pipeline.InvalidSpecOutputError): + with pytest.raises(publish_pipeline.InvalidSpecOutputError): step._parse_spec_output(spec_output) STEPS_TO_PATCH = [ - (publish.pipeline.metadata, "MetadataValidation"), - (publish.pipeline.metadata, "MetadataUpload"), - (publish, "CheckConnectorImageDoesNotExist"), - (publish, "UploadSpecToCache"), - (publish, "PushConnectorImageToRegistry"), - (publish, "PullConnectorImageFromRegistry"), - (publish.pipeline.steps, "run_connector_build"), + (metadata_pipeline, "MetadataValidation"), + (metadata_pipeline, "MetadataUpload"), + (publish_pipeline, "CheckConnectorImageDoesNotExist"), + (publish_pipeline, "UploadSpecToCache"), + (publish_pipeline, "PushConnectorImageToRegistry"), + (publish_pipeline, "PullConnectorImageFromRegistry"), + (publish_pipeline.steps, "run_connector_build"), ] @@ -162,12 +162,12 @@ async def test_run_connector_publish_pipeline_when_failed_validation(mocker, pre for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_metadata_validation = pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run + run_metadata_validation = metadata_pipeline.MetadataValidation.return_value.run run_metadata_validation.return_value = mocker.Mock(status=StepStatus.FAILURE) context = mocker.MagicMock(pre_release=pre_release) semaphore = anyio.Semaphore(1) - report = await publish.pipeline.run_connector_publish_pipeline(context, semaphore) + report = await publish_pipeline.run_connector_publish_pipeline(context, semaphore) run_metadata_validation.assert_called_once() # Check that nothing else is called @@ -198,20 +198,20 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_metadata_validation = 
pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run + run_metadata_validation = metadata_pipeline.MetadataValidation.return_value.run run_metadata_validation.return_value = mocker.Mock(status=StepStatus.SUCCESS) # ensure spec always succeeds - run_upload_spec_to_cache = publish.pipeline.UploadSpecToCache.return_value.run + run_upload_spec_to_cache = publish_pipeline.UploadSpecToCache.return_value.run run_upload_spec_to_cache.return_value = mocker.Mock(status=StepStatus.SUCCESS) - run_check_connector_image_does_not_exist = publish.pipeline.CheckConnectorImageDoesNotExist.return_value.run + run_check_connector_image_does_not_exist = publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run run_check_connector_image_does_not_exist.return_value = mocker.Mock(status=check_image_exists_status) - run_metadata_upload = publish.pipeline.metadata.MetadataUpload.return_value.run + run_metadata_upload = metadata_pipeline.MetadataUpload.return_value.run semaphore = anyio.Semaphore(1) - report = await publish.pipeline.run_connector_publish_pipeline(publish_context, semaphore) + report = await publish_pipeline.run_connector_publish_pipeline(publish_context, semaphore) run_metadata_validation.assert_called_once() run_check_connector_image_does_not_exist.assert_called_once() @@ -269,10 +269,10 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( """We check that the full pipeline is executed as expected when the connector image does not exist and the metadata validation passed.""" for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run.return_value = mocker.Mock( + metadata_pipeline.MetadataValidation.return_value.run.return_value = mocker.Mock( name="metadata_validation_result", status=StepStatus.SUCCESS ) - publish.pipeline.CheckConnectorImageDoesNotExist.return_value.run.return_value = mocker.Mock( + publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run.return_value = mocker.Mock( name="check_connector_image_does_not_exist_result", status=StepStatus.SUCCESS ) @@ -280,22 +280,22 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( built_connector_platform = mocker.Mock() built_connector_platform.values.return_value = ["linux/amd64"] - publish.pipeline.steps.run_connector_build.return_value = mocker.Mock( + publish_pipeline.steps.run_connector_build.return_value = mocker.Mock( name="build_connector_for_publish_result", status=build_step_status, output_artifact=built_connector_platform ) - publish.pipeline.PushConnectorImageToRegistry.return_value.run.return_value = mocker.Mock( + publish_pipeline.PushConnectorImageToRegistry.return_value.run.return_value = mocker.Mock( name="push_connector_image_to_registry_result", status=push_step_status ) - publish.pipeline.PullConnectorImageFromRegistry.return_value.run.return_value = mocker.Mock( + publish_pipeline.PullConnectorImageFromRegistry.return_value.run.return_value = mocker.Mock( name="pull_connector_image_from_registry_result", status=pull_step_status ) - publish.pipeline.UploadSpecToCache.return_value.run.return_value = mocker.Mock( + publish_pipeline.UploadSpecToCache.return_value.run.return_value = mocker.Mock( name="upload_spec_to_cache_result", status=upload_to_spec_cache_step_status ) - publish.pipeline.metadata.MetadataUpload.return_value.run.return_value = mocker.Mock( + metadata_pipeline.MetadataUpload.return_value.run.return_value = 
mocker.Mock( name="metadata_upload_result", status=metadata_upload_step_status ) @@ -303,14 +303,14 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( pre_release=pre_release, ) semaphore = anyio.Semaphore(1) - report = await publish.pipeline.run_connector_publish_pipeline(context, semaphore) + report = await publish_pipeline.run_connector_publish_pipeline(context, semaphore) steps_to_run = [ - pipelines.pipeline.metadata.pipeline.MetadataValidation.return_value.run, - publish.pipeline.CheckConnectorImageDoesNotExist.return_value.run, - publish.pipeline.steps.run_connector_build, - publish.pipeline.PushConnectorImageToRegistry.return_value.run, - publish.pipeline.PullConnectorImageFromRegistry.return_value.run, + metadata_pipeline.MetadataValidation.return_value.run, + publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run, + publish_pipeline.steps.run_connector_build, + publish_pipeline.PushConnectorImageToRegistry.return_value.run, + publish_pipeline.PullConnectorImageFromRegistry.return_value.run, ] for i, step_to_run in enumerate(steps_to_run): @@ -327,9 +327,9 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( step_to_run.assert_not_called() break if build_step_status is StepStatus.SUCCESS: - publish.pipeline.PushConnectorImageToRegistry.return_value.run.assert_called_once_with(["linux/amd64"]) + publish_pipeline.PushConnectorImageToRegistry.return_value.run.assert_called_once_with(["linux/amd64"]) else: - publish.pipeline.PushConnectorImageToRegistry.return_value.run.assert_not_called() - publish.pipeline.PullConnectorImageFromRegistry.return_value.run.assert_not_called() - publish.pipeline.UploadSpecToCache.return_value.run.assert_not_called() - publish.pipeline.metadata.MetadataUpload.return_value.run.assert_not_called() + publish_pipeline.PushConnectorImageToRegistry.return_value.run.assert_not_called() + publish_pipeline.PullConnectorImageFromRegistry.return_value.run.assert_not_called() + publish_pipeline.UploadSpecToCache.return_value.run.assert_not_called() + metadata_pipeline.MetadataUpload.return_value.run.assert_not_called() diff --git a/airbyte-ci/connectors/pipelines/tests/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_utils.py index 9480d72e7643f..51680c83e0ab8 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_utils.py +++ b/airbyte-ci/connectors/pipelines/tests/test_utils.py @@ -9,6 +9,7 @@ import pytest from connector_ops.utils import Connector, ConnectorLanguage from pipelines.helpers import utils +from pipelines import consts from tests.utils import pick_a_random_connector @@ -26,7 +27,7 @@ "ci_job_key": None, }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_context/my_branch/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_context/my_branch/my_pipeline_start_timestamp/my_git_revision", ), ( mock.MagicMock( @@ -39,7 +40,7 @@ "ci_job_key": "my_ci_job_key", }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch/my_pipeline_start_timestamp/my_git_revision", ), ( mock.MagicMock( @@ -52,7 +53,7 @@ "ci_job_key": "my_ci_job_key", }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch/my_pipeline_start_timestamp/my_git_revision", ), ( mock.MagicMock( @@ -65,7 +66,7 @@ 
"ci_job_key": "my_ci_job_key", }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch/my_pipeline_start_timestamp/my_git_revision", ), ( mock.MagicMock( @@ -78,7 +79,7 @@ "ci_job_key": "my_ci_job_key", }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashes/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashes/my_pipeline_start_timestamp/my_git_revision", ), ( mock.MagicMock( @@ -91,7 +92,7 @@ "ci_job_key": "my_ci_job_key", }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashesandspecialcharacters/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashesandspecialcharacters/my_pipeline_start_timestamp/my_git_revision", ), ( mock.MagicMock( @@ -104,7 +105,7 @@ "ci_job_key": "my_ci_job_key", }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashesandspecialcharacters/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashesandspecialcharacters/my_pipeline_start_timestamp/my_git_revision", ), ( mock.MagicMock( @@ -117,7 +118,7 @@ "ci_job_key": "my_ci_job_key", }, ), - f"{utils.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashesandspecialcharacters/my_pipeline_start_timestamp/my_git_revision", + f"{consts.STATIC_REPORT_PREFIX}/command/path/my_ci_job_key/my_branch_with_slashesandspecialcharacters/my_pipeline_start_timestamp/my_git_revision", ), ], ) From 1b621a2e7d798a8e2205a3b92099d488e8c45526 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 14:12:06 -0700 Subject: [PATCH 20/38] Fix imports --- .../pipelines/pipelines/pipeline/connectors/pipeline.py | 8 ++++++-- .../pipelines/pipeline/connectors/publish/commands.py | 4 ++-- .../tests/test_commands/test_groups/test_connectors.py | 1 - 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py index 58ffeb1e2b41f..607359359a7c6 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py @@ -15,8 +15,12 @@ from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT from pipelines.dagger.actions.system import docker from pipelines.helpers.utils import create_and_open_file -from pipelines.models.bases import NoOpStep, Report, StepResult, StepStatus -from pipelines.pipeline.connectors.context import ConnectorContext, ContextState +from pipelines.pipeline.steps.no_op import NoOpStep +from pipelines.models.reports import Report +from pipelines.models.steps import StepResult, StepStatus +from pipelines.models.contexts import ContextState + +from pipelines.pipeline.connectors.context import ConnectorContext GITHUB_GLOBAL_CONTEXT = "[POC please ignore] Connectors CI" GITHUB_GLOBAL_DESCRIPTION = "Running connectors tests" diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py index 2573cd435ecff..9f3468a1f9883 100644 --- 
a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py @@ -7,10 +7,10 @@ from pipelines import main_logger from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.pipeline.connectors.commands import connectors -from pipelines.pipeline.connectors.context import ContextState +from pipelines.models.contexts import ContextState from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines from pipelines.pipeline.connectors.publish.context import PublishConnectorContext -from pipelines.publish import reorder_contexts, run_connector_publish_pipeline +from pipelines.pipeline.connectors.publish import reorder_contexts, run_connector_publish_pipeline @connectors.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index c7a576795acfd..86b3591b1a204 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -6,7 +6,6 @@ import pipelines.pipeline.connectors.builds.commands import pipelines.pipeline.connectors.commands -import pipelines.pipeline.connectors.format.commands import pipelines.pipeline.connectors.publish.commands import pipelines.pipeline.connectors.test.commands import pytest From 1d44e62927e109a6fd7d3726e2bbbcbc95f93b11 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 16:13:43 -0500 Subject: [PATCH 21/38] example: add subcommands from other files manually. 
also make test work --- .../pipelines/pipelines/pipeline/connectors/commands.py | 3 +++ .../pipelines/pipelines/pipeline/connectors/pipeline.py | 7 +++---- .../pipelines/pipeline/connectors/test/commands.py | 6 +++--- .../pipeline/connectors/test/steps/java_connectors.py | 5 +++-- .../pipeline/connectors/test/steps/python_connectors.py | 9 ++++----- 5 files changed, 16 insertions(+), 14 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index c38ad85f01123..5bb9c3ee57597 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -10,6 +10,7 @@ from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo from pipelines import main_logger from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors +from pipelines.pipeline.connectors.test.commands import test ALL_CONNECTORS = get_all_connectors_in_repo() @@ -199,3 +200,5 @@ def connectors( enable_dependency_scanning, ) log_selected_connectors(ctx.obj["selected_connectors_with_modified_files"]) + +connectors.add_command(test) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py index 607359359a7c6..a6ddea2bfa9b9 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py @@ -12,15 +12,14 @@ import dagger from connector_ops.utils import ConnectorLanguage from dagger import Config -from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT +from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT, ContextState from pipelines.dagger.actions.system import docker from pipelines.helpers.utils import create_and_open_file -from pipelines.pipeline.steps.no_op import NoOpStep + from pipelines.models.reports import Report from pipelines.models.steps import StepResult, StepStatus -from pipelines.models.contexts import ContextState - from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.pipeline.steps.no_op import NoOpStep GITHUB_GLOBAL_CONTEXT = "[POC please ignore] Connectors CI" GITHUB_GLOBAL_DESCRIPTION = "Running connectors tests" diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py index 5d4eb82508471..150f82243c639 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py @@ -8,14 +8,14 @@ import click from pipelines import main_logger from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand +from pipelines.consts import ContextState from pipelines.helpers.github import update_global_commit_status_check_for_tests -from pipelines.pipeline.connectors.commands import connectors -from pipelines.pipeline.connectors.context import ConnectorContext, ContextState +from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines from pipelines.pipeline.connectors.test.steps import run_connector_test_pipeline 
-@connectors.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.") +@click.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.") @click.option( "--code-tests-only", is_flag=True, diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py index e751746c2077e..64c32104402cf 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py @@ -10,7 +10,8 @@ import asyncer from dagger import Directory, File, QueryError from pipelines.consts import LOCAL_BUILD_PLATFORM -from pipelines.dagger.actions.system import docker, secrets +from pipelines.dagger.actions import secrets +from pipelines.dagger.actions.system import docker from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import StepResult, StepStatus from pipelines.pipeline.connectors.builds.steps.java_connectors import ( @@ -21,7 +22,7 @@ from pipelines.pipeline.connectors.builds.steps.normalization import BuildOrPullNormalization from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests -from pipelines.pipeline.steps import GradleTask +from pipelines.pipeline.steps.gradle import GradleTask class IntegrationTests(GradleTask): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py index 14f024037ed34..bc9df49efce1b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py @@ -11,8 +11,7 @@ import pipelines.dagger.actions.python.common import pipelines.dagger.actions.system.docker from dagger import Container, File -from pipelines.actions import environments -from pipelines.consts import LOCAL_BUILD_PLATFORM +from pipelines.consts import LOCAL_BUILD_PLATFORM, PYPROJECT_TOML_FILE_PATH from pipelines.dagger.actions import secrets from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import Step, StepResult, StepStatus @@ -26,9 +25,9 @@ class CodeFormatChecks(Step): title = "Code format checks" - RUN_BLACK_CMD = ["python", "-m", "black", f"--config=/{environments.PYPROJECT_TOML_FILE_PATH}", "--check", "."] - RUN_ISORT_CMD = ["python", "-m", "isort", f"--settings-file=/{environments.PYPROJECT_TOML_FILE_PATH}", "--check-only", "--diff", "."] - RUN_FLAKE_CMD = ["python", "-m", "pflake8", f"--config=/{environments.PYPROJECT_TOML_FILE_PATH}", "."] + RUN_BLACK_CMD = ["python", "-m", "black", f"--config=/{PYPROJECT_TOML_FILE_PATH}", "--check", "."] + RUN_ISORT_CMD = ["python", "-m", "isort", f"--settings-file=/{PYPROJECT_TOML_FILE_PATH}", "--check-only", "--diff", "."] + RUN_FLAKE_CMD = ["python", "-m", "pflake8", f"--config=/{PYPROJECT_TOML_FILE_PATH}", "."] async def _run(self) -> StepResult: """Run a code format check on the container source code. 
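The commits above and below all apply one pattern: each subcommand module now declares a plain @click.command instead of decorating with @connectors.command, and the group module attaches the command explicitly via add_command. This removes the subcommand modules' import of the group module, which is what produced the circular imports these patches untangle. A minimal, self-contained sketch of that registration pattern (the cli group and hello command are illustrative names, not code from these patches):

    import click

    # Subcommand module: the command is defined standalone, with no import
    # of the group it will later belong to.
    @click.command(help="Print a greeting.")
    def hello() -> None:
        click.echo("hello")

    # Group module: it imports the subcommand and registers it explicitly,
    # replacing the @group.command decorator form.
    @click.group(help="Example command group.")
    def cli() -> None:
        pass

    cli.add_command(hello)

    if __name__ == "__main__":
        cli()

Invoked as a script, the group then dispatches "hello" to the registered subcommand exactly as the decorator form would, but the import graph only points from the group down to the commands.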
From 555c2e5c39b45f41a3d05bd3f382ae2dd171ae26 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 16:17:24 -0500 Subject: [PATCH 22/38] add connectors publish --- .../pipelines/pipelines/pipeline/connectors/commands.py | 2 ++ .../pipelines/pipeline/connectors/publish/commands.py | 5 ++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index 5bb9c3ee57597..ff34e0f23c404 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -10,6 +10,7 @@ from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo from pipelines import main_logger from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors +from pipelines.pipeline.connectors.publish.commands import publish from pipelines.pipeline.connectors.test.commands import test ALL_CONNECTORS = get_all_connectors_in_repo() @@ -201,4 +202,5 @@ def connectors( ) log_selected_connectors(ctx.obj["selected_connectors_with_modified_files"]) +connectors.add_command(publish) connectors.add_command(test) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py index 9f3468a1f9883..b04fd2138f848 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py @@ -6,14 +6,13 @@ import click from pipelines import main_logger from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.commands import connectors from pipelines.models.contexts import ContextState from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines from pipelines.pipeline.connectors.publish.context import PublishConnectorContext -from pipelines.pipeline.connectors.publish import reorder_contexts, run_connector_publish_pipeline +from pipelines.pipeline.connectors.publish.pipeline import reorder_contexts, run_connector_publish_pipeline -@connectors.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.") +@click.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.") @click.option("--pre-release/--main-release", help="Use this flag if you want to publish pre-release images.", default=True, type=bool) @click.option( "--spec-cache-gcs-credentials", From e5275b7a903348ad2e145f320a05766154301fda Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 14:18:29 -0700 Subject: [PATCH 23/38] Fix imports --- .../pipelines/pipelines/pipeline/connectors/publish/commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py index b04fd2138f848..be32e6fbf9070 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py @@ -6,7 +6,7 @@ import click from pipelines import
main_logger from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.models.contexts import ContextState +from pipelines.consts import ContextState from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines from pipelines.pipeline.connectors.publish.context import PublishConnectorContext from pipelines.pipeline.connectors.publish.pipeline import reorder_contexts, run_connector_publish_pipeline From b4f4c7b3c90ae11ead8fc89adfb0e136f27c5b14 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 14:23:36 -0700 Subject: [PATCH 24/38] Fix imports --- .../connectors/pipelines/tests/test_bases.py | 14 +++++------- .../test_commands/test_groups/test_connectors.py | 2 +- .../pipelines/tests/test_tests/test_common.py | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/tests/test_bases.py b/airbyte-ci/connectors/pipelines/tests/test_bases.py index a960174c9faf4..282264e244d8c 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_bases.py +++ b/airbyte-ci/connectors/pipelines/tests/test_bases.py @@ -7,7 +7,7 @@ import anyio import pytest from dagger import DaggerError -from pipelines.models import steps +from pipelines.models import steps, reports pytestmark = [ pytest.mark.anyio, ] @@ -85,23 +85,23 @@ def test_context(self, mocker): return mocker.Mock() def test_report_failed_if_it_has_no_step_result(self, test_context): - report = bases.Report(test_context, []) + report = reports.Report(test_context, []) assert not report.success - report = bases.Report(test_context, [steps.StepResult(None, steps.StepStatus.FAILURE)]) + report = reports.Report(test_context, [steps.StepResult(None, steps.StepStatus.FAILURE)]) assert not report.success - report = bases.Report( + report = reports.Report( test_context, [steps.StepResult(None, steps.StepStatus.FAILURE), steps.StepResult(None, steps.StepStatus.SUCCESS)] ) assert not report.success - report = bases.Report(test_context, [steps.StepResult(None, steps.StepStatus.SUCCESS)]) + report = reports.Report(test_context, [steps.StepResult(None, steps.StepStatus.SUCCESS)]) assert report.success - report = bases.Report( + report = reports.Report( test_context, [steps.StepResult(None, steps.StepStatus.SUCCESS), steps.StepResult(None, steps.StepStatus.SKIPPED)] ) assert report.success - report = bases.Report(test_context, [steps.StepResult(None, steps.StepStatus.SKIPPED)]) + report = reports.Report(test_context, [steps.StepResult(None, steps.StepStatus.SKIPPED)]) assert report.success diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py
b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index 86b3591b1a204..6f4cc7802a06d 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -11,7 +11,7 @@ import pytest from click.testing import CliRunner from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage -from pipelines.commands.groups import connectors +from pipelines.pipeline.connectors.commands import connectors from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from tests.utils import pick_a_random_connector diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py index 57ec2cc8f8c5a..409d2294f6ed7 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py @@ -12,7 +12,7 @@ import yaml from freezegun import freeze_time from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles -from pipelines.models.bases import StepStatus +from pipelines.models.steps import StepStatus from pipelines.pipeline.connectors.test.steps import common pytestmark = [ From 299f99f43bf111f18aa591c4e1c9662764ef2c09 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 16:27:24 -0500 Subject: [PATCH 25/38] add all connectors commands and fix truncated help texts for connectors commands --- .../pipelines/pipeline/connectors/builds/commands.py | 3 +-- .../pipeline/connectors/bump_version/commands.py | 3 +-- .../pipelines/pipeline/connectors/commands.py | 10 ++++++++++ .../pipelines/pipeline/connectors/list/commands.py | 3 +-- .../connectors/migrate_to_base_image/commands.py | 5 ++--- .../connectors/migrate_to_base_image/pipeline.py | 2 +- .../pipeline/connectors/upgrade_base_image/commands.py | 3 +-- 7 files changed, 17 insertions(+), 12 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py index 58920b0e7d0fe..35bb35ef1b795 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py @@ -6,12 +6,11 @@ import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.pipeline.connectors.builds.steps import run_connector_build_pipeline -from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -@connectors.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.") +@click.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.") @click.option( "--use-host-gradle-dist-tar", is_flag=True, diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py index 76a89cd62eca0..64a891bfcf769 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py @@ -6,12 +6,11 @@ import click from pipelines.cli.dagger_pipeline_command 
import DaggerPipelineCommand from pipelines.pipeline.connectors.bump_version.pipeline import run_connector_version_bump_pipeline -from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -@connectors.command(cls=DaggerPipelineCommand, help="Bump a connector version: update metadata.yaml and changelog.") +@click.command(cls=DaggerPipelineCommand, short_help="Bump a connector version: update metadata.yaml and changelog.") @click.argument("bump-type", type=click.Choice(["patch", "minor", "major"])) @click.argument("pull-request-number", type=str) @click.argument("changelog-entry", type=str) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index ff34e0f23c404..c1fb34e3d62bc 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -10,8 +10,13 @@ from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo from pipelines import main_logger from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors +from pipelines.pipeline.connectors.builds.commands import build +from pipelines.pipeline.connectors.bump_version.commands import bump_version +from pipelines.pipeline.connectors.migrate_to_base_image.commands import migrate_to_base_image +from pipelines.pipeline.connectors.list.commands import list from pipelines.pipeline.connectors.publish.commands import publish from pipelines.pipeline.connectors.test.commands import test +from pipelines.pipeline.connectors.upgrade_base_image.commands import upgrade_base_image ALL_CONNECTORS = get_all_connectors_in_repo() @@ -202,5 +207,10 @@ def connectors( ) log_selected_connectors(ctx.obj["selected_connectors_with_modified_files"]) +connectors.add_command(build) +connectors.add_command(bump_version) +connectors.add_command(list) +connectors.add_command(migrate_to_base_image) connectors.add_command(publish) connectors.add_command(test) +connectors.add_command(upgrade_base_image) \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py index 2cf89490f9248..b26b43176aae7 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py @@ -5,12 +5,11 @@ import click from connector_ops.utils import console from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.commands import connectors from rich.table import Table from rich.text import Text -@connectors.command(cls=DaggerPipelineCommand, help="List all selected connectors.") +@click.command(cls=DaggerPipelineCommand, help="List all selected connectors.") @click.pass_context def list( ctx: click.Context, diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py index 3ad6c2ce95d25..e7b5d76e582ea 100644 --- 
a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py @@ -5,15 +5,14 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -@connectors.command( +@click.command( cls=DaggerPipelineCommand, - help="Make the selected connectors use our base image: remove dockerfile, update metadata.yaml and update documentation.", + short_help="Make the selected connectors use our base image: remove dockerfile, update metadata.yaml and update documentation.", ) @click.argument("pull-request-number", type=str) @click.option( diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py index 0feb60116a67c..3e80b8cfb8b7b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py @@ -11,7 +11,7 @@ from dagger import Directory from jinja2 import Template from pipelines import consts -from pipelines.helpers import metadata_change_helpers +from pipelines.helpers.connectors import metadata_change_helpers from pipelines.models.steps import Step, StepResult, StepStatus from pipelines.pipeline.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py index 5b1d83d7907ad..abf540e4d7d5e 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py @@ -5,13 +5,12 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.commands import connectors from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_base_image_upgrade_pipeline from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -@connectors.command(cls=DaggerPipelineCommand, help="Upgrades the base image version used by the selected connectors..") +@click.command(cls=DaggerPipelineCommand, short_help="Upgrades the base image version used by the selected connectors.") @click.option("--set-if-not-exists", default=True) @click.option( "--docker-hub-username", From 4449bc659136604fe45d09c1be9c7b7cc8ae7218 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 14:36:30 -0700 Subject: [PATCH 26/38] Fix imports test_utils --- airbyte-ci/connectors/pipelines/tests/test_utils.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git 
a/airbyte-ci/connectors/pipelines/tests/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_utils.py index 51680c83e0ab8..fba31b1ea02e8 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_utils.py +++ b/airbyte-ci/connectors/pipelines/tests/test_utils.py @@ -5,10 +5,11 @@ from pathlib import Path from unittest import mock -import pipelines.helpers.git import pytest from connector_ops.utils import Connector, ConnectorLanguage from pipelines.helpers import utils +from pipelines.helpers.connectors.modifed import get_modified_connectors, get_connector_modified_files +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines import consts from tests.utils import pick_a_random_connector @@ -123,7 +124,7 @@ ], ) def test_render_report_output_prefix(ctx, expected): - assert utils.DaggerPipelineCommand.render_report_output_prefix(ctx) == expected + assert DaggerPipelineCommand.render_report_output_prefix(ctx) == expected @pytest.mark.parametrize("enable_dependency_scanning", [True, False]) @@ -137,7 +138,7 @@ def test_get_modified_connectors_with_dependency_scanning(all_connectors, enable ) modified_files.append(modified_java_connector.code_directory / "foo.bar") - modified_connectors = pipelines.helpers.git.get_modified_connectors(modified_files, all_connectors, enable_dependency_scanning) + modified_connectors = get_modified_connectors(modified_files, all_connectors, enable_dependency_scanning) if enable_dependency_scanning: assert not_modified_java_connector in modified_connectors else: @@ -154,7 +155,7 @@ def test_get_connector_modified_files(): other_connector.code_directory / "README.md", } - result = pipelines.helpers.git.get_connector_modified_files(connector, all_modified_files) + result = get_connector_modified_files(connector, all_modified_files) assert result == frozenset({connector.code_directory / "setup.py"}) @@ -166,7 +167,7 @@ def test_no_modified_files_in_connector_directory(): other_connector.code_directory / "README.md", } - result = pipelines.helpers.git.get_connector_modified_files(connector, all_modified_files) + result = get_connector_modified_files(connector, all_modified_files) assert result == frozenset() From 4be0aa7a97871a42781b34893c01ad71be38fa1d Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 14:40:13 -0700 Subject: [PATCH 27/38] Fix imports test_common --- .../connectors/pipelines/tests/test_tests/test_common.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py index 409d2294f6ed7..8298c3d3953d3 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py @@ -11,6 +11,7 @@ import pytest import yaml from freezegun import freeze_time +from pipelines.dagger.actions.system import docker from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.models.steps import StepStatus from pipelines.pipeline.connectors.test.steps import common @@ -154,8 +155,8 @@ def get_patched_acceptance_test_step(self, dagger_client, mocker, test_context, test_context.connector_acceptance_test_image = "bash:latest" test_context.connector_secrets = {"config.json": dagger_client.set_secret("config.json", "connector_secret")} - mocker.patch.object(common.environments, "load_image_to_docker_host", return_value="image_sha") - mocker.patch.object(common.environments, 
"with_bound_docker_host", lambda _, cat_container: cat_container) + mocker.patch.object(docker, "load_image_to_docker_host", return_value="image_sha") + mocker.patch.object(docker, "with_bound_docker_host", lambda _, cat_container: cat_container) return common.AcceptanceTests(test_context) async def test_cat_container_provisioning( From a4ca6e5b34dfd463e8615058eb45754e0a29aac5 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 16:43:10 -0500 Subject: [PATCH 28/38] fix jinja template reference --- .../pipelines/pipelines/pipeline/connectors/reports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py index 034b3b3b96c06..89665d70298d2 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py @@ -102,7 +102,7 @@ def post_comment_on_pr(self) -> None: self.pipeline_context.pull_request.create_issue_comment(markdown_comment) async def to_html(self) -> str: - env = Environment(loader=PackageLoader("pipelines.tests"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True) + env = Environment(loader=PackageLoader("pipeline.connectors.test.steps"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True) template = env.get_template("test_report.html.j2") template.globals["StepStatus"] = StepStatus template.globals["format_duration"] = format_duration From 7506f2ed33ef92e4bebb0b10582280230d3a11a5 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 16:58:47 -0500 Subject: [PATCH 29/38] actually fix the jinja template --- .../pipelines/pipelines/pipeline/connectors/reports.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py index 89665d70298d2..ba4f4d0bbe8bf 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py @@ -102,7 +102,7 @@ def post_comment_on_pr(self) -> None: self.pipeline_context.pull_request.create_issue_comment(markdown_comment) async def to_html(self) -> str: - env = Environment(loader=PackageLoader("pipeline.connectors.test.steps"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True) + env = Environment(loader=PackageLoader("pipelines.pipeline.connectors.test.steps"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True) template = env.get_template("test_report.html.j2") template.globals["StepStatus"] = StepStatus template.globals["format_duration"] = format_duration From 894a3e29b49587981bafdfb3131d35f1abbbec9a Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 14:59:33 -0700 Subject: [PATCH 30/38] Fix imports test_utils --- .../test_groups/test_connectors.py | 44 ++++++++++++------- 1 file changed, 27 insertions(+), 17 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index 6f4cc7802a06d..b0aab591368e6 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -9,9 +9,19 @@ 
import pipelines.pipeline.connectors.publish.commands import pipelines.pipeline.connectors.test.commands import pytest +import click from click.testing import CliRunner from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage from pipelines.pipeline.connectors.commands import connectors + +from pipelines.pipeline.connectors import commands as connectors_commands +from pipelines.pipeline.connectors import context as connectors_context +from pipelines.pipeline.connectors.publish import context as connectors_publish_context +from pipelines.pipeline.connectors.publish import commands as connectors_publish_command +from pipelines.pipeline.connectors.test import commands as connectors_test_command +from pipelines.pipeline.connectors.builds import commands as connectors_build_command + + from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from tests.utils import pick_a_random_connector @@ -23,7 +33,7 @@ def runner(): def test_get_selected_connectors_by_name_no_file_modification(): connector = pick_a_random_connector() - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(connector.technical_name,), selected_support_levels=(), selected_languages=(), @@ -40,7 +50,7 @@ def test_get_selected_connectors_by_name_no_file_modification(): def test_get_selected_connectors_by_support_level_no_file_modification(): - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=["certified"], selected_languages=(), @@ -54,7 +64,7 @@ def test_get_selected_connectors_by_support_level_no_file_modification(): def test_get_selected_connectors_by_language_no_file_modification(): - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(ConnectorLanguage.LOW_CODE,), @@ -70,7 +80,7 @@ def test_get_selected_connectors_by_language_no_file_modification(): def test_get_selected_connectors_by_name_with_file_modification(): connector = pick_a_random_connector() modified_files = {connector.code_directory / "setup.py"} - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(connector.technical_name,), selected_support_levels=(), selected_languages=(), @@ -89,7 +99,7 @@ def test_get_selected_connectors_by_name_with_file_modification(): def test_get_selected_connectors_by_name_and_support_level_or_languages_leads_to_intersection(): connector = pick_a_random_connector() modified_files = {connector.code_directory / "setup.py"} - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(connector.technical_name,), selected_support_levels=(connector.support_level,), selected_languages=(connector.language,), @@ -106,7 +116,7 @@ def test_get_selected_connectors_with_modified(): first_modified_connector = pick_a_random_connector() second_modified_connector = 
pick_a_random_connector(other_picked_connectors=[first_modified_connector]) modified_files = {first_modified_connector.code_directory / "setup.py", second_modified_connector.code_directory / "setup.py"} - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -123,7 +133,7 @@ def test_get_selected_connectors_with_modified_and_language(): first_modified_connector = pick_a_random_connector(language=ConnectorLanguage.PYTHON) second_modified_connector = pick_a_random_connector(language=ConnectorLanguage.JAVA, other_picked_connectors=[first_modified_connector]) modified_files = {first_modified_connector.code_directory / "setup.py", second_modified_connector.code_directory / "setup.py"} - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(ConnectorLanguage.JAVA,), @@ -141,7 +151,7 @@ def test_get_selected_connectors_with_modified_and_support_level(): first_modified_connector = pick_a_random_connector(support_level="community") second_modified_connector = pick_a_random_connector(support_level="certified", other_picked_connectors=[first_modified_connector]) modified_files = {first_modified_connector.code_directory / "setup.py", second_modified_connector.code_directory / "setup.py"} - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=["certified"], selected_languages=(), @@ -163,7 +173,7 @@ def test_get_selected_connectors_with_modified_and_metadata_only(): second_modified_connector.code_directory / METADATA_FILE_NAME, second_modified_connector.code_directory / "setup.py", } - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -189,7 +199,7 @@ def test_get_selected_connectors_with_metadata_only(): second_modified_connector.code_directory / METADATA_FILE_NAME, second_modified_connector.code_directory / "setup.py", } - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -210,7 +220,7 @@ def test_get_selected_connectors_with_metadata_only(): def test_get_selected_connectors_with_metadata_query(): connector = pick_a_random_connector() metadata_query = f"data.dockerRepository == '{connector.metadata['dockerRepository']}'" - selected_connectors = pipelines.pipeline.connectors.commands.get_selected_connectors_with_modified_files( + selected_connectors = connectors_commands.get_selected_connectors_with_modified_files( selected_names=(), selected_support_levels=(), selected_languages=(), @@ -252,9 +262,9 @@ def click_context_obj(): @pytest.mark.parametrize( "command, command_args", [ - (pipelines.pipeline.connectors.test.commands.test, []), + (connectors_test_command.test, []), ( - 
pipelines.pipeline.connectors.publish.commands.publish, + connectors_publish_command.publish, [ "--spec-cache-gcs-credentials", "test", @@ -270,7 +280,7 @@ def click_context_obj(): "test", ], ), - (pipelines.pipeline.connectors.builds.commands.build, []), + (connectors_build_command.build, []), ], ) def test_commands_do_not_override_connector_selection( @@ -284,10 +294,10 @@ def test_commands_do_not_override_connector_selection( selected_connector = mocker.MagicMock() click_context_obj["selected_connectors_with_modified_files"] = [selected_connector] - mocker.patch.object(connectors.click, "confirm") + mocker.patch.object(click, "confirm") mock_connector_context = mocker.MagicMock() - mocker.patch.object(connectors, "ConnectorContext", mock_connector_context) - mocker.patch.object(connectors, "PublishConnectorContext", mock_connector_context) + mocker.patch.object(connectors_context, "ConnectorContext", mock_connector_context) + mocker.patch.object(connectors_publish_context, "PublishConnectorContext", mock_connector_context) runner.invoke(command, command_args, catch_exceptions=False, obj=click_context_obj) assert mock_connector_context.call_count == 1 # If the connector selection is overriden the context won't be instantiated with the selected connector mock instance From 60f15f348bb24715a2e2153192104a9db00449e0 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 15:08:18 -0700 Subject: [PATCH 31/38] Fix imports test_publish --- .../pipelines/tests/test_publish.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index d1978049dff7e..4712f88e87a1d 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -10,7 +10,6 @@ import pytest from pipelines.models.steps import StepStatus from pipelines.pipeline.connectors.publish import pipeline as publish_pipeline -from pipelines.pipeline.metadata import pipeline as metadata_pipeline pytestmark = [ pytest.mark.anyio, @@ -146,8 +145,8 @@ def test_parse_spec_output_no_spec(self, publish_context): STEPS_TO_PATCH = [ - (metadata_pipeline, "MetadataValidation"), - (metadata_pipeline, "MetadataUpload"), + (publish_pipeline, "MetadataValidation"), + (publish_pipeline, "MetadataUpload"), (publish_pipeline, "CheckConnectorImageDoesNotExist"), (publish_pipeline, "UploadSpecToCache"), (publish_pipeline, "PushConnectorImageToRegistry"), @@ -162,7 +161,7 @@ async def test_run_connector_publish_pipeline_when_failed_validation(mocker, pre for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_metadata_validation = metadata_pipeline.MetadataValidation.return_value.run + run_metadata_validation = publish_pipeline.MetadataValidation.return_value.run run_metadata_validation.return_value = mocker.Mock(status=StepStatus.FAILURE) context = mocker.MagicMock(pre_release=pre_release) @@ -198,7 +197,7 @@ async def test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - run_metadata_validation = metadata_pipeline.MetadataValidation.return_value.run + run_metadata_validation = publish_pipeline.MetadataValidation.return_value.run run_metadata_validation.return_value = mocker.Mock(status=StepStatus.SUCCESS) # ensure spec always succeeds @@ -208,7 +207,7 @@ async def 
test_run_connector_publish_pipeline_when_image_exists_or_failed(mocker run_check_connector_image_does_not_exist = publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run run_check_connector_image_does_not_exist.return_value = mocker.Mock(status=check_image_exists_status) - run_metadata_upload = metadata_pipeline.MetadataUpload.return_value.run + run_metadata_upload = publish_pipeline.MetadataUpload.return_value.run semaphore = anyio.Semaphore(1) report = await publish_pipeline.run_connector_publish_pipeline(publish_context, semaphore) @@ -269,7 +268,7 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( """We check that the full pipeline is executed as expected when the connector image does not exist and the metadata validation passed.""" for module, to_mock in STEPS_TO_PATCH: mocker.patch.object(module, to_mock, return_value=mocker.AsyncMock()) - metadata_pipeline.MetadataValidation.return_value.run.return_value = mocker.Mock( + publish_pipeline.MetadataValidation.return_value.run.return_value = mocker.Mock( name="metadata_validation_result", status=StepStatus.SUCCESS ) publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run.return_value = mocker.Mock( @@ -295,7 +294,7 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( publish_pipeline.UploadSpecToCache.return_value.run.return_value = mocker.Mock( name="upload_spec_to_cache_result", status=upload_to_spec_cache_step_status ) - metadata_pipeline.MetadataUpload.return_value.run.return_value = mocker.Mock( + publish_pipeline.MetadataUpload.return_value.run.return_value = mocker.Mock( name="metadata_upload_result", status=metadata_upload_step_status ) @@ -306,7 +305,7 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( report = await publish_pipeline.run_connector_publish_pipeline(context, semaphore) steps_to_run = [ - metadata_pipeline.MetadataValidation.return_value.run, + publish_pipeline.MetadataValidation.return_value.run, publish_pipeline.CheckConnectorImageDoesNotExist.return_value.run, publish_pipeline.steps.run_connector_build, publish_pipeline.PushConnectorImageToRegistry.return_value.run, @@ -332,4 +331,4 @@ async def test_run_connector_publish_pipeline_when_image_does_not_exist( publish_pipeline.PushConnectorImageToRegistry.return_value.run.assert_not_called() publish_pipeline.PullConnectorImageFromRegistry.return_value.run.assert_not_called() publish_pipeline.UploadSpecToCache.return_value.run.assert_not_called() - metadata_pipeline.MetadataUpload.return_value.run.assert_not_called() + publish_pipeline.MetadataUpload.return_value.run.assert_not_called() From c23224a720ac2c6020d7fa4635a73529c5c48dc9 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 15:16:21 -0700 Subject: [PATCH 32/38] Fix imports test_connectors --- .../tests/test_commands/test_groups/test_connectors.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index b0aab591368e6..2772870770804 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -12,11 +12,8 @@ import click from click.testing import CliRunner from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage -from pipelines.pipeline.connectors.commands import connectors from 
pipelines.pipeline.connectors import commands as connectors_commands -from pipelines.pipeline.connectors import context as connectors_context -from pipelines.pipeline.connectors.publish import context as connectors_publish_context from pipelines.pipeline.connectors.publish import commands as connectors_publish_command from pipelines.pipeline.connectors.test import commands as connectors_test_command from pipelines.pipeline.connectors.builds import commands as connectors_build_command @@ -296,8 +293,9 @@ def test_commands_do_not_override_connector_selection( mocker.patch.object(click, "confirm") mock_connector_context = mocker.MagicMock() - mocker.patch.object(connectors_context, "ConnectorContext", mock_connector_context) - mocker.patch.object(connectors_publish_context, "PublishConnectorContext", mock_connector_context) + mocker.patch.object(connectors_test_command, "ConnectorContext", mock_connector_context) + mocker.patch.object(connectors_build_command, "ConnectorContext", mock_connector_context) + mocker.patch.object(connectors_publish_command, "PublishConnectorContext", mock_connector_context) runner.invoke(command, command_args, catch_exceptions=False, obj=click_context_obj) assert mock_connector_context.call_count == 1 # If the connector selection is overriden the context won't be instantiated with the selected connector mock instance From 6dffb729b3953bd9b982a458813f3f4972fd8773 Mon Sep 17 00:00:00 2001 From: bnchrch Date: Wed, 18 Oct 2023 22:33:30 +0000 Subject: [PATCH 33/38] Automated Commit - Formatting Changes --- .../pipelines/pipelines/pipeline/connectors/commands.py | 5 +++-- .../pipelines/pipelines/pipeline/connectors/pipeline.py | 1 - .../pipelines/pipelines/pipeline/connectors/reports.py | 7 ++++++- .../pipelines/pipeline/connectors/test/commands.py | 1 - airbyte-ci/connectors/pipelines/tests/test_bases.py | 2 +- .../tests/test_commands/test_groups/test_connectors.py | 9 +++------ airbyte-ci/connectors/pipelines/tests/test_publish.py | 4 +++- airbyte-ci/connectors/pipelines/tests/test_utils.py | 6 +++--- 8 files changed, 19 insertions(+), 16 deletions(-) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py index c1fb34e3d62bc..6b849c0729072 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py @@ -12,8 +12,8 @@ from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors from pipelines.pipeline.connectors.builds.commands import build from pipelines.pipeline.connectors.bump_version.commands import bump_version -from pipelines.pipeline.connectors.migrate_to_base_image.commands import migrate_to_base_image from pipelines.pipeline.connectors.list.commands import list +from pipelines.pipeline.connectors.migrate_to_base_image.commands import migrate_to_base_image from pipelines.pipeline.connectors.publish.commands import publish from pipelines.pipeline.connectors.test.commands import test from pipelines.pipeline.connectors.upgrade_base_image.commands import upgrade_base_image @@ -207,10 +207,11 @@ def connectors( ) log_selected_connectors(ctx.obj["selected_connectors_with_modified_files"]) + connectors.add_command(build) connectors.add_command(bump_version) connectors.add_command(list) connectors.add_command(migrate_to_base_image) connectors.add_command(publish) 
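The retargeted mocker.patch.object calls in [PATCH 32] follow the standard unittest.mock rule: patch a name in the module where it is looked up, not in the module where it is defined. A self-contained sketch of the difference, using throwaway stand-in modules rather than the real pipelines packages:

    import types
    from unittest import mock

    # Simulate "from context import ConnectorContext" inside a commands module.
    context = types.ModuleType("context")
    context.ConnectorContext = type("ConnectorContext", (), {})
    commands = types.ModuleType("commands")
    commands.ConnectorContext = context.ConnectorContext

    # Patching the defining module leaves the importer's reference untouched.
    with mock.patch.object(context, "ConnectorContext", new="patched"):
        assert commands.ConnectorContext != "patched"

    # Patching the consuming module is what lets the command tests observe
    # the mock, hence the connectors_*_command targets above.
    with mock.patch.object(commands, "ConnectorContext", new="patched"):
        assert commands.ConnectorContext == "patched"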
connectors.add_command(test) -connectors.add_command(upgrade_base_image) \ No newline at end of file +connectors.add_command(upgrade_base_image) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py index a6ddea2bfa9b9..e53a17645a141 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py @@ -15,7 +15,6 @@ from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT, ContextState from pipelines.dagger.actions.system import docker from pipelines.helpers.utils import create_and_open_file - from pipelines.models.reports import Report from pipelines.models.steps import StepResult, StepStatus from pipelines.pipeline.connectors.context import ConnectorContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py index ba4f4d0bbe8bf..4a934b67631ea 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py @@ -102,7 +102,12 @@ def post_comment_on_pr(self) -> None: self.pipeline_context.pull_request.create_issue_comment(markdown_comment) async def to_html(self) -> str: - env = Environment(loader=PackageLoader("pipelines.pipeline.connectors.test.steps"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True) + env = Environment( + loader=PackageLoader("pipelines.pipeline.connectors.test.steps"), + autoescape=select_autoescape(), + trim_blocks=False, + lstrip_blocks=True, + ) template = env.get_template("test_report.html.j2") template.globals["StepStatus"] = StepStatus template.globals["format_duration"] = format_duration diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py index 076647e54a46e..150f82243c639 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py @@ -10,7 +10,6 @@ from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.consts import ContextState from pipelines.helpers.github import update_global_commit_status_check_for_tests -from pipelines.consts import ContextState from pipelines.pipeline.connectors.context import ConnectorContext from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines from pipelines.pipeline.connectors.test.steps import run_connector_test_pipeline diff --git a/airbyte-ci/connectors/pipelines/tests/test_bases.py b/airbyte-ci/connectors/pipelines/tests/test_bases.py index 282264e244d8c..5b4547df1e455 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_bases.py +++ b/airbyte-ci/connectors/pipelines/tests/test_bases.py @@ -7,7 +7,7 @@ import anyio import pytest from dagger import DaggerError -from pipelines.models import steps, reports +from pipelines.models import reports, steps pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index 2772870770804..a78ad0e21b5ea 100644 --- 
a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -4,22 +4,19 @@ from typing import Callable +import click import pipelines.pipeline.connectors.builds.commands import pipelines.pipeline.connectors.commands import pipelines.pipeline.connectors.publish.commands import pipelines.pipeline.connectors.test.commands import pytest -import click from click.testing import CliRunner from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage - +from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.pipeline.connectors import commands as connectors_commands +from pipelines.pipeline.connectors.builds import commands as connectors_build_command from pipelines.pipeline.connectors.publish import commands as connectors_publish_command from pipelines.pipeline.connectors.test import commands as connectors_test_command -from pipelines.pipeline.connectors.builds import commands as connectors_build_command - - -from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from tests.utils import pick_a_random_connector diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 4712f88e87a1d..2e7d1d55b8d89 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -79,7 +79,9 @@ async def test_run(self, mocker, dagger_client, valid_spec, successful_upload, r upload_exit_code = 0 if successful_upload else 1 mocker.patch.object( - publish_pipeline, "upload_to_gcs", mocker.AsyncMock(return_value=(upload_exit_code, "upload_to_gcs_stdout", "upload_to_gcs_stderr")) + publish_pipeline, + "upload_to_gcs", + mocker.AsyncMock(return_value=(upload_exit_code, "upload_to_gcs_stdout", "upload_to_gcs_stderr")), ) if not valid_spec: mocker.patch.object( diff --git a/airbyte-ci/connectors/pipelines/tests/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_utils.py index fba31b1ea02e8..9d9328f384178 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_utils.py +++ b/airbyte-ci/connectors/pipelines/tests/test_utils.py @@ -7,10 +7,10 @@ import pytest from connector_ops.utils import Connector, ConnectorLanguage -from pipelines.helpers import utils -from pipelines.helpers.connectors.modifed import get_modified_connectors, get_connector_modified_files -from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines import consts +from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand +from pipelines.helpers import utils +from pipelines.helpers.connectors.modifed import get_connector_modified_files, get_modified_connectors from tests.utils import pick_a_random_connector From 549d0e377ef4aebbc4e478603874d00cf17f9a36 Mon Sep 17 00:00:00 2001 From: erohmensing Date: Wed, 18 Oct 2023 17:59:12 -0500 Subject: [PATCH 34/38] rename pipeline to airbyte_ci --- .../{pipeline => airbyte_ci}/__init__.py | 0 .../connectors/__init__.py | 0 .../connectors/builds/__init__.py | 0 .../connectors/builds/commands.py | 6 +++--- .../connectors/builds/pipeline.py | 0 .../connectors/builds/steps/__init__.py | 10 +++++----- .../builds/steps/build_customization.py | 0 .../connectors/builds/steps/common.py | 2 +- .../connectors/builds/steps/java_connectors.py | 6 +++--- .../connectors/builds/steps/normalization.py | 2 +- .../connectors/builds/steps/python_connectors.py | 6 +++--- 
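The bulk of [PATCH 34] is this mechanical rename from pipelines.pipeline to pipelines.airbyte_ci (the full rename list continues below): git mv covers the directory moves, and the dotted references are a regex-sized rewrite. A rough sketch of the kind of one-off helper that could perform that rewrite; the helper, its dry_run flag, and the path handling are illustrative assumptions, not part of the patch:

    import pathlib
    import re

    # Match "pipelines.pipeline" as a dotted module reference, not as a
    # substring of longer identifiers such as "pipelines.pipelines".
    PATTERN = re.compile(r"\bpipelines\.pipeline\b")

    def rewrite_imports(root: pathlib.Path, dry_run: bool = True) -> int:
        """Rewrite module references under root; return the number of files touched."""
        changed = 0
        for path in root.rglob("*.py"):
            text = path.read_text()
            new_text = PATTERN.sub("pipelines.airbyte_ci", text)
            if new_text != text:
                changed += 1
                if not dry_run:
                    path.write_text(new_text)
        return changed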
.../connectors/bump_version/__init__.py | 0 .../connectors/bump_version/commands.py | 6 +++--- .../connectors/bump_version/pipeline.py | 4 ++-- .../connectors/commands.py | 14 +++++++------- .../connectors/context.py | 2 +- .../connectors/list/__init__.py | 0 .../connectors/list/commands.py | 0 .../connectors/list/pipeline.py | 0 .../connectors/migrate_to_base_image/__init__.py | 0 .../connectors/migrate_to_base_image/commands.py | 6 +++--- .../connectors/migrate_to_base_image/pipeline.py | 6 +++--- .../connectors/pipeline.py | 4 ++-- .../connectors/publish/__init__.py | 0 .../connectors/publish/commands.py | 6 +++--- .../connectors/publish/context.py | 2 +- .../connectors/publish/pipeline.py | 8 ++++---- .../connectors/reports.py | 2 +- .../connectors/test/__init__.py | 0 .../connectors/test/commands.py | 6 +++--- .../connectors/test/pipeline.py | 0 .../connectors/test/steps/__init__.py | 12 ++++++------ .../connectors/test/steps/common.py | 0 .../connectors/test/steps/java_connectors.py | 10 +++++----- .../connectors/test/steps/python_connectors.py | 6 +++--- .../test/steps/templates/test_report.html.j2 | 0 .../connectors/upgrade_base_image/__init__.py | 0 .../connectors/upgrade_base_image/commands.py | 6 +++--- .../connectors/upgrade_base_image/pipeline.py | 0 .../metadata/__init__.py | 0 .../metadata/commands.py | 2 +- .../metadata/pipeline.py | 6 +++--- .../{pipeline => airbyte_ci}/steps/__init__.py | 0 .../{pipeline => airbyte_ci}/steps/docker.py | 0 .../{pipeline => airbyte_ci}/steps/git.py | 0 .../{pipeline => airbyte_ci}/steps/gradle.py | 0 .../{pipeline => airbyte_ci}/steps/no_op.py | 0 .../{pipeline => airbyte_ci}/steps/poetry.py | 0 .../{pipeline => airbyte_ci}/test/__init__.py | 0 .../{pipeline => airbyte_ci}/test/commands.py | 2 +- .../{pipeline => airbyte_ci}/test/pipeline.py | 0 .../pipelines/pipelines/cli/airbyte_ci.py | 6 +++--- .../pipelines/dagger/actions/connector/hooks.py | 2 +- .../dagger/actions/connector/normalization.py | 2 +- .../pipelines/dagger/actions/python/common.py | 2 +- .../pipelines/dagger/actions/python/pipx.py | 2 +- .../pipelines/dagger/actions/python/poetry.py | 2 +- .../pipelines/dagger/actions/secrets.py | 2 +- .../pipelines/dagger/actions/system/docker.py | 2 +- .../dagger/containers/internal_tools.py | 2 +- .../pipelines/dagger/containers/java.py | 2 +- .../pipelines/dagger/containers/python.py | 2 +- .../connectors/pipelines/pipelines/hacks.py | 2 +- .../pipelines/pipelines/helpers/utils.py | 2 +- .../tests/test_actions/test_environments.py | 2 +- .../tests/test_builds/test_python_connectors.py | 4 ++-- .../test_commands/test_groups/test_connectors.py | 16 ++++++++-------- .../connectors/pipelines/tests/test_gradle.py | 2 +- .../connectors/pipelines/tests/test_publish.py | 2 +- .../tests/test_steps/test_simple_docker_step.py | 2 +- .../pipelines/tests/test_tests/test_common.py | 2 +- .../tests/test_tests/test_python_connectors.py | 6 +++--- 72 files changed, 104 insertions(+), 104 deletions(-) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/commands.py (89%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/pipeline.py (100%) rename 
airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/steps/__init__.py (84%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/steps/build_customization.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/steps/common.py (98%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/steps/java_connectors.py (93%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/steps/normalization.py (95%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/builds/steps/python_connectors.py (95%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/bump_version/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/bump_version/commands.py (89%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/bump_version/pipeline.py (98%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/commands.py (94%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/context.py (99%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/list/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/list/commands.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/list/pipeline.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/migrate_to_base_image/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/migrate_to_base_image/commands.py (90%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/migrate_to_base_image/pipeline.py (98%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/pipeline.py (97%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/publish/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/publish/commands.py (94%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/publish/context.py (98%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/publish/pipeline.py (98%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/reports.py (99%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/commands.py (94%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/pipeline.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/steps/__init__.py (90%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/steps/common.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/steps/java_connectors.py (94%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/steps/python_connectors.py (97%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/test/steps/templates/test_report.html.j2 
(100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/upgrade_base_image/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/upgrade_base_image/commands.py (90%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/connectors/upgrade_base_image/pipeline.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/metadata/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/metadata/commands.py (91%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/metadata/pipeline.py (96%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/steps/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/steps/docker.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/steps/git.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/steps/gradle.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/steps/no_op.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/steps/poetry.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/test/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/test/commands.py (92%) rename airbyte-ci/connectors/pipelines/pipelines/{pipeline => airbyte_ci}/test/pipeline.py (100%) diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/commands.py similarity index 89% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/commands.py index 35bb35ef1b795..d4aacdc6a3200 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/commands.py @@ -5,9 +5,9 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.builds.steps import run_connector_build_pipeline -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.pipeline import 
run_connectors_pipelines +from pipelines.airbyte_ci.connectors.builds.steps import run_connector_build_pipeline +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines @click.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/pipeline.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/pipeline.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/__init__.py similarity index 84% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/__init__.py index a0e0d82a687f5..9332830f7faef 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/__init__.py @@ -10,12 +10,12 @@ import anyio from connector_ops.utils import ConnectorLanguage from pipelines.models.steps import StepResult -from pipelines.pipeline.connectors.builds.steps import python_connectors -from pipelines.pipeline.connectors.builds.steps.common import LoadContainerToLocalDockerHost, StepStatus +from pipelines.airbyte_ci.connectors.builds.steps import python_connectors +from pipelines.airbyte_ci.connectors.builds.steps.common import LoadContainerToLocalDockerHost, StepStatus from pipelines.consts import LOCAL_BUILD_PLATFORM -from pipelines.pipeline.connectors.builds.steps import java_connectors -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.reports import ConnectorReport +from pipelines.airbyte_ci.connectors.builds.steps import java_connectors +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.reports import ConnectorReport class NoBuildStepForLanguageError(Exception): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/build_customization.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/build_customization.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/build_customization.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/common.py similarity index 98% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/common.py index bfb045a372e1b..5bc6b90140006 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/common.py @@ -10,7 +10,7 @@ from pipelines.consts import BUILD_PLATFORMS from 
pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext class BuildConnectorImagesBase(Step, ABC): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/java_connectors.py similarity index 93% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/java_connectors.py index 4e0376085138a..49090fca9b2d6 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/java_connectors.py @@ -8,9 +8,9 @@ from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.dagger.containers import java from pipelines.models.steps import StepResult, StepStatus -from pipelines.pipeline.connectors.builds.steps.common import BuildConnectorImagesBase -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.steps.gradle import GradleTask +from pipelines.airbyte_ci.connectors.builds.steps.common import BuildConnectorImagesBase +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.steps.gradle import GradleTask class BuildConnectorDistributionTar(GradleTask): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/normalization.py similarity index 95% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/normalization.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/normalization.py index d238cb71a28fb..ab395c19753f0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/normalization.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/normalization.py @@ -5,7 +5,7 @@ from dagger import Platform from pipelines.dagger.actions.connector import normalization from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext # TODO this class could be deleted diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/python_connectors.py similarity index 95% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/python_connectors.py index b5ea33d95c00a..738641c4ac2e4 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/builds/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/python_connectors.py @@ -6,9 +6,9 @@ from dagger import Container, Platform from pipelines.dagger.actions.python.common import apply_python_development_overrides, with_python_connector_installed from pipelines.models.steps import 
StepResult -from pipelines.pipeline.connectors.builds.steps import build_customization -from pipelines.pipeline.connectors.builds.steps.common import BuildConnectorImagesBase -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.builds.steps import build_customization +from pipelines.airbyte_ci.connectors.builds.steps.common import BuildConnectorImagesBase +from pipelines.airbyte_ci.connectors.context import ConnectorContext class BuildConnectorImages(BuildConnectorImagesBase): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py similarity index 89% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py index 64a891bfcf769..908d4305e6cd7 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py @@ -5,9 +5,9 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.bump_version.pipeline import run_connector_version_bump_pipeline -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines +from pipelines.airbyte_ci.connectors.bump_version.pipeline import run_connector_version_bump_pipeline +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines @click.command(cls=DaggerPipelineCommand, short_help="Bump a connector version: update metadata.yaml and changelog.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py similarity index 98% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py index 0d98337c9c5b0..0d11e733b82f7 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/bump_version/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py @@ -10,8 +10,8 @@ from pipelines import consts from pipelines.helpers.connectors import metadata_change_helpers from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.reports import ConnectorReport +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.reports import ConnectorReport def get_bumped_version(version: str, bump_type: str) -> str: diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py 
b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py similarity index 94% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py index 6b849c0729072..8e85c2ef183fb 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py @@ -10,13 +10,13 @@ from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo from pipelines import main_logger from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors -from pipelines.pipeline.connectors.builds.commands import build -from pipelines.pipeline.connectors.bump_version.commands import bump_version -from pipelines.pipeline.connectors.list.commands import list -from pipelines.pipeline.connectors.migrate_to_base_image.commands import migrate_to_base_image -from pipelines.pipeline.connectors.publish.commands import publish -from pipelines.pipeline.connectors.test.commands import test -from pipelines.pipeline.connectors.upgrade_base_image.commands import upgrade_base_image +from pipelines.airbyte_ci.connectors.builds.commands import build +from pipelines.airbyte_ci.connectors.bump_version.commands import bump_version +from pipelines.airbyte_ci.connectors.list.commands import list +from pipelines.airbyte_ci.connectors.migrate_to_base_image.commands import migrate_to_base_image +from pipelines.airbyte_ci.connectors.publish.commands import publish +from pipelines.airbyte_ci.connectors.test.commands import test +from pipelines.airbyte_ci.connectors.upgrade_base_image.commands import upgrade_base_image ALL_CONNECTORS = get_all_connectors_in_repo() diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py similarity index 99% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py index 08f88eb66d080..56c97b1f3efba 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py @@ -19,7 +19,7 @@ from pipelines.helpers.slack import send_message_to_webhook from pipelines.helpers.utils import METADATA_FILE_NAME from pipelines.models.contexts import PipelineContext -from pipelines.pipeline.connectors.reports import ConnectorReport +from pipelines.airbyte_ci.connectors.reports import ConnectorReport class ConnectorContext(PipelineContext): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/list/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/list/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/list/commands.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/list/commands.py diff --git 
a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/list/pipeline.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/list/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/list/pipeline.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py similarity index 90% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py index e7b5d76e582ea..d7541132f93d9 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py @@ -5,9 +5,9 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline -from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline +from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines @click.command( diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py similarity index 98% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py index 3e80b8cfb8b7b..8c037fb28a0cd 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/migrate_to_base_image/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py @@ -13,9 +13,9 @@ from pipelines import consts from pipelines.helpers.connectors import metadata_change_helpers from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.pipeline.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version -from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext -from pipelines.pipeline.connectors.reports import ConnectorReport +from pipelines.airbyte_ci.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version +from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext 
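Several of the modules moving in this patch revolve around get_bumped_version, imported a few lines up. For readers skimming the rename, a sketch of what a helper with that contract typically does; this illustrates the semver-bump idea and is not a copy of bump_version/pipeline.py:

    def get_bumped_version(version: str, bump_type: str) -> str:
        """Bump a MAJOR.MINOR.PATCH string; lower-order fields reset to 0."""
        major, minor, patch = (int(part) for part in version.split("."))
        if bump_type == "major":
            return f"{major + 1}.0.0"
        if bump_type == "minor":
            return f"{major}.{minor + 1}.0"
        if bump_type == "patch":
            return f"{major}.{minor}.{patch + 1}"
        raise ValueError(f"Unknown bump type: {bump_type}")

    assert get_bumped_version("0.2.1", "minor") == "0.3.0"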
+from pipelines.airbyte_ci.connectors.reports import ConnectorReport class UpgradeBaseImageMetadata(Step): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/pipeline.py similarity index 97% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/pipeline.py index e53a17645a141..82f61a1963934 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/pipeline.py @@ -17,8 +17,8 @@ from pipelines.helpers.utils import create_and_open_file from pipelines.models.reports import Report from pipelines.models.steps import StepResult, StepStatus -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.steps.no_op import NoOpStep +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.steps.no_op import NoOpStep GITHUB_GLOBAL_CONTEXT = "[POC please ignore] Connectors CI" GITHUB_GLOBAL_DESCRIPTION = "Running connectors tests" diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/commands.py similarity index 94% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/commands.py index be32e6fbf9070..bd91b97cabd78 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/commands.py @@ -7,9 +7,9 @@ from pipelines import main_logger from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.consts import ContextState -from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -from pipelines.pipeline.connectors.publish.context import PublishConnectorContext -from pipelines.pipeline.connectors.publish.pipeline import reorder_contexts, run_connector_publish_pipeline +from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines +from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext +from pipelines.airbyte_ci.connectors.publish.pipeline import reorder_contexts, run_connector_publish_pipeline @click.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py similarity index 98% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py index c5c839ef746e7..241250c9a0055 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/context.py +++ 
b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py @@ -12,7 +12,7 @@ from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.helpers.gcs import sanitize_gcs_credentials from pipelines.helpers.utils import format_duration -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext class PublishConnectorContext(ConnectorContext): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py similarity index 98% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py index bb0f79e61772c..005b6ca70d5a0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/publish/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py @@ -13,10 +13,10 @@ from pipelines.dagger.actions.remote_storage import upload_to_gcs from pipelines.dagger.actions.system import docker from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.pipeline.connectors.builds import steps -from pipelines.pipeline.connectors.publish.context import PublishConnectorContext -from pipelines.pipeline.connectors.reports import ConnectorReport -from pipelines.pipeline.metadata.pipeline import MetadataUpload, MetadataValidation +from pipelines.airbyte_ci.connectors.builds import steps +from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext +from pipelines.airbyte_ci.connectors.reports import ConnectorReport +from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload, MetadataValidation from pydantic import ValidationError diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py similarity index 99% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py index 4a934b67631ea..4762f508d30d0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/reports.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/reports.py @@ -103,7 +103,7 @@ def post_comment_on_pr(self) -> None: async def to_html(self) -> str: env = Environment( - loader=PackageLoader("pipelines.pipeline.connectors.test.steps"), + loader=PackageLoader("pipelines.airbyte_ci.connectors.test.steps"), autoescape=select_autoescape(), trim_blocks=False, lstrip_blocks=True, diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/commands.py similarity index 94% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/commands.py index 
150f82243c639..fbf650bbcf885 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/commands.py @@ -10,9 +10,9 @@ from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand from pipelines.consts import ContextState from pipelines.helpers.github import update_global_commit_status_check_for_tests -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines -from pipelines.pipeline.connectors.test.steps import run_connector_test_pipeline +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines +from pipelines.airbyte_ci.connectors.test.steps import run_connector_test_pipeline @click.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/pipeline.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/pipeline.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/__init__.py similarity index 90% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/__init__.py index baf8381bc7cc1..c655e8104ba38 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/__init__.py @@ -10,12 +10,12 @@ import asyncer from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage from pipelines.models.steps import StepResult -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.reports import ConnectorReport -from pipelines.pipeline.connectors.test.steps import java_connectors -from pipelines.pipeline.metadata.pipeline import MetadataValidation -from pipelines.pipeline.connectors.test.steps import python_connectors -from pipelines.pipeline.connectors.test.steps.common import QaChecks, VersionFollowsSemverCheck, VersionIncrementCheck +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.reports import ConnectorReport +from pipelines.airbyte_ci.connectors.test.steps import java_connectors +from pipelines.airbyte_ci.metadata.pipeline import MetadataValidation +from pipelines.airbyte_ci.connectors.test.steps import python_connectors +from pipelines.airbyte_ci.connectors.test.steps.common import QaChecks, VersionFollowsSemverCheck, VersionIncrementCheck LANGUAGE_MAPPING = { "run_all_tests": { diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/common.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/common.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/common.py diff --git 
a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py similarity index 94% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py index 64c32104402cf..517db98175fc8 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py @@ -14,15 +14,15 @@ from pipelines.dagger.actions.system import docker from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import StepResult, StepStatus -from pipelines.pipeline.connectors.builds.steps.java_connectors import ( +from pipelines.airbyte_ci.connectors.builds.steps.java_connectors import ( BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path, ) -from pipelines.pipeline.connectors.builds.steps.normalization import BuildOrPullNormalization -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests -from pipelines.pipeline.steps.gradle import GradleTask +from pipelines.airbyte_ci.connectors.builds.steps.normalization import BuildOrPullNormalization +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests +from pipelines.airbyte_ci.steps.gradle import GradleTask class IntegrationTests(GradleTask): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py similarity index 97% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py index bc9df49efce1b..58ae052cdcf7a 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py @@ -15,9 +15,9 @@ from pipelines.dagger.actions import secrets from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.pipeline.connectors.builds.steps.python_connectors import BuildConnectorImages -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.test.steps.common import AcceptanceTests +from pipelines.airbyte_ci.connectors.builds.steps.python_connectors import BuildConnectorImages +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests class CodeFormatChecks(Step): diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/templates/test_report.html.j2 b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/templates/test_report.html.j2 similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/test/steps/templates/test_report.html.j2 rename to 
airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/templates/test_report.html.j2 diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/commands.py similarity index 90% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/commands.py index abf540e4d7d5e..04f29bda54b95 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/commands.py @@ -5,9 +5,9 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.migrate_to_base_image.pipeline import run_connector_base_image_upgrade_pipeline -from pipelines.pipeline.connectors.pipeline import run_connectors_pipelines +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.migrate_to_base_image.pipeline import run_connector_base_image_upgrade_pipeline +from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines @click.command(cls=DaggerPipelineCommand, short_help="Upgrades the base image version used by the selected connectors.") diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/pipeline.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/connectors/upgrade_base_image/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/pipeline.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/commands.py similarity index 91% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/commands.py index 0c719b8aab76b..9a1e5422ae992 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/commands.py @@ -5,7 +5,7 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.pipeline.metadata.pipeline import run_metadata_orchestrator_deploy_pipeline +from 
pipelines.airbyte_ci.metadata.pipeline import run_metadata_orchestrator_deploy_pipeline # MAIN GROUP diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/pipeline.py similarity index 96% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/pipeline.py index 685127741c258..dbe03530dd1e5 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/metadata/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/pipeline.py @@ -13,9 +13,9 @@ from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable from pipelines.models.reports import Report from pipelines.models.steps import MountPath, Step, StepResult -from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext -from pipelines.pipeline.steps.docker import SimpleDockerStep -from pipelines.pipeline.steps.poetry import PoetryRunStep +from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext +from pipelines.airbyte_ci.steps.docker import SimpleDockerStep +from pipelines.airbyte_ci.steps.poetry import PoetryRunStep # STEPS diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/docker.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/docker.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/docker.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/git.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/git.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/git.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/gradle.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/gradle.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/no_op.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/no_op.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/no_op.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/poetry.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/steps/poetry.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/steps/poetry.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/test/__init__.py rename to 
airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/commands.py similarity index 92% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/commands.py index d73b4270b31b3..97a370ac3f976 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/commands.py @@ -4,7 +4,7 @@ import anyio import click -from pipelines.pipeline.test.pipeline import run_test +from pipelines.airbyte_ci.test.pipeline import run_test @click.command() diff --git a/airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/pipeline/test/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py index f9e9376c07bae..a2b4970633103 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py +++ b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py @@ -21,9 +21,9 @@ get_modified_files_in_pull_request, ) from pipelines.helpers.utils import get_current_epoch_time, transform_strs_to_paths -from pipelines.pipeline.connectors.commands import connectors -from pipelines.pipeline.metadata.commands import metadata -from pipelines.pipeline.test.commands import test +from pipelines.airbyte_ci.connectors.commands import connectors +from pipelines.airbyte_ci.metadata.commands import metadata +from pipelines.airbyte_ci.test.commands import test # HELPERS diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py index f785daa1e4731..54fb8f674eabf 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/hooks.py @@ -6,7 +6,7 @@ from dagger import Container from dagger.engine._version import CLI_VERSION as dagger_engine_version -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext async def finalize_build(context: ConnectorContext, connector_container: Container) -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py index 411ccd64abdcb..7defa76027eb8 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/connector/normalization.py @@ -3,7 +3,7 @@ # from dagger import Container, Platform -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext BASE_DESTINATION_NORMALIZATION_BUILD_CONFIGURATION = { "destination-clickhouse": { diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py index 64d4ac14397ea..afadd3f15c224 100644 --- 
a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py @@ -9,7 +9,7 @@ from dagger import Container, Directory from pipelines.dagger.containers.python import with_python_base, with_testing_dependencies from pipelines.helpers.utils import check_path_in_workdir, get_file_contents -from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext def with_python_package( diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py index 14720a20020e5..3cb421cc9f66c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py @@ -7,7 +7,7 @@ from dagger import Container from pipelines.dagger.actions.python.common import with_pip_packages, with_python_package from pipelines.dagger.actions.python.poetry import find_local_dependencies_in_pyproject_toml -from pipelines.pipeline.connectors.context import PipelineContext +from pipelines.airbyte_ci.connectors.context import PipelineContext def with_pipx(base_python_container: Container) -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py index 4a504e9684182..6d2af0b2b3c80 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py @@ -12,7 +12,7 @@ from pipelines.dagger.actions.system.common import with_debian_packages from pipelines.dagger.containers.python import with_python_base from pipelines.helpers.utils import get_file_contents -from pipelines.pipeline.connectors.context import PipelineContext +from pipelines.airbyte_ci.connectors.context import PipelineContext async def find_local_dependencies_in_pyproject_toml( diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py index ec401b3506b22..b497832c91819 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/secrets.py @@ -13,7 +13,7 @@ if TYPE_CHECKING: from dagger import Container - from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext + from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext async def get_secrets_to_mask(ci_credentials_with_downloaded_secrets: Container) -> list[str]: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py index 767d6d35ffe44..c29efdf604993 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py @@ -10,7 +10,7 @@ from pipelines import consts from pipelines.consts import DOCKER_HOST_NAME, DOCKER_HOST_PORT, DOCKER_TMP_VOLUME_NAME from pipelines.helpers.utils import sh_dash_c -from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext def 
with_global_dockerd_service(dagger_client: Client) -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py index 300a1f59a657f..3531d18bc32d0 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py @@ -6,7 +6,7 @@ from pipelines.consts import INTERNAL_TOOL_PATHS from pipelines.dagger.actions.python.pipx import with_installed_pipx_package from pipelines.dagger.containers.python import with_python_base -from pipelines.pipeline.connectors.context import PipelineContext +from pipelines.airbyte_ci.connectors.context import PipelineContext async def with_ci_credentials(context: PipelineContext, gsm_secret: Secret) -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py index aa75462f7ea4b..5f76a111ac3f6 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py @@ -7,7 +7,7 @@ from pipelines.dagger.actions.connector.hooks import finalize_build from pipelines.dagger.actions.connector.normalization import DESTINATION_NORMALIZATION_BUILD_CONFIGURATION, with_normalization from pipelines.helpers.utils import sh_dash_c -from pipelines.pipeline.connectors.context import ConnectorContext, PipelineContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext def with_integration_base(context: PipelineContext, build_platform: Platform) -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py index bb8c75a2f922e..0cebf299fd499 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py +++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py @@ -5,7 +5,7 @@ from dagger import CacheVolume, Container from pipelines.consts import CONNECTOR_TESTING_REQUIREMENTS, LICENSE_SHORT_FILE_PATH, PYPROJECT_TOML_FILE_PATH from pipelines.helpers.utils import sh_dash_c -from pipelines.pipeline.connectors.context import PipelineContext +from pipelines.airbyte_ci.connectors.context import PipelineContext def with_python_base(context: PipelineContext, python_version: str = "3.10") -> Container: diff --git a/airbyte-ci/connectors/pipelines/pipelines/hacks.py b/airbyte-ci/connectors/pipelines/pipelines/hacks.py index 1053b3ad8d858..4cdac0926b1df 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/hacks.py +++ b/airbyte-ci/connectors/pipelines/pipelines/hacks.py @@ -13,7 +13,7 @@ if TYPE_CHECKING: from dagger import Client, Container - from pipelines.pipeline.connectors.context import ConnectorContext + from pipelines.airbyte_ci.connectors.context import ConnectorContext async def cache_latest_cdk(dagger_client: Client, pip_cache_volume_name: str = "pip_cache") -> None: diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py index cab5d2f63ec58..56f2fc61ebbc9 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py +++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py @@ -22,7 +22,7 @@ from more_itertools import chunked if TYPE_CHECKING: - from pipelines.pipeline.connectors.context import 
ConnectorContext + from pipelines.airbyte_ci.connectors.context import ConnectorContext DAGGER_CONFIG = Config(log_output=sys.stderr) AIRBYTE_REPO_URL = "https://github.com/airbytehq/airbyte.git" diff --git a/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py b/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py index 5999c707fd03d..f1874c97a7ac8 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py +++ b/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py @@ -4,7 +4,7 @@ import pytest from pipelines.dagger.actions.python import common -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.context import ConnectorContext pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py index 3043c218aa3c6..4f6da0b681a61 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py @@ -6,8 +6,8 @@ import pytest from pipelines.models.steps import StepStatus -from pipelines.pipeline.connectors.builds.steps import build_customization, python_connectors -from pipelines.pipeline.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.builds.steps import build_customization, python_connectors +from pipelines.airbyte_ci.connectors.context import ConnectorContext pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index a78ad0e21b5ea..cb813d798ce5d 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -5,18 +5,18 @@ from typing import Callable import click -import pipelines.pipeline.connectors.builds.commands -import pipelines.pipeline.connectors.commands -import pipelines.pipeline.connectors.publish.commands -import pipelines.pipeline.connectors.test.commands +import pipelines.airbyte_ci.connectors.builds.commands +import pipelines.airbyte_ci.connectors.commands +import pipelines.airbyte_ci.connectors.publish.commands +import pipelines.airbyte_ci.connectors.test.commands import pytest from click.testing import CliRunner from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles -from pipelines.pipeline.connectors import commands as connectors_commands -from pipelines.pipeline.connectors.builds import commands as connectors_build_command -from pipelines.pipeline.connectors.publish import commands as connectors_publish_command -from pipelines.pipeline.connectors.test import commands as connectors_test_command +from pipelines.airbyte_ci.connectors import commands as connectors_commands +from pipelines.airbyte_ci.connectors.builds import commands as connectors_build_command +from pipelines.airbyte_ci.connectors.publish import commands as connectors_publish_command +from pipelines.airbyte_ci.connectors.test import commands as connectors_test_command from tests.utils import pick_a_random_connector diff --git a/airbyte-ci/connectors/pipelines/tests/test_gradle.py b/airbyte-ci/connectors/pipelines/tests/test_gradle.py index 
94bb1ea23f20e..a665731b61817 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_gradle.py +++ b/airbyte-ci/connectors/pipelines/tests/test_gradle.py @@ -7,7 +7,7 @@ import pipelines.helpers.connectors.modifed import pytest from pipelines.models import steps -from pipelines.pipeline.steps import gradle +from pipelines.airbyte_ci.steps import gradle pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py index 2e7d1d55b8d89..0b2b34632bd21 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_publish.py +++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py @@ -9,7 +9,7 @@ import anyio import pytest from pipelines.models.steps import StepStatus -from pipelines.pipeline.connectors.publish import pipeline as publish_pipeline +from pipelines.airbyte_ci.connectors.publish import pipeline as publish_pipeline pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py index 5a2fda1680999..6ee9b689dbd65 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py +++ b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py @@ -8,7 +8,7 @@ from pipelines.helpers.utils import get_exec_result from pipelines.models.contexts import PipelineContext from pipelines.models.steps import MountPath -from pipelines.pipeline.steps.docker import SimpleDockerStep +from pipelines.airbyte_ci.steps.docker import SimpleDockerStep pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py index 8298c3d3953d3..4f56265d9ccf4 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py @@ -14,7 +14,7 @@ from pipelines.dagger.actions.system import docker from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.models.steps import StepStatus -from pipelines.pipeline.connectors.test.steps import common +from pipelines.airbyte_ci.connectors.test.steps import common pytestmark = [ pytest.mark.anyio, diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py index c3307083aa8bd..eb90071e9d292 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py @@ -5,9 +5,9 @@ import pytest from connector_ops.utils import Connector from pipelines.models.steps import StepResult -from pipelines.pipeline.connectors.builds.steps.python_connectors import BuildConnectorImages -from pipelines.pipeline.connectors.context import ConnectorContext -from pipelines.pipeline.connectors.test.steps.python_connectors import UnitTests +from pipelines.airbyte_ci.connectors.builds.steps.python_connectors import BuildConnectorImages +from pipelines.airbyte_ci.connectors.context import ConnectorContext +from pipelines.airbyte_ci.connectors.test.steps.python_connectors import UnitTests pytestmark = [ pytest.mark.anyio, From 7e40db2a665e0442a40385cd124d6e21b6161f51 Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 15:59:17 -0700 Subject: [PATCH 35/38] Update .gitignore --- 
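Note (placed after the `---` marker, so it is not part of the commit message): the bulk of this series mechanically rewrites dotted import prefixes: `pipelines.pipeline.*` becomes `pipelines.airbyte_ci.*` in the hunks above, and `connectors.builds` becomes `connectors.build` and then `connectors.build_image` in the two patches that follow. The patches do not say how the rewrite was produced; the sketch below is one plausible way to script the import fix-ups and is not the tool the author used. `PREFIX` patterns mirror the renames in this series (collapsing the two-step builds rename into one); the script is deliberately naive, rewriting dotted names wherever they appear, and does not perform the `git mv` of the package directories themselves.

# Hypothetical helper, NOT the script used in this series; shown only to
# illustrate how import prefixes like these can be rewritten mechanically.
import re
from pathlib import Path

# (pattern, replacement) pairs, applied in order, mirroring this patch series.
# \b keeps e.g. "pipelines.pipeline" from matching inside a longer dotted name.
REWRITES = [
    (re.compile(r"\bpipelines\.pipeline\b"), "pipelines.airbyte_ci"),
    (re.compile(r"\bpipelines\.airbyte_ci\.connectors\.builds\b"),
     "pipelines.airbyte_ci.connectors.build_image"),
]


def rewrite_imports(root: Path) -> int:
    """Rewrite the mapped prefixes in every .py file under root; return files changed."""
    changed = 0
    for path in root.rglob("*.py"):
        text = path.read_text()
        new_text = text
        for pattern, replacement in REWRITES:
            new_text = pattern.sub(replacement, new_text)
        if new_text != text:
            path.write_text(new_text)
            changed += 1
    return changed


if __name__ == "__main__":
    # e.g. run from the repository root
    print(rewrite_imports(Path("airbyte-ci/connectors/pipelines")))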
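A second note on the hunks above for dagger/actions/secrets.py, hacks.py, and helpers/utils.py: in those modules the ConnectorContext import moves inside an existing `if TYPE_CHECKING:` guard, so the rename only touches a type-only edge and cannot introduce an import cycle at runtime. A minimal, self-contained sketch of that idiom follows; the function name and body are hypothetical, and only the import shape mirrors the patched modules.

from __future__ import annotations  # PEP 563: annotations stay lazy strings at runtime

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated by type checkers only, never at runtime, so this module can
    # annotate with ConnectorContext even when importing it eagerly would be
    # circular (ConnectorContext's own module imports helpers like these).
    from pipelines.airbyte_ci.connectors.context import ConnectorContext


async def run_step(context: ConnectorContext) -> None:
    # Hypothetical body: only annotations may reference ConnectorContext here;
    # using the name in runtime code would raise NameError.
    ...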
airbyte-ci/connectors/pipelines/.gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/airbyte-ci/connectors/pipelines/.gitignore b/airbyte-ci/connectors/pipelines/.gitignore index 6878ec3ac2919..956087dbf4e55 100644 --- a/airbyte-ci/connectors/pipelines/.gitignore +++ b/airbyte-ci/connectors/pipelines/.gitignore @@ -1,2 +1,3 @@ pipeline_reports -.venv \ No newline at end of file +.venv +!pipelines/airbyte_ci/connectors/build/ \ No newline at end of file From e495869ea7381042f9cc850479d6c7c2fa48feec Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 16:01:22 -0700 Subject: [PATCH 36/38] Rename builds to build --- .../airbyte_ci/connectors/{builds => build}/__init__.py | 0 .../airbyte_ci/connectors/{builds => build}/commands.py | 2 +- .../airbyte_ci/connectors/{builds => build}/pipeline.py | 0 .../connectors/{builds => build}/steps/__init__.py | 6 +++--- .../{builds => build}/steps/build_customization.py | 0 .../airbyte_ci/connectors/{builds => build}/steps/common.py | 0 .../connectors/{builds => build}/steps/java_connectors.py | 2 +- .../connectors/{builds => build}/steps/normalization.py | 0 .../connectors/{builds => build}/steps/python_connectors.py | 4 ++-- .../pipelines/pipelines/airbyte_ci/connectors/commands.py | 2 +- .../pipelines/airbyte_ci/connectors/publish/pipeline.py | 2 +- .../airbyte_ci/connectors/test/steps/java_connectors.py | 4 ++-- .../airbyte_ci/connectors/test/steps/python_connectors.py | 2 +- .../pipelines/tests/test_builds/test_python_connectors.py | 2 +- .../tests/test_commands/test_groups/test_connectors.py | 4 ++-- .../pipelines/tests/test_tests/test_python_connectors.py | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/commands.py (96%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/pipeline.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/steps/__init__.py (89%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/steps/build_customization.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/steps/common.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/steps/java_connectors.py (97%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/steps/normalization.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{builds => build}/steps/python_connectors.py (96%) diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/commands.py similarity index 96% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/commands.py index 
d4aacdc6a3200..a1f3d3a2afaf4 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/commands.py @@ -5,7 +5,7 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.airbyte_ci.connectors.builds.steps import run_connector_build_pipeline +from pipelines.airbyte_ci.connectors.build.steps import run_connector_build_pipeline from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/pipeline.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/pipeline.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/__init__.py similarity index 89% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/__init__.py index 9332830f7faef..b5243e8bbf89c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/__init__.py @@ -10,10 +10,10 @@ import anyio from connector_ops.utils import ConnectorLanguage from pipelines.models.steps import StepResult -from pipelines.airbyte_ci.connectors.builds.steps import python_connectors -from pipelines.airbyte_ci.connectors.builds.steps.common import LoadContainerToLocalDockerHost, StepStatus +from pipelines.airbyte_ci.connectors.build.steps import python_connectors +from pipelines.airbyte_ci.connectors.build.steps.common import LoadContainerToLocalDockerHost, StepStatus from pipelines.consts import LOCAL_BUILD_PLATFORM -from pipelines.airbyte_ci.connectors.builds.steps import java_connectors +from pipelines.airbyte_ci.connectors.build.steps import java_connectors from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.reports import ConnectorReport diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/build_customization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/build_customization.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/build_customization.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/build_customization.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/common.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/common.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/common.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/java_connectors.py 
b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/java_connectors.py similarity index 97% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/java_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/java_connectors.py index 49090fca9b2d6..9a584ad3b2141 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/java_connectors.py @@ -8,7 +8,7 @@ from pipelines.consts import LOCAL_BUILD_PLATFORM from pipelines.dagger.containers import java from pipelines.models.steps import StepResult, StepStatus -from pipelines.airbyte_ci.connectors.builds.steps.common import BuildConnectorImagesBase +from pipelines.airbyte_ci.connectors.build.steps.common import BuildConnectorImagesBase from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.steps.gradle import GradleTask diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/normalization.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/normalization.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/normalization.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/python_connectors.py similarity index 96% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/python_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/python_connectors.py index 738641c4ac2e4..f6b82453deb60 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/builds/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/python_connectors.py @@ -6,8 +6,8 @@ from dagger import Container, Platform from pipelines.dagger.actions.python.common import apply_python_development_overrides, with_python_connector_installed from pipelines.models.steps import StepResult -from pipelines.airbyte_ci.connectors.builds.steps import build_customization -from pipelines.airbyte_ci.connectors.builds.steps.common import BuildConnectorImagesBase +from pipelines.airbyte_ci.connectors.build.steps import build_customization +from pipelines.airbyte_ci.connectors.build.steps.common import BuildConnectorImagesBase from pipelines.airbyte_ci.connectors.context import ConnectorContext diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py index 8e85c2ef183fb..d9dd80da091ae 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py @@ -10,7 +10,7 @@ from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo from pipelines import main_logger from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors -from pipelines.airbyte_ci.connectors.builds.commands import build +from 
pipelines.airbyte_ci.connectors.build.commands import build from pipelines.airbyte_ci.connectors.bump_version.commands import bump_version from pipelines.airbyte_ci.connectors.list.commands import list from pipelines.airbyte_ci.connectors.migrate_to_base_image.commands import migrate_to_base_image diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py index 005b6ca70d5a0..47723a20e001c 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py @@ -13,7 +13,7 @@ from pipelines.dagger.actions.remote_storage import upload_to_gcs from pipelines.dagger.actions.system import docker from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.airbyte_ci.connectors.builds import steps +from pipelines.airbyte_ci.connectors.build import steps from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext from pipelines.airbyte_ci.connectors.reports import ConnectorReport from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload, MetadataValidation diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py index 517db98175fc8..74cfad86617a3 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py @@ -14,12 +14,12 @@ from pipelines.dagger.actions.system import docker from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import StepResult, StepStatus -from pipelines.airbyte_ci.connectors.builds.steps.java_connectors import ( +from pipelines.airbyte_ci.connectors.build.steps.java_connectors import ( BuildConnectorDistributionTar, BuildConnectorImages, dist_tar_directory_path, ) -from pipelines.airbyte_ci.connectors.builds.steps.normalization import BuildOrPullNormalization +from pipelines.airbyte_ci.connectors.build.steps.normalization import BuildOrPullNormalization from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests from pipelines.airbyte_ci.steps.gradle import GradleTask diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py index 58ae052cdcf7a..bfd66aacf1d76 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py @@ -15,7 +15,7 @@ from pipelines.dagger.actions import secrets from pipelines.helpers.utils import export_container_to_tarball from pipelines.models.steps import Step, StepResult, StepStatus -from pipelines.airbyte_ci.connectors.builds.steps.python_connectors import BuildConnectorImages +from pipelines.airbyte_ci.connectors.build.steps.python_connectors import BuildConnectorImages from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests diff --git 
a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py index 4f6da0b681a61..f36a8efc175c4 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py @@ -6,7 +6,7 @@ import pytest from pipelines.models.steps import StepStatus -from pipelines.airbyte_ci.connectors.builds.steps import build_customization, python_connectors +from pipelines.airbyte_ci.connectors.build.steps import build_customization, python_connectors from pipelines.airbyte_ci.connectors.context import ConnectorContext pytestmark = [ diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py index cb813d798ce5d..593a95290e3be 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py @@ -5,7 +5,7 @@ from typing import Callable import click -import pipelines.airbyte_ci.connectors.builds.commands +import pipelines.airbyte_ci.connectors.build.commands import pipelines.airbyte_ci.connectors.commands import pipelines.airbyte_ci.connectors.publish.commands import pipelines.airbyte_ci.connectors.test.commands @@ -14,7 +14,7 @@ from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles from pipelines.airbyte_ci.connectors import commands as connectors_commands -from pipelines.airbyte_ci.connectors.builds import commands as connectors_build_command +from pipelines.airbyte_ci.connectors.build import commands as connectors_build_command from pipelines.airbyte_ci.connectors.publish import commands as connectors_publish_command from pipelines.airbyte_ci.connectors.test import commands as connectors_test_command from tests.utils import pick_a_random_connector diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py index eb90071e9d292..da55d0144c7dc 100644 --- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py +++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py @@ -5,7 +5,7 @@ import pytest from connector_ops.utils import Connector from pipelines.models.steps import StepResult -from pipelines.airbyte_ci.connectors.builds.steps.python_connectors import BuildConnectorImages +from pipelines.airbyte_ci.connectors.build.steps.python_connectors import BuildConnectorImages from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.test.steps.python_connectors import UnitTests From eb6dc662d67236366c72934e44217c5d5724162a Mon Sep 17 00:00:00 2001 From: Ben Church Date: Wed, 18 Oct 2023 16:14:38 -0700 Subject: [PATCH 37/38] Rename build to build_image --- airbyte-ci/connectors/pipelines/.gitignore | 1 - .../connectors/{build => build_image}/__init__.py | 0 .../connectors/{build => build_image}/commands.py | 2 +- .../connectors/{build => build_image}/pipeline.py | 0 .../connectors/{build => build_image}/steps/__init__.py | 6 +++--- .../{build => build_image}/steps/build_customization.py | 0 .../connectors/{build => build_image}/steps/common.py | 0 .../{build => build_image}/steps/java_connectors.py | 2 +- 
.../{build => build_image}/steps/normalization.py | 0 .../{build => build_image}/steps/python_connectors.py | 4 ++-- .../pipelines/pipelines/airbyte_ci/connectors/commands.py | 2 +- .../pipelines/airbyte_ci/connectors/publish/pipeline.py | 2 +- .../airbyte_ci/connectors/test/steps/java_connectors.py | 4 ++-- .../airbyte_ci/connectors/test/steps/python_connectors.py | 2 +- .../pipelines/pipelines/airbyte_ci/test/pipeline.py | 2 +- .../pipelines/tests/test_builds/test_python_connectors.py | 2 +- .../tests/test_commands/test_groups/test_connectors.py | 4 ++-- .../pipelines/tests/test_tests/test_python_connectors.py | 2 +- 18 files changed, 17 insertions(+), 18 deletions(-) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/__init__.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/commands.py (96%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/pipeline.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/steps/__init__.py (88%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/steps/build_customization.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/steps/common.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/steps/java_connectors.py (96%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/steps/normalization.py (100%) rename airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/{build => build_image}/steps/python_connectors.py (96%) diff --git a/airbyte-ci/connectors/pipelines/.gitignore b/airbyte-ci/connectors/pipelines/.gitignore index 956087dbf4e55..d17bbbefa1938 100644 --- a/airbyte-ci/connectors/pipelines/.gitignore +++ b/airbyte-ci/connectors/pipelines/.gitignore @@ -1,3 +1,2 @@ pipeline_reports .venv -!pipelines/airbyte_ci/connectors/build/ \ No newline at end of file diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/__init__.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/__init__.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py similarity index 96% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/commands.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py index a1f3d3a2afaf4..50736976e9535 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/commands.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py @@ -5,7 +5,7 @@ import anyio import click from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand -from pipelines.airbyte_ci.connectors.build.steps import run_connector_build_pipeline +from pipelines.airbyte_ci.connectors.build_image.steps import run_connector_build_pipeline from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.pipeline import 
run_connectors_pipelines diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/pipeline.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/pipeline.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/pipeline.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py similarity index 88% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/__init__.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py index b5243e8bbf89c..5bbc035fe1bd3 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/__init__.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py @@ -10,10 +10,10 @@ import anyio from connector_ops.utils import ConnectorLanguage from pipelines.models.steps import StepResult -from pipelines.airbyte_ci.connectors.build.steps import python_connectors -from pipelines.airbyte_ci.connectors.build.steps.common import LoadContainerToLocalDockerHost, StepStatus +from pipelines.airbyte_ci.connectors.build_image.steps import python_connectors +from pipelines.airbyte_ci.connectors.build_image.steps.common import LoadContainerToLocalDockerHost, StepStatus from pipelines.consts import LOCAL_BUILD_PLATFORM -from pipelines.airbyte_ci.connectors.build.steps import java_connectors +from pipelines.airbyte_ci.connectors.build_image.steps import java_connectors from pipelines.airbyte_ci.connectors.context import ConnectorContext from pipelines.airbyte_ci.connectors.reports import ConnectorReport diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/build_customization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/build_customization.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/build_customization.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/build_customization.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py similarity index 100% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/common.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py similarity index 96% rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/java_connectors.py rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py index 9a584ad3b2141..7738884984c38 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/java_connectors.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py @@ -8,7 +8,7 @@ from pipelines.consts import 
LOCAL_BUILD_PLATFORM
 from pipelines.dagger.containers import java
 from pipelines.models.steps import StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.build.steps.common import BuildConnectorImagesBase
+from pipelines.airbyte_ci.connectors.build_image.steps.common import BuildConnectorImagesBase
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.steps.gradle import GradleTask
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
similarity index 100%
rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/normalization.py
rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/python_connectors.py
similarity index 96%
rename from airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/python_connectors.py
rename to airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/python_connectors.py
index f6b82453deb60..6006b3f913fea 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build/steps/python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/python_connectors.py
@@ -6,8 +6,8 @@
 from dagger import Container, Platform
 from pipelines.dagger.actions.python.common import apply_python_development_overrides, with_python_connector_installed
 from pipelines.models.steps import StepResult
-from pipelines.airbyte_ci.connectors.build.steps import build_customization
-from pipelines.airbyte_ci.connectors.build.steps.common import BuildConnectorImagesBase
+from pipelines.airbyte_ci.connectors.build_image.steps import build_customization
+from pipelines.airbyte_ci.connectors.build_image.steps.common import BuildConnectorImagesBase
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py
index d9dd80da091ae..2f8f906c133a3 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py
@@ -10,7 +10,7 @@
 from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo
 from pipelines import main_logger
 from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors
-from pipelines.airbyte_ci.connectors.build.commands import build
+from pipelines.airbyte_ci.connectors.build_image.commands import build
 from pipelines.airbyte_ci.connectors.bump_version.commands import bump_version
 from pipelines.airbyte_ci.connectors.list.commands import list
 from pipelines.airbyte_ci.connectors.migrate_to_base_image.commands import migrate_to_base_image
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
index 47723a20e001c..b90127607daa9 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
@@ -13,7 +13,7 @@
 from pipelines.dagger.actions.remote_storage import upload_to_gcs
 from pipelines.dagger.actions.system import docker
 from pipelines.models.steps import Step, StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.build import steps
+from pipelines.airbyte_ci.connectors.build_image import steps
 from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext
 from pipelines.airbyte_ci.connectors.reports import ConnectorReport
 from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload, MetadataValidation
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
index 74cfad86617a3..414be8cf6bae6 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
@@ -14,12 +14,12 @@
 from pipelines.dagger.actions.system import docker
 from pipelines.helpers.utils import export_container_to_tarball
 from pipelines.models.steps import StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.build.steps.java_connectors import (
+from pipelines.airbyte_ci.connectors.build_image.steps.java_connectors import (
     BuildConnectorDistributionTar,
     BuildConnectorImages,
     dist_tar_directory_path,
 )
-from pipelines.airbyte_ci.connectors.build.steps.normalization import BuildOrPullNormalization
+from pipelines.airbyte_ci.connectors.build_image.steps.normalization import BuildOrPullNormalization
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests
 from pipelines.airbyte_ci.steps.gradle import GradleTask
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
index bfd66aacf1d76..307ede47ac973 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
@@ -15,7 +15,7 @@
 from pipelines.dagger.actions import secrets
 from pipelines.helpers.utils import export_container_to_tarball
 from pipelines.models.steps import Step, StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.build.steps.python_connectors import BuildConnectorImages
+from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py
index 68e9dbb1386d2..16a8e322d7c6d 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/test/pipeline.py
@@ -48,7 +48,7 @@ async def run_test(poetry_package_path: str, test_directory: str) -> bool:
         "/airbyte",
         dagger_client.host().directory(
             ".",
-            exclude=["**/__pycache__", "**/.pytest_cache", "**/.venv", "**.log", "**/build", "**/.gradle"],
+            exclude=["**/__pycache__", "**/.pytest_cache", "**/.venv", "**.log", "**/.gradle"],
             include=directories_to_mount,
         ),
     )
diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
index f36a8efc175c4..aa602f2d7a1d9 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
@@ -6,7 +6,7 @@
 import pytest
 from pipelines.models.steps import StepStatus
-from pipelines.airbyte_ci.connectors.build.steps import build_customization, python_connectors
+from pipelines.airbyte_ci.connectors.build_image.steps import build_customization, python_connectors
 from pipelines.airbyte_ci.connectors.context import ConnectorContext

 pytestmark = [
diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py
index 593a95290e3be..c080d0e92f9d5 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py
@@ -5,7 +5,7 @@
 from typing import Callable

 import click
-import pipelines.airbyte_ci.connectors.build.commands
+import pipelines.airbyte_ci.connectors.build_image.commands
 import pipelines.airbyte_ci.connectors.commands
 import pipelines.airbyte_ci.connectors.publish.commands
 import pipelines.airbyte_ci.connectors.test.commands
@@ -14,7 +14,7 @@
 from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage
 from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
 from pipelines.airbyte_ci.connectors import commands as connectors_commands
-from pipelines.airbyte_ci.connectors.build import commands as connectors_build_command
+from pipelines.airbyte_ci.connectors.build_image import commands as connectors_build_command
 from pipelines.airbyte_ci.connectors.publish import commands as connectors_publish_command
 from pipelines.airbyte_ci.connectors.test import commands as connectors_test_command
 from tests.utils import pick_a_random_connector
diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
index da55d0144c7dc..171e6ce865e7c 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
@@ -5,7 +5,7 @@
 import pytest
 from connector_ops.utils import Connector
 from pipelines.models.steps import StepResult
-from pipelines.airbyte_ci.connectors.build.steps.python_connectors import BuildConnectorImages
+from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.test.steps.python_connectors import UnitTests

From aab73debbc526f3ebd46416b3d4b38cf591a20ee Mon Sep 17 00:00:00 2001
From: bnchrch
Date: Wed, 18 Oct 2023 23:29:23 +0000
Subject: [PATCH 38/38] Automated Commit - Formatting Changes

---
 .../airbyte_ci/connectors/build_image/commands.py | 2 +-
 .../airbyte_ci/connectors/build_image/steps/common.py | 2 +-
 .../connectors/build_image/steps/java_connectors.py | 6 +++---
 .../connectors/build_image/steps/normalization.py | 2 +-
 .../connectors/build_image/steps/python_connectors.py | 4 ++--
 .../airbyte_ci/connectors/bump_version/commands.py | 2 +-
 .../airbyte_ci/connectors/bump_version/pipeline.py | 4 ++--
 .../pipelines/airbyte_ci/connectors/commands.py | 2 +-
 .../pipelines/airbyte_ci/connectors/context.py | 2 +-
 .../connectors/migrate_to_base_image/commands.py | 2 +-
 .../connectors/migrate_to_base_image/pipeline.py | 4 ++--
 .../pipelines/airbyte_ci/connectors/pipeline.py | 4 ++--
 .../airbyte_ci/connectors/publish/commands.py | 4 ++--
 .../pipelines/airbyte_ci/connectors/publish/context.py | 2 +-
 .../airbyte_ci/connectors/publish/pipeline.py | 6 +++---
 .../pipelines/airbyte_ci/connectors/test/commands.py | 6 +++---
 .../connectors/test/steps/java_connectors.py | 10 +++++-----
 .../connectors/test/steps/python_connectors.py | 6 +++---
 .../connectors/upgrade_base_image/commands.py | 2 +-
 .../pipelines/airbyte_ci/metadata/commands.py | 2 +-
 .../pipelines/airbyte_ci/metadata/pipeline.py | 6 +++---
 .../connectors/pipelines/pipelines/cli/airbyte_ci.py | 6 +++---
 .../pipelines/dagger/actions/python/common.py | 2 +-
 .../pipelines/pipelines/dagger/actions/python/pipx.py | 2 +-
 .../pipelines/dagger/actions/python/poetry.py | 2 +-
 .../pipelines/dagger/actions/system/docker.py | 2 +-
 .../pipelines/dagger/containers/internal_tools.py | 2 +-
 .../pipelines/pipelines/dagger/containers/java.py | 2 +-
 .../pipelines/pipelines/dagger/containers/python.py | 2 +-
 .../pipelines/tests/test_actions/test_environments.py | 2 +-
 .../tests/test_builds/test_python_connectors.py | 2 +-
 .../tests/test_commands/test_groups/test_connectors.py | 2 +-
 airbyte-ci/connectors/pipelines/tests/test_gradle.py | 2 +-
 airbyte-ci/connectors/pipelines/tests/test_publish.py | 2 +-
 .../tests/test_steps/test_simple_docker_step.py | 2 +-
 .../pipelines/tests/test_tests/test_common.py | 2 +-
 .../tests/test_tests/test_python_connectors.py | 2 +-
 37 files changed, 58 insertions(+), 58 deletions(-)

diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py
index 50736976e9535..0cdf469c9d9cc 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py
@@ -4,10 +4,10 @@
 import anyio
 import click
-from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
 from pipelines.airbyte_ci.connectors.build_image.steps import run_connector_build_pipeline
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand


 @click.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.")
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
index 5bc6b90140006..267238ee23478 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
@@ -7,10 +7,10 @@
 import docker
 from dagger import Container, ExecError, Platform, QueryError
+from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.consts import BUILD_PLATFORMS
 from pipelines.helpers.utils import export_container_to_tarball
 from pipelines.models.steps import Step, StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.context import ConnectorContext


 class BuildConnectorImagesBase(Step, ABC):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
index 7738884984c38..704ce6fa3849d 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
@@ -5,12 +5,12 @@
 from typing import List, Optional, Tuple, Union

 from dagger import Container, Directory, ExecError, File, Host, Platform, QueryError
-from pipelines.consts import LOCAL_BUILD_PLATFORM
-from pipelines.dagger.containers import java
-from pipelines.models.steps import StepResult, StepStatus
 from pipelines.airbyte_ci.connectors.build_image.steps.common import BuildConnectorImagesBase
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.steps.gradle import GradleTask
+from pipelines.consts import LOCAL_BUILD_PLATFORM
+from pipelines.dagger.containers import java
+from pipelines.models.steps import StepResult, StepStatus


 class BuildConnectorDistributionTar(GradleTask):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
index ab395c19753f0..8461375732e10 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/normalization.py
@@ -3,9 +3,9 @@
 #

 from dagger import Platform
+from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.dagger.actions.connector import normalization
 from pipelines.models.steps import Step, StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.context import ConnectorContext


 # TODO this class could be deleted
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/python_connectors.py
index 6006b3f913fea..8b8e8b9a18551 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/python_connectors.py
@@ -4,11 +4,11 @@
 from dagger import Container, Platform
-from pipelines.dagger.actions.python.common import apply_python_development_overrides, with_python_connector_installed
-from pipelines.models.steps import StepResult
 from pipelines.airbyte_ci.connectors.build_image.steps import build_customization
 from pipelines.airbyte_ci.connectors.build_image.steps.common import BuildConnectorImagesBase
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
+from pipelines.dagger.actions.python.common import apply_python_development_overrides, with_python_connector_installed
+from pipelines.models.steps import StepResult


 class BuildConnectorImages(BuildConnectorImagesBase):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py
index 908d4305e6cd7..1da52905c82db 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/commands.py
@@ -4,10 +4,10 @@
 import anyio
 import click
-from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
 from pipelines.airbyte_ci.connectors.bump_version.pipeline import run_connector_version_bump_pipeline
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand


 @click.command(cls=DaggerPipelineCommand, short_help="Bump a connector version: update metadata.yaml and changelog.")
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py
index 0d11e733b82f7..f2b7f266788a2 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/bump_version/pipeline.py
@@ -8,10 +8,10 @@
 import semver
 from dagger import Container
 from pipelines import consts
-from pipelines.helpers.connectors import metadata_change_helpers
-from pipelines.models.steps import Step, StepResult, StepStatus
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.reports import ConnectorReport
+from pipelines.helpers.connectors import metadata_change_helpers
+from pipelines.models.steps import Step, StepResult, StepStatus


 def get_bumped_version(version: str, bump_type: str) -> str:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py
index 2f8f906c133a3..fa66dae667589 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/commands.py
@@ -9,7 +9,6 @@
 import click
 from connector_ops.utils import ConnectorLanguage, SupportLevelEnum, get_all_connectors_in_repo
 from pipelines import main_logger
-from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors
 from pipelines.airbyte_ci.connectors.build_image.commands import build
 from pipelines.airbyte_ci.connectors.bump_version.commands import bump_version
 from pipelines.airbyte_ci.connectors.list.commands import list
@@ -17,6 +16,7 @@
 from pipelines.airbyte_ci.connectors.publish.commands import publish
 from pipelines.airbyte_ci.connectors.test.commands import test
 from pipelines.airbyte_ci.connectors.upgrade_base_image.commands import upgrade_base_image
+from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles, get_connector_modified_files, get_modified_connectors

 ALL_CONNECTORS = get_all_connectors_in_repo()
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py
index 56c97b1f3efba..1c7dc06155096 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py
@@ -13,13 +13,13 @@
 from asyncer import asyncify
 from dagger import Directory
 from github import PullRequest
+from pipelines.airbyte_ci.connectors.reports import ConnectorReport
 from pipelines.dagger.actions import secrets
 from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
 from pipelines.helpers.github import update_commit_status_check
 from pipelines.helpers.slack import send_message_to_webhook
 from pipelines.helpers.utils import METADATA_FILE_NAME
 from pipelines.models.contexts import PipelineContext
-from pipelines.airbyte_ci.connectors.reports import ConnectorReport


 class ConnectorContext(PipelineContext):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py
index d7541132f93d9..b57afc0e00059 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/commands.py
@@ -4,10 +4,10 @@
 import anyio
 import click
-from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.migrate_to_base_image.pipeline import run_connector_migration_to_base_image_pipeline
 from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand


 @click.command(
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py
index 8c037fb28a0cd..22ad8424980a7 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/migrate_to_base_image/pipeline.py
@@ -11,11 +11,11 @@
 from dagger import Directory
 from jinja2 import Template
 from pipelines import consts
-from pipelines.helpers.connectors import metadata_change_helpers
-from pipelines.models.steps import Step, StepResult, StepStatus
 from pipelines.airbyte_ci.connectors.bump_version.pipeline import AddChangelogEntry, BumpDockerImageTagInMetadata, get_bumped_version
 from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext
 from pipelines.airbyte_ci.connectors.reports import ConnectorReport
+from pipelines.helpers.connectors import metadata_change_helpers
+from pipelines.models.steps import Step, StepResult, StepStatus


 class UpgradeBaseImageMetadata(Step):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/pipeline.py
index 82f61a1963934..538ab673b92b9 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/pipeline.py
@@ -12,13 +12,13 @@
 import dagger
 from connector_ops.utils import ConnectorLanguage
 from dagger import Config
+from pipelines.airbyte_ci.connectors.context import ConnectorContext
+from pipelines.airbyte_ci.steps.no_op import NoOpStep
 from pipelines.consts import DOCKER_CLI_IMAGE, DOCKER_HOST_NAME, DOCKER_HOST_PORT, ContextState
 from pipelines.dagger.actions.system import docker
 from pipelines.helpers.utils import create_and_open_file
 from pipelines.models.reports import Report
 from pipelines.models.steps import StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.context import ConnectorContext
-from pipelines.airbyte_ci.steps.no_op import NoOpStep

 GITHUB_GLOBAL_CONTEXT = "[POC please ignore] Connectors CI"
 GITHUB_GLOBAL_DESCRIPTION = "Running connectors tests"
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/commands.py
index bd91b97cabd78..a13e4507eeabd 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/commands.py
@@ -5,11 +5,11 @@
 import anyio
 import click
 from pipelines import main_logger
-from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
-from pipelines.consts import ContextState
 from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
 from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext
 from pipelines.airbyte_ci.connectors.publish.pipeline import reorder_contexts, run_connector_publish_pipeline
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+from pipelines.consts import ContextState


 @click.command(cls=DaggerPipelineCommand, help="Publish all images for the selected connectors.")
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py
index 241250c9a0055..8c15ff3cb32c3 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py
@@ -8,11 +8,11 @@
 from dagger import Secret
 from github import PullRequest
+from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.consts import ContextState
 from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
 from pipelines.helpers.gcs import sanitize_gcs_credentials
 from pipelines.helpers.utils import format_duration
-from pipelines.airbyte_ci.connectors.context import ConnectorContext


 class PublishConnectorContext(ConnectorContext):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
index b90127607daa9..ffb754cf47f7e 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
@@ -10,13 +10,13 @@
 from airbyte_protocol.models.airbyte_protocol import ConnectorSpecification
 from dagger import Container, ExecError, File, ImageLayerCompression, QueryError
 from pipelines import consts
-from pipelines.dagger.actions.remote_storage import upload_to_gcs
-from pipelines.dagger.actions.system import docker
-from pipelines.models.steps import Step, StepResult, StepStatus
 from pipelines.airbyte_ci.connectors.build_image import steps
 from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext
 from pipelines.airbyte_ci.connectors.reports import ConnectorReport
 from pipelines.airbyte_ci.metadata.pipeline import MetadataUpload, MetadataValidation
+from pipelines.dagger.actions.remote_storage import upload_to_gcs
+from pipelines.dagger.actions.system import docker
+from pipelines.models.steps import Step, StepResult, StepStatus
 from pydantic import ValidationError
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/commands.py
index fbf650bbcf885..a8586b1dd3a8f 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/commands.py
@@ -7,12 +7,12 @@
 import anyio
 import click
 from pipelines import main_logger
-from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
-from pipelines.consts import ContextState
-from pipelines.helpers.github import update_global_commit_status_check_for_tests
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
 from pipelines.airbyte_ci.connectors.test.steps import run_connector_test_pipeline
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+from pipelines.consts import ContextState
+from pipelines.helpers.github import update_global_commit_status_check_for_tests


 @click.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.")
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
index 414be8cf6bae6..2d1100bc7473c 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
@@ -9,11 +9,6 @@
 import anyio
 import asyncer
 from dagger import Directory, File, QueryError
-from pipelines.consts import LOCAL_BUILD_PLATFORM
-from pipelines.dagger.actions import secrets
-from pipelines.dagger.actions.system import docker
-from pipelines.helpers.utils import export_container_to_tarball
-from pipelines.models.steps import StepResult, StepStatus
 from pipelines.airbyte_ci.connectors.build_image.steps.java_connectors import (
     BuildConnectorDistributionTar,
     BuildConnectorImages,
@@ -23,6 +18,11 @@
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests
 from pipelines.airbyte_ci.steps.gradle import GradleTask
+from pipelines.consts import LOCAL_BUILD_PLATFORM
+from pipelines.dagger.actions import secrets
+from pipelines.dagger.actions.system import docker
+from pipelines.helpers.utils import export_container_to_tarball
+from pipelines.models.steps import StepResult, StepStatus


 class IntegrationTests(GradleTask):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
index 307ede47ac973..640aa4ba8ae58 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/python_connectors.py
@@ -11,13 +11,13 @@
 import pipelines.dagger.actions.python.common
 import pipelines.dagger.actions.system.docker
 from dagger import Container, File
+from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages
+from pipelines.airbyte_ci.connectors.context import ConnectorContext
+from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests
 from pipelines.consts import LOCAL_BUILD_PLATFORM, PYPROJECT_TOML_FILE_PATH
 from pipelines.dagger.actions import secrets
 from pipelines.helpers.utils import export_container_to_tarball
 from pipelines.models.steps import Step, StepResult, StepStatus
-from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages
-from pipelines.airbyte_ci.connectors.context import ConnectorContext
-from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests


 class CodeFormatChecks(Step):
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/commands.py
index 04f29bda54b95..7c857bf617a49 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/upgrade_base_image/commands.py
@@ -4,10 +4,10 @@
 import anyio
 import click
-from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.migrate_to_base_image.pipeline import run_connector_base_image_upgrade_pipeline
 from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand


 @click.command(cls=DaggerPipelineCommand, short_help="Upgrades the base image version used by the selected connectors.")
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/commands.py
index 9a1e5422ae992..f89aa6b42cfdf 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/commands.py
@@ -4,8 +4,8 @@
 import anyio
 import click
-from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
 from pipelines.airbyte_ci.metadata.pipeline import run_metadata_orchestrator_deploy_pipeline
+from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand


 # MAIN GROUP
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/pipeline.py
index dbe03530dd1e5..4592363b1c55c 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/metadata/pipeline.py
@@ -6,6 +6,9 @@
 from typing import Optional

 import dagger
+from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext
+from pipelines.airbyte_ci.steps.docker import SimpleDockerStep
+from pipelines.airbyte_ci.steps.poetry import PoetryRunStep
 from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH, INTERNAL_TOOL_PATHS
 from pipelines.dagger.actions.python.common import with_pip_packages
 from pipelines.dagger.containers.python import with_python_base
@@ -13,9 +16,6 @@
 from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable
 from pipelines.models.reports import Report
 from pipelines.models.steps import MountPath, Step, StepResult
-from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext
-from pipelines.airbyte_ci.steps.docker import SimpleDockerStep
-from pipelines.airbyte_ci.steps.poetry import PoetryRunStep


 # STEPS
diff --git a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
index a2b4970633103..ce6f9d48d209f 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/cli/airbyte_ci.py
@@ -10,6 +10,9 @@
 import click
 from github import PullRequest
 from pipelines import main_logger
+from pipelines.airbyte_ci.connectors.commands import connectors
+from pipelines.airbyte_ci.metadata.commands import metadata
+from pipelines.airbyte_ci.test.commands import test
 from pipelines.cli.telemetry import track_command
 from pipelines.consts import LOCAL_PIPELINE_PACKAGE_PATH, CIContext
 from pipelines.helpers import github
@@ -21,9 +24,6 @@
     get_modified_files_in_pull_request,
 )
 from pipelines.helpers.utils import get_current_epoch_time, transform_strs_to_paths
-from pipelines.airbyte_ci.connectors.commands import connectors
-from pipelines.airbyte_ci.metadata.commands import metadata
-from pipelines.airbyte_ci.test.commands import test


 # HELPERS
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py
index afadd3f15c224..72f4e98722c84 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/common.py
@@ -7,9 +7,9 @@
 from typing import List, Optional

 from dagger import Container, Directory
+from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext
 from pipelines.dagger.containers.python import with_python_base, with_testing_dependencies
 from pipelines.helpers.utils import check_path_in_workdir, get_file_contents
-from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext


 def with_python_package(
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py
index 3cb421cc9f66c..856ce2d566cc2 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/pipx.py
@@ -5,9 +5,9 @@
 from typing import List, Optional

 from dagger import Container
+from pipelines.airbyte_ci.connectors.context import PipelineContext
 from pipelines.dagger.actions.python.common import with_pip_packages, with_python_package
 from pipelines.dagger.actions.python.poetry import find_local_dependencies_in_pyproject_toml
-from pipelines.airbyte_ci.connectors.context import PipelineContext


 def with_pipx(base_python_container: Container) -> Container:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py
index 6d2af0b2b3c80..df555f18b7505 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/python/poetry.py
@@ -8,11 +8,11 @@
 import toml
 from dagger import Container, Directory
+from pipelines.airbyte_ci.connectors.context import PipelineContext
 from pipelines.dagger.actions.python.common import with_pip_packages, with_python_package
 from pipelines.dagger.actions.system.common import with_debian_packages
 from pipelines.dagger.containers.python import with_python_base
 from pipelines.helpers.utils import get_file_contents
-from pipelines.airbyte_ci.connectors.context import PipelineContext


 async def find_local_dependencies_in_pyproject_toml(
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py
index c29efdf604993..44e940dcf1da6 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/actions/system/docker.py
@@ -8,9 +8,9 @@
 from dagger import Client, Container, File
 from pipelines import consts
+from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext
 from pipelines.consts import DOCKER_HOST_NAME, DOCKER_HOST_PORT, DOCKER_TMP_VOLUME_NAME
 from pipelines.helpers.utils import sh_dash_c
-from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext


 def with_global_dockerd_service(dagger_client: Client) -> Container:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py
index 3531d18bc32d0..2eb4241894391 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/internal_tools.py
@@ -3,10 +3,10 @@
 #

 from dagger import Container, Secret
+from pipelines.airbyte_ci.connectors.context import PipelineContext
 from pipelines.consts import INTERNAL_TOOL_PATHS
 from pipelines.dagger.actions.python.pipx import with_installed_pipx_package
 from pipelines.dagger.containers.python import with_python_base
-from pipelines.airbyte_ci.connectors.context import PipelineContext


 async def with_ci_credentials(context: PipelineContext, gsm_secret: Secret) -> Container:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py
index 5f76a111ac3f6..d143314ce91bb 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/java.py
@@ -3,11 +3,11 @@
 #

 from dagger import CacheVolume, Container, File, Platform
+from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext
 from pipelines.consts import AMAZONCORRETTO_IMAGE
 from pipelines.dagger.actions.connector.hooks import finalize_build
 from pipelines.dagger.actions.connector.normalization import DESTINATION_NORMALIZATION_BUILD_CONFIGURATION, with_normalization
 from pipelines.helpers.utils import sh_dash_c
-from pipelines.airbyte_ci.connectors.context import ConnectorContext, PipelineContext


 def with_integration_base(context: PipelineContext, build_platform: Platform) -> Container:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py
index 0cebf299fd499..4bc137939faca 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/dagger/containers/python.py
@@ -3,9 +3,9 @@
 #

 from dagger import CacheVolume, Container
+from pipelines.airbyte_ci.connectors.context import PipelineContext
 from pipelines.consts import CONNECTOR_TESTING_REQUIREMENTS, LICENSE_SHORT_FILE_PATH, PYPROJECT_TOML_FILE_PATH
 from pipelines.helpers.utils import sh_dash_c
-from pipelines.airbyte_ci.connectors.context import PipelineContext


 def with_python_base(context: PipelineContext, python_version: str = "3.10") -> Container:
diff --git a/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py b/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py
index f1874c97a7ac8..fbfcb4391f379 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_actions/test_environments.py
@@ -3,8 +3,8 @@
 #

 import pytest
-from pipelines.dagger.actions.python import common
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
+from pipelines.dagger.actions.python import common

 pytestmark = [
     pytest.mark.anyio,
diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
index aa602f2d7a1d9..96d6fc79807ef 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
@@ -5,9 +5,9 @@
 from pathlib import Path

 import pytest
-from pipelines.models.steps import StepStatus
 from pipelines.airbyte_ci.connectors.build_image.steps import build_customization, python_connectors
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
+from pipelines.models.steps import StepStatus

 pytestmark = [
     pytest.mark.anyio,
diff --git a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py
index c080d0e92f9d5..aa38aa5efa612 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_commands/test_groups/test_connectors.py
@@ -12,11 +12,11 @@
 import pytest
 from click.testing import CliRunner
 from connector_ops.utils import METADATA_FILE_NAME, ConnectorLanguage
-from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
 from pipelines.airbyte_ci.connectors import commands as connectors_commands
 from pipelines.airbyte_ci.connectors.build_image import commands as connectors_build_command
 from pipelines.airbyte_ci.connectors.publish import commands as connectors_publish_command
 from pipelines.airbyte_ci.connectors.test import commands as connectors_test_command
+from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
 from tests.utils import pick_a_random_connector
diff --git a/airbyte-ci/connectors/pipelines/tests/test_gradle.py b/airbyte-ci/connectors/pipelines/tests/test_gradle.py
index a665731b61817..82f47f803117e 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_gradle.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_gradle.py
@@ -6,8 +6,8 @@
 import pipelines.helpers.connectors.modifed
 import pytest
-from pipelines.models import steps
 from pipelines.airbyte_ci.steps import gradle
+from pipelines.models import steps

 pytestmark = [
     pytest.mark.anyio,
diff --git a/airbyte-ci/connectors/pipelines/tests/test_publish.py b/airbyte-ci/connectors/pipelines/tests/test_publish.py
index 0b2b34632bd21..e2b7bac964ab1 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_publish.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_publish.py
@@ -8,8 +8,8 @@
 import anyio
 import pytest
-from pipelines.models.steps import StepStatus
 from pipelines.airbyte_ci.connectors.publish import pipeline as publish_pipeline
+from pipelines.models.steps import StepStatus

 pytestmark = [
     pytest.mark.anyio,
diff --git a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py
index 6ee9b689dbd65..a4b23cbaea51a 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_steps/test_simple_docker_step.py
@@ -5,10 +5,10 @@
 from pathlib import Path

 import pytest
+from pipelines.airbyte_ci.steps.docker import SimpleDockerStep
 from pipelines.helpers.utils import get_exec_result
 from pipelines.models.contexts import PipelineContext
 from pipelines.models.steps import MountPath
-from pipelines.airbyte_ci.steps.docker import SimpleDockerStep

 pytestmark = [
     pytest.mark.anyio,
diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py
index 4f56265d9ccf4..075a286444b57 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_common.py
@@ -11,10 +11,10 @@
 import pytest
 import yaml
 from freezegun import freeze_time
+from pipelines.airbyte_ci.connectors.test.steps import common
 from pipelines.dagger.actions.system import docker
 from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
 from pipelines.models.steps import StepStatus
-from pipelines.airbyte_ci.connectors.test.steps import common

 pytestmark = [
     pytest.mark.anyio,
diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
index 171e6ce865e7c..b2467ecbe3128 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
@@ -4,10 +4,10 @@
 import pytest
 from connector_ops.utils import Connector
-from pipelines.models.steps import StepResult
 from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages
 from pipelines.airbyte_ci.connectors.context import ConnectorContext
 from pipelines.airbyte_ci.connectors.test.steps.python_connectors import UnitTests
+from pipelines.models.steps import StepResult

 pytestmark = [
     pytest.mark.anyio,