diff --git a/.github/workflows/publish_connectors.yml b/.github/workflows/publish_connectors.yml
index ded1f9fb11ae..16b84b191cc3 100644
--- a/.github/workflows/publish_connectors.yml
+++ b/.github/workflows/publish_connectors.yml
@@ -18,6 +18,10 @@ on:
type: string
default: ci-runner-connector-publish-large-dagger-0-6-4
required: true
+ airbyte-ci-binary-url:
+ description: "URL to airbyte-ci binary"
+ required: false
+ default: https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci
jobs:
publish_connectors:
name: Publish connectors
@@ -62,6 +66,7 @@ jobs:
s3_build_cache_access_key_id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }}
s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
subcommand: "connectors ${{ github.event.inputs.connectors-options }} publish ${{ github.event.inputs.publish-options }}"
+ airbyte_ci_binary_url: ${{ github.event.inputs.airbyte-ci-binary-url }}
set-instatus-incident-on-failure:
name: Create Instatus Incident on Failure
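For reference, a manual dispatch can exercise the new input from the GitHub CLI, e.g. `gh workflow run publish_connectors.yml -f airbyte-ci-binary-url=https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci` (an illustrative invocation; the URL is simply the default declared above).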
diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md
index a0adab57d120..abae5c2fb1b5 100644
--- a/airbyte-ci/connectors/pipelines/README.md
+++ b/airbyte-ci/connectors/pipelines/README.md
@@ -124,18 +124,17 @@ At this point you can run `airbyte-ci` commands.
#### Options
-| Option | Default value | Mapped environment variable | Description |
-| ------------------------------------------ | ---------------------------------------------------------------------------------------------- | ----------------------------- | ------------------------------------------------------------------------------------------- |
-| `--enable-dagger-run/--disable-dagger-run` | `--enable-dagger-run`` | | Disables the Dagger terminal UI. | | |
-| `--enable-auto-update/--disable-auto-update` | `--enable-auto-update`` | | Disables the auto update prompt | | |
-| `--is-local/--is-ci` | `--is-local` | | Determines the environment in which the CLI runs: local environment or CI environment. |
-| `--git-branch` | The checked out git branch name | `CI_GIT_BRANCH` | The git branch on which the pipelines will run. |
-| `--git-revision` | The current branch head | `CI_GIT_REVISION` | The commit hash on which the pipelines will run. |
-| `--diffed-branch` | `origin/master` | | Branch to which the git diff will happen to detect new or modified files. |
-| `--gha-workflow-run-id` | | | GHA CI only - The run id of the GitHub action workflow |
-| `--ci-context` | `manual` | | The current CI context: `manual` for manual run, `pull_request`, `nightly_builds`, `master` |
-| `--pipeline-start-timestamp` | Current epoch time | `CI_PIPELINE_START_TIMESTAMP` | Start time of the pipeline as epoch time. Used for pipeline run duration computation. |
-| `--show-dagger-logs/--hide-dagger-logs` | `--hide-dagger-logs` | | Flag to show or hide the dagger logs. |
+| Option                                     | Default value                   | Mapped environment variable   | Description                                                                                 |
+| ------------------------------------------ | ------------------------------- | ----------------------------- | ------------------------------------------------------------------------------------------- |
+| `--enable-dagger-run/--disable-dagger-run` | `--enable-dagger-run`           |                               | Enables or disables the Dagger terminal UI.                                                 |
+| `--is-local/--is-ci`                       | `--is-local`                    |                               | Determines the environment in which the CLI runs: local environment or CI environment.     |
+| `--git-branch`                             | The checked out git branch name | `CI_GIT_BRANCH`               | The git branch on which the pipelines will run.                                            |
+| `--git-revision`                           | The current branch head         | `CI_GIT_REVISION`             | The commit hash on which the pipelines will run.                                           |
+| `--diffed-branch`                          | `origin/master`                 |                               | The branch against which the git diff is computed to detect new or modified files.         |
+| `--gha-workflow-run-id`                    |                                 |                               | GHA CI only - The run id of the GitHub action workflow.                                    |
+| `--ci-context`                             | `manual`                        |                               | The current CI context: `manual` for manual run, `pull_request`, `nightly_builds`, `master` |
+| `--pipeline-start-timestamp`               | Current epoch time              | `CI_PIPELINE_START_TIMESTAMP` | Start time of the pipeline as epoch time. Used for pipeline run duration computation.      |
+| `--show-dagger-logs/--hide-dagger-logs`    | `--hide-dagger-logs`            |                               | Flag to show or hide the dagger logs.                                                      |
### `connectors` command subgroup
@@ -255,6 +254,9 @@ It's mainly purposed for local use.
Build a single connector:
`airbyte-ci connectors --name=source-pokeapi build`
+Build a single connector for multiple architectures:
+`airbyte-ci connectors --name=source-pokeapi build --architecture=linux/amd64 --architecture=linux/arm64`
+
Build multiple connectors:
`airbyte-ci connectors --name=source-pokeapi --name=source-bigquery build`
@@ -291,11 +293,17 @@ flowchart TD
distTar-->connector
normalization--"if supports normalization"-->connector
- load[Load to docker host with :dev tag, current platform]
+ load[Load to docker host with :dev tag]
spec[Get spec]
connector-->spec--"if success"-->load
```
+### Options
+
+| Option | Multiple | Default value | Description |
+| --------------------- | -------- | -------------- | ----------------------------------------------------------------- |
+| `--architecture`/`-a` | True | Local platform | Defines the architecture(s) for which the connector image will be built. |
+
### `connectors publish` command
Run a publish pipeline for one or multiple connectors.
It's mainly intended for CI use, to release a connector update.
@@ -434,6 +442,7 @@ This command runs the Python tests for a airbyte-ci poetry package.
## Changelog
| Version | PR | Description |
| ------- | ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- |
+| 2.9.0 | [#32816](https://github.com/airbytehq/airbyte/pull/32816) | Add `--architecture` option to connector build. |
| 2.8.0 | [#31930](https://github.com/airbytehq/airbyte/pull/31930) | Move pipx install to `airbyte-ci-dev`, and add auto-update feature targeting binary |
| 2.7.3 | [#32847](https://github.com/airbytehq/airbyte/pull/32847) | Improve --modified behaviour for pull requests. |
| 2.7.2 | [#32839](https://github.com/airbytehq/airbyte/pull/32839) | Revert changes in v2.7.1. |
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py
index c15260489392..3c4e04c6028b 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/commands.py
@@ -2,11 +2,16 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
+from typing import List
+
import asyncclick as click
+import dagger
+from pipelines import main_logger
from pipelines.airbyte_ci.connectors.build_image.steps import run_connector_build_pipeline
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
+from pipelines.consts import BUILD_PLATFORMS, LOCAL_BUILD_PLATFORM
@click.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.")
@@ -17,10 +22,20 @@
default=False,
type=bool,
)
+@click.option(
+ "-a",
+ "--architecture",
+ "build_architectures",
+ help="Architecture for which to build the connector image. If not specified, the image will be built for the local architecture.",
+ multiple=True,
+ default=[LOCAL_BUILD_PLATFORM],
+ type=click.Choice(BUILD_PLATFORMS, case_sensitive=True),
+)
@click.pass_context
-async def build(ctx: click.Context, use_host_gradle_dist_tar: bool) -> bool:
+async def build(ctx: click.Context, use_host_gradle_dist_tar: bool, build_architectures: List[str]) -> bool:
"""Runs a build pipeline for the selected connectors."""
-
+ build_platforms = [dagger.Platform(architecture) for architecture in build_architectures]
+ main_logger.info(f"Building connectors for {build_platforms}, use --architecture to change this.")
connectors_contexts = [
ConnectorContext(
pipeline_name=f"Build connector {connector.technical_name}",
@@ -41,6 +56,7 @@ async def build(ctx: click.Context, use_host_gradle_dist_tar: bool) -> bool:
use_host_gradle_dist_tar=use_host_gradle_dist_tar,
s3_build_cache_access_key_id=ctx.obj.get("s3_build_cache_access_key_id"),
s3_build_cache_secret_key=ctx.obj.get("s3_build_cache_secret_key"),
+ targeted_platforms=build_platforms,
)
for connector in ctx.obj["selected_connectors_with_modified_files"]
]
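For readers less familiar with click's repeated options: `multiple=True` collects every `-a` occurrence into a tuple, which the command then maps to `dagger.Platform` values. Below is a minimal, self-contained sketch of that mechanic (plain `click` and bare platform strings so it runs without a Dagger engine; the command name is hypothetical):

```python
import click

BUILD_PLATFORMS = ("linux/amd64", "linux/arm64")


@click.command()
@click.option(
    "-a",
    "--architecture",
    "build_architectures",
    multiple=True,  # each repeated -a flag is appended to a tuple
    default=["linux/amd64"],
    type=click.Choice(BUILD_PLATFORMS, case_sensitive=True),
)
def build(build_architectures):
    # `build -a linux/amd64 -a linux/arm64` yields ('linux/amd64', 'linux/arm64')
    click.echo(f"Building for: {', '.join(build_architectures)}")


if __name__ == "__main__":
    build()
```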
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py
index 5bbc035fe1bd..e18a6e3d38f6 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/__init__.py
@@ -5,17 +5,13 @@
from __future__ import annotations
-import platform
-
import anyio
from connector_ops.utils import ConnectorLanguage
-from pipelines.models.steps import StepResult
-from pipelines.airbyte_ci.connectors.build_image.steps import python_connectors
+from pipelines.airbyte_ci.connectors.build_image.steps import java_connectors, python_connectors
from pipelines.airbyte_ci.connectors.build_image.steps.common import LoadContainerToLocalDockerHost, StepStatus
-from pipelines.consts import LOCAL_BUILD_PLATFORM
-from pipelines.airbyte_ci.connectors.build_image.steps import java_connectors
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.airbyte_ci.connectors.reports import ConnectorReport
+from pipelines.models.steps import StepResult
class NoBuildStepForLanguageError(Exception):
@@ -41,7 +37,7 @@ async def run_connector_build_pipeline(context: ConnectorContext, semaphore: any
Args:
context (ConnectorContext): The initialized connector context.
-
+ semaphore (anyio.Semaphore): The semaphore to use to limit the number of concurrent builds.
Returns:
ConnectorReport: The reports holding builds results.
"""
@@ -49,9 +45,10 @@ async def run_connector_build_pipeline(context: ConnectorContext, semaphore: any
async with semaphore:
async with context:
build_result = await run_connector_build(context)
+ per_platform_built_containers = build_result.output_artifact
step_results.append(build_result)
if context.is_local and build_result.status is StepStatus.SUCCESS:
- load_image_result = await LoadContainerToLocalDockerHost(context, LOCAL_BUILD_PLATFORM, build_result.output_artifact).run()
+ load_image_result = await LoadContainerToLocalDockerHost(context, per_platform_built_containers).run()
step_results.append(load_image_result)
context.report = ConnectorReport(context, step_results, name="BUILD RESULTS")
return context.report
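The key shape change in this hunk: `build_result.output_artifact` is a mapping from platform to built container, and `LoadContainerToLocalDockerHost` now receives the whole mapping rather than one pre-selected entry. A toy illustration of the new consumption pattern (stand-in types, not the real step classes):

```python
from typing import Dict

# Stand-ins for dagger.Platform and dagger.Container, just to show the shape.
Platform = str
Container = str

per_platform_built_containers: Dict[Platform, Container] = {
    "linux/amd64": "container-built-for-amd64",
    "linux/arm64": "container-built-for-arm64",
}

# Before: the caller picked one platform's container up front.
# After: every built variant is handed over and exported as a single tarball.
container_variants = list(per_platform_built_containers.values())
print(f"Loading {len(container_variants)} platform variant(s) into the local Docker host")
```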
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
index 267238ee2347..eed5ed96c292 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/common.py
@@ -2,14 +2,14 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
+import json
from abc import ABC
from typing import List, Tuple
import docker
from dagger import Container, ExecError, Platform, QueryError
from pipelines.airbyte_ci.connectors.context import ConnectorContext
-from pipelines.consts import BUILD_PLATFORMS
-from pipelines.helpers.utils import export_container_to_tarball
+from pipelines.helpers.utils import export_containers_to_tarball
from pipelines.models.steps import Step, StepResult, StepStatus
@@ -22,8 +22,8 @@ class BuildConnectorImagesBase(Step, ABC):
def title(self):
return f"Build {self.context.connector.technical_name} docker image for platform(s) {', '.join(self.build_platforms)}"
- def __init__(self, context: ConnectorContext, *build_platforms: List[Platform]) -> None:
- self.build_platforms = build_platforms if build_platforms else BUILD_PLATFORMS
+ def __init__(self, context: ConnectorContext) -> None:
+ self.build_platforms: List[Platform] = context.targeted_platforms
super().__init__(context)
async def _run(self, *args) -> StepResult:
@@ -58,26 +58,40 @@ async def _build_connector(self, platform: Platform, *args) -> Container:
class LoadContainerToLocalDockerHost(Step):
IMAGE_TAG = "dev"
- def __init__(self, context: ConnectorContext, platform: Platform, containers: dict[Platform, Container]) -> None:
+ def __init__(self, context: ConnectorContext, containers: dict[Platform, Container]) -> None:
super().__init__(context)
- self.platform = platform
- self.container = containers[platform]
+ self.containers = containers
@property
def title(self):
- return f"Load {self.image_name}:{self.IMAGE_TAG} for platform {self.platform} to the local docker host."
+ return f"Load {self.image_name}:{self.IMAGE_TAG} to the local docker host."
@property
def image_name(self) -> str:
return f"airbyte/{self.context.connector.technical_name}"
async def _run(self) -> StepResult:
- _, exported_tarball_path = await export_container_to_tarball(self.context, self.container)
- client = docker.from_env()
+ container_variants = list(self.containers.values())
+ _, exported_tar_path = await export_containers_to_tarball(self.context, container_variants)
+ if not exported_tar_path:
+ return StepResult(
+ self,
+ StepStatus.FAILURE,
+ stderr=f"Failed to export the connector image {self.image_name}:{self.IMAGE_TAG} to a tarball.",
+ )
try:
- with open(exported_tarball_path, "rb") as tarball_content:
- new_image = client.images.load(tarball_content.read())[0]
- new_image.tag(self.image_name, tag=self.IMAGE_TAG)
- return StepResult(self, StepStatus.SUCCESS)
- except ConnectionError:
- return StepResult(self, StepStatus.FAILURE, stderr="The connection to the local docker host failed.")
+ client = docker.from_env()
+ response = client.api.import_image_from_file(str(exported_tar_path), repository=self.image_name, tag=self.IMAGE_TAG)
+ try:
+ image_sha = json.loads(response)["status"]
+ except (json.JSONDecodeError, KeyError):
+ return StepResult(
+ self,
+ StepStatus.FAILURE,
+ stderr=f"Failed to import the connector image {self.image_name}:{self.IMAGE_TAG} to your Docker host: {response}",
+ )
+ return StepResult(
+ self, StepStatus.SUCCESS, stdout=f"Loaded image {self.image_name}:{self.IMAGE_TAG} to your Docker host ({image_sha})."
+ )
+ except docker.errors.DockerException as e:
+ return StepResult(self, StepStatus.FAILURE, stderr=f"Something went wrong while interacting with the local docker client: {e}")
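For context on the parsing above: docker-py's low-level `client.api.import_image_from_file` returns the daemon's response as a JSON string, and the step extracts the `status` field from it. A small sketch of the happy and unhappy paths (sample payloads are illustrative, not captured from a real daemon):

```python
import json


def parse_import_response(response: str) -> str:
    """Extract the image identifier from a Docker import response, or raise."""
    return json.loads(response)["status"]


print(parse_import_response('{"status": "sha256:0123abcd"}'))  # sha256:0123abcd

try:
    parse_import_response("bad response")
except (json.JSONDecodeError, KeyError) as error:
    print(f"Unexpected daemon response: {error}")
```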
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
index 704ce6fa3849..f8a4c7ed0d61 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/build_image/steps/java_connectors.py
@@ -2,13 +2,10 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
-from typing import List, Optional, Tuple, Union
-
-from dagger import Container, Directory, ExecError, File, Host, Platform, QueryError
+from dagger import Container, Directory, File, Platform, QueryError
from pipelines.airbyte_ci.connectors.build_image.steps.common import BuildConnectorImagesBase
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.airbyte_ci.steps.gradle import GradleTask
-from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.dagger.containers import java
from pipelines.models.steps import StepResult, StepStatus
@@ -56,7 +53,7 @@ async def run_connector_build(context: ConnectorContext) -> StepResult:
# Special case: use a local dist tar to speed up local development.
dist_dir = await context.dagger_client.host().directory(dist_tar_directory_path(context), include=["*.tar"])
# Speed things up by only building for the local platform.
- return await BuildConnectorImages(context, LOCAL_BUILD_PLATFORM).run(dist_dir)
+ return await BuildConnectorImages(context).run(dist_dir)
# Default case: distribution tar is built by the dagger pipeline.
build_connector_tar_result = await BuildConnectorDistributionTar(context).run()
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py
index 8a2115a41135..3553cafa8ac1 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/context.py
@@ -6,14 +6,15 @@
from datetime import datetime
from types import TracebackType
-from typing import Optional
+from typing import Iterable, Optional
import yaml
from anyio import Path
from asyncer import asyncify
-from dagger import Directory, Secret
+from dagger import Directory, Platform, Secret
from github import PullRequest
from pipelines.airbyte_ci.connectors.reports import ConnectorReport
+from pipelines.consts import BUILD_PLATFORMS
from pipelines.dagger.actions import secrets
from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
from pipelines.helpers.github import update_commit_status_check
@@ -60,6 +61,7 @@ def __init__(
s3_build_cache_access_key_id: Optional[str] = None,
s3_build_cache_secret_key: Optional[str] = None,
concurrent_cat: Optional[bool] = False,
+ targeted_platforms: Optional[Iterable[Platform]] = BUILD_PLATFORMS,
):
"""Initialize a connector context.
@@ -88,6 +90,7 @@ def __init__(
s3_build_cache_access_key_id (Optional[str], optional): Gradle S3 Build Cache credentials. Defaults to None.
s3_build_cache_secret_key (Optional[str], optional): Gradle S3 Build Cache credentials. Defaults to None.
concurrent_cat (bool, optional): Whether to run the CAT tests in parallel. Defaults to False.
+ targeted_platforms (Optional[Iterable[Platform]], optional): The platforms to build the connector image for. Defaults to BUILD_PLATFORMS.
"""
self.pipeline_name = pipeline_name
@@ -110,6 +113,7 @@ def __init__(
self.s3_build_cache_access_key_id = s3_build_cache_access_key_id
self.s3_build_cache_secret_key = s3_build_cache_secret_key
self.concurrent_cat = concurrent_cat
+ self.targeted_platforms = targeted_platforms
super().__init__(
pipeline_name=pipeline_name,
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
index ffb754cf47f7..2a5908bdb150 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/pipeline.py
@@ -205,7 +205,7 @@ async def _run(self, built_connector: Container) -> StepResult:
return StepResult(self, status=StepStatus.SUCCESS, stdout="Uploaded connector spec to spec cache bucket.")
-## Pipeline
+# Pipeline
async def run_connector_publish_pipeline(context: PublishConnectorContext, semaphore: anyio.Semaphore) -> ConnectorReport:
diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
index ca334022e10f..f3393e0b18f0 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/test/steps/java_connectors.py
@@ -21,7 +21,7 @@
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.dagger.actions import secrets
from pipelines.dagger.actions.system import docker
-from pipelines.helpers.utils import export_container_to_tarball
+from pipelines.helpers.utils import export_containers_to_tarball
from pipelines.models.steps import StepResult, StepStatus
@@ -101,15 +101,15 @@ async def run_docker_build_dependent_steps(dist_tar_dir: Directory) -> List[Step
context.logger.info(f"This connector supports normalization: will build {normalization_image}.")
build_normalization_results = await BuildOrPullNormalization(context, normalization_image, LOCAL_BUILD_PLATFORM).run()
normalization_container = build_normalization_results.output_artifact
- normalization_tar_file, _ = await export_container_to_tarball(
- context, normalization_container, tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar"
+ normalization_tar_file, _ = await export_containers_to_tarball(
+ context, [normalization_container], tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar"
)
step_results.append(build_normalization_results)
else:
normalization_tar_file = None
connector_container = build_connector_image_results.output_artifact[LOCAL_BUILD_PLATFORM]
- connector_image_tar_file, _ = await export_container_to_tarball(context, connector_container)
+ connector_image_tar_file, _ = await export_containers_to_tarball(context, [connector_container])
async with asyncer.create_task_group() as docker_build_dependent_group:
soon_integration_tests_results = docker_build_dependent_group.soonify(IntegrationTests(context).run)(
diff --git a/airbyte-ci/connectors/pipelines/pipelines/consts.py b/airbyte-ci/connectors/pipelines/pipelines/consts.py
index 4ccaf7522644..ba1db767c0dd 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/consts.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/consts.py
@@ -2,7 +2,6 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
-import os
import platform
from enum import Enum
@@ -20,7 +19,7 @@
"pytest-custom_exit_code",
]
-BUILD_PLATFORMS = [Platform("linux/amd64"), Platform("linux/arm64")]
+BUILD_PLATFORMS = (Platform("linux/amd64"), Platform("linux/arm64"))
PLATFORM_MACHINE_TO_DAGGER_PLATFORM = {
"x86_64": Platform("linux/amd64"),
@@ -28,7 +27,8 @@
"aarch64": Platform("linux/amd64"),
"amd64": Platform("linux/amd64"),
}
-LOCAL_BUILD_PLATFORM = PLATFORM_MACHINE_TO_DAGGER_PLATFORM[platform.machine()]
+LOCAL_MACHINE_TYPE = platform.machine()
+LOCAL_BUILD_PLATFORM = PLATFORM_MACHINE_TO_DAGGER_PLATFORM[LOCAL_MACHINE_TYPE]
AMAZONCORRETTO_IMAGE = "amazoncorretto:17.0.8-al2023"
NODE_IMAGE = "node:18.18.0-slim"
GO_IMAGE = "golang:1.17"
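To make the `LOCAL_BUILD_PLATFORM` resolution concrete, here is the lookup in isolation (an abbreviated copy of the mapping above; like the real constant, direct indexing raises `KeyError` on machine types missing from the table):

```python
import platform

from dagger import Platform

# Abbreviated copy of PLATFORM_MACHINE_TO_DAGGER_PLATFORM from consts.py.
PLATFORM_MACHINE_TO_DAGGER_PLATFORM = {
    "x86_64": Platform("linux/amd64"),
    "arm64": Platform("linux/arm64"),
}

local_machine_type = platform.machine()  # e.g. "arm64" on Apple Silicon
local_build_platform = PLATFORM_MACHINE_TO_DAGGER_PLATFORM[local_machine_type]
print(f"{local_machine_type} -> {local_build_platform}")
```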
diff --git a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
index d2709257e449..509ff15e423c 100644
--- a/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
+++ b/airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
@@ -267,31 +267,31 @@ async def execute_concurrently(steps: List[Callable], concurrency=5):
return [task.value for task in tasks]
-async def export_container_to_tarball(
- context: ConnectorContext, container: Container, tar_file_name: Optional[str] = None
+async def export_containers_to_tarball(
+ context: ConnectorContext, container_variants: List[Container], tar_file_name: Optional[str] = None
) -> Tuple[Optional[File], Optional[Path]]:
"""Save the container image to the host filesystem as a tar archive.
- Exporting a container image as a tar archive allows user to have a dagger built container image available on their host filesystem.
- They can load this tar file to their main docker host with 'docker load'.
- This mechanism is also used to share dagger built containers with other steps like AcceptanceTest that have their own dockerd service.
- We 'docker load' this tar file to AcceptanceTest's docker host to make sure the container under test image is available for testing.
+ Exports a list of container variants to a tarball file.
+ The list of container variants should be platform/os specific variants of the same container image.
+ The tarball file is saved to the host filesystem in the directory specified by the host_image_export_dir_path attribute of the context.
+
+ Args:
+ context (ConnectorContext): The current connector context.
+ container_variants (List[Container]): The list of container variants to export.
+ tar_file_name (Optional[str], optional): The name of the tar archive file. Defaults to None.
Returns:
Tuple[Optional[File], Optional[Path]]: A tuple with the file object holding the tar archive on the host and its path.
"""
- if tar_file_name is None:
- tar_file_name = f"{context.connector.technical_name}_{context.git_revision}.tar"
- tar_file_name = slugify(tar_file_name)
+ tar_file_name = f"{slugify(context.connector.technical_name)}_{context.git_revision}.tar" if tar_file_name is None else tar_file_name
local_path = Path(f"{context.host_image_export_dir_path}/{tar_file_name}")
- export_success = await container.export(str(local_path), forced_compression=ImageLayerCompression.Gzip)
+ export_success = await context.dagger_client.container().export(
+ str(local_path), platform_variants=container_variants, forced_compression=ImageLayerCompression.Gzip
+ )
if export_success:
- exported_file = (
- context.dagger_client.host().directory(context.host_image_export_dir_path, include=[tar_file_name]).file(tar_file_name)
- )
- return exported_file, local_path
- else:
- return None, None
+ return context.dagger_client.host().file(str(local_path)), local_path
+ return None, None
def format_duration(time_delta: datetime.timedelta) -> str:
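A minimal, hypothetical usage of the reworked helper's core call outside of any connector context (requires a running Dagger engine; the image and export path are arbitrary):

```python
import anyio
import dagger


async def main():
    async with dagger.Connection(dagger.Config()) as client:
        # One container per target platform; Dagger bundles them as variants
        # of a single multi-arch image at export time.
        container_variants = [
            client.container(platform=dagger.Platform("linux/amd64")).from_("bash:latest"),
            client.container(platform=dagger.Platform("linux/arm64")).from_("bash:latest"),
        ]
        export_success = await client.container().export(
            "/tmp/bash_multiarch.tar",
            platform_variants=container_variants,
            forced_compression=dagger.ImageLayerCompression.Gzip,
        )
        print("export succeeded" if export_success else "export failed")


anyio.run(main)
```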
diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml
index 13c7814009a9..e15e04b45a1d 100644
--- a/airbyte-ci/connectors/pipelines/pyproject.toml
+++ b/airbyte-ci/connectors/pipelines/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "pipelines"
-version = "2.8.0"
+version = "2.9.0"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte "]
diff --git a/airbyte-ci/connectors/pipelines/pytest.ini b/airbyte-ci/connectors/pipelines/pytest.ini
index 0bd08b038c23..b228671b5fa2 100644
--- a/airbyte-ci/connectors/pipelines/pytest.ini
+++ b/airbyte-ci/connectors/pipelines/pytest.ini
@@ -1,2 +1,4 @@
[pytest]
addopts = --cov=pipelines
+markers =
+ slow: marks tests as slow (deselect with '-m "not slow"')
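With the marker registered, the Docker-heavy cases added in this PR can be deselected locally, for example with `pytest -m "not slow"`, exactly as the marker description suggests.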
diff --git a/airbyte-ci/connectors/pipelines/tests/test_build_image/__init__.py b/airbyte-ci/connectors/pipelines/tests/test_build_image/__init__.py
new file mode 100644
index 000000000000..c941b3045795
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/tests/test_build_image/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py b/airbyte-ci/connectors/pipelines/tests/test_build_image/dummy_build_customization.py
similarity index 100%
rename from airbyte-ci/connectors/pipelines/tests/test_builds/dummy_build_customization.py
rename to airbyte-ci/connectors/pipelines/tests/test_build_image/dummy_build_customization.py
diff --git a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_python_connectors.py
similarity index 80%
rename from airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
rename to airbyte-ci/connectors/pipelines/tests/test_build_image/test_python_connectors.py
index 96d6fc79807e..bb8ac23a10ea 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_builds/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_python_connectors.py
@@ -7,6 +7,7 @@
import pytest
from pipelines.airbyte_ci.connectors.build_image.steps import build_customization, python_connectors
from pipelines.airbyte_ci.connectors.context import ConnectorContext
+from pipelines.consts import BUILD_PLATFORMS
from pipelines.models.steps import StepStatus
pytestmark = [
@@ -15,9 +16,13 @@
class TestBuildConnectorImage:
+ @pytest.fixture
+ def all_platforms(self):
+ return BUILD_PLATFORMS
+
@pytest.fixture
def test_context(self, mocker):
- return mocker.Mock(secrets_to_mask=[])
+ return mocker.Mock(secrets_to_mask=[], targeted_platforms=BUILD_PLATFORMS)
@pytest.fixture
def test_context_with_connector_with_base_image(self, test_context):
@@ -45,7 +50,9 @@ def connector_with_base_image_with_build_customization(self, connector_with_base
(connector_with_base_image_no_build_customization.code_directory / "build_customization.py").unlink()
@pytest.fixture
- def test_context_with_real_connector_using_base_image(self, connector_with_base_image_no_build_customization, dagger_client):
+ def test_context_with_real_connector_using_base_image(
+ self, connector_with_base_image_no_build_customization, dagger_client, current_platform
+ ):
context = ConnectorContext(
pipeline_name="test build",
connector=connector_with_base_image_no_build_customization,
@@ -54,13 +61,14 @@ def test_context_with_real_connector_using_base_image(self, connector_with_base_
report_output_prefix="test",
is_local=True,
use_remote_secrets=True,
+ targeted_platforms=[current_platform],
)
context.dagger_client = dagger_client
return context
@pytest.fixture
def test_context_with_real_connector_using_base_image_with_build_customization(
- self, connector_with_base_image_with_build_customization, dagger_client
+ self, connector_with_base_image_with_build_customization, dagger_client, current_platform
):
context = ConnectorContext(
pipeline_name="test build",
@@ -70,6 +78,7 @@ def test_context_with_real_connector_using_base_image_with_build_customization(
report_output_prefix="test",
is_local=True,
use_remote_secrets=True,
+ targeted_platforms=[current_platform],
)
context.dagger_client = dagger_client
return context
@@ -82,7 +91,7 @@ def connector_without_base_image(self, all_connectors):
pytest.skip("No connector without a connectorBuildOptions.baseImage metadata found")
@pytest.fixture
- def test_context_with_real_connector_without_base_image(self, connector_without_base_image, dagger_client):
+ def test_context_with_real_connector_without_base_image(self, connector_without_base_image, dagger_client, current_platform):
context = ConnectorContext(
pipeline_name="test build",
connector=connector_without_base_image,
@@ -91,25 +100,28 @@ def test_context_with_real_connector_without_base_image(self, connector_without_
report_output_prefix="test",
is_local=True,
use_remote_secrets=True,
+ targeted_platforms=[current_platform],
)
context.dagger_client = dagger_client
return context
- async def test__run_using_base_image_with_mocks(self, mocker, test_context_with_connector_with_base_image, current_platform):
+ async def test__run_using_base_image_with_mocks(self, mocker, test_context_with_connector_with_base_image, all_platforms):
container_built_from_base = mocker.AsyncMock()
mocker.patch.object(
python_connectors.BuildConnectorImages, "_build_from_base_image", mocker.AsyncMock(return_value=container_built_from_base)
)
mocker.patch.object(python_connectors.BuildConnectorImages, "get_step_result", mocker.AsyncMock())
- step = python_connectors.BuildConnectorImages(test_context_with_connector_with_base_image, current_platform)
+ step = python_connectors.BuildConnectorImages(test_context_with_connector_with_base_image)
step_result = await step._run()
- step._build_from_base_image.assert_called_once()
- container_built_from_base.with_exec.assert_called_once_with(["spec"])
+ assert step._build_from_base_image.call_count == len(all_platforms)
+ container_built_from_base.with_exec.assert_called_with(["spec"])
assert step_result.status is StepStatus.SUCCESS
- assert step_result.output_artifact[current_platform] == container_built_from_base
+ for platform in all_platforms:
+ assert step_result.output_artifact[platform] == container_built_from_base
+ @pytest.mark.slow
async def test_building_from_base_image_for_real(self, test_context_with_real_connector_using_base_image, current_platform):
- step = python_connectors.BuildConnectorImages(test_context_with_real_connector_using_base_image, current_platform)
+ step = python_connectors.BuildConnectorImages(test_context_with_real_connector_using_base_image)
step_result = await step._run()
assert step_result.status is StepStatus.SUCCESS
built_container = step_result.output_artifact[current_platform]
@@ -127,31 +139,31 @@ async def test_building_from_base_image_for_real(self, test_context_with_real_co
== test_context_with_real_connector_using_base_image.connector.metadata["dockerRepository"]
)
+ @pytest.mark.slow
async def test_building_from_base_image_with_customization_for_real(
self, test_context_with_real_connector_using_base_image_with_build_customization, current_platform
):
- step = python_connectors.BuildConnectorImages(
- test_context_with_real_connector_using_base_image_with_build_customization, current_platform
- )
+ step = python_connectors.BuildConnectorImages(test_context_with_real_connector_using_base_image_with_build_customization)
step_result = await step._run()
assert step_result.status is StepStatus.SUCCESS
built_container = step_result.output_artifact[current_platform]
assert await built_container.env_variable("MY_PRE_BUILD_ENV_VAR") == "my_pre_build_env_var_value"
assert await built_container.env_variable("MY_POST_BUILD_ENV_VAR") == "my_post_build_env_var_value"
- async def test__run_using_base_dockerfile_with_mocks(self, mocker, test_context_with_connector_without_base_image, current_platform):
+ async def test__run_using_base_dockerfile_with_mocks(self, mocker, test_context_with_connector_without_base_image, all_platforms):
container_built_from_dockerfile = mocker.AsyncMock()
mocker.patch.object(
python_connectors.BuildConnectorImages, "_build_from_dockerfile", mocker.AsyncMock(return_value=container_built_from_dockerfile)
)
- step = python_connectors.BuildConnectorImages(test_context_with_connector_without_base_image, current_platform)
+ step = python_connectors.BuildConnectorImages(test_context_with_connector_without_base_image)
step_result = await step._run()
- step._build_from_dockerfile.assert_called_once()
- container_built_from_dockerfile.with_exec.assert_called_once_with(["spec"])
+ assert step._build_from_dockerfile.call_count == len(all_platforms)
+ container_built_from_dockerfile.with_exec.assert_called_with(["spec"])
assert step_result.status is StepStatus.SUCCESS
- assert step_result.output_artifact[current_platform] == container_built_from_dockerfile
+ for platform in all_platforms:
+ assert step_result.output_artifact[platform] == container_built_from_dockerfile
- async def test_building_from_dockerfile_for_real(self, test_context_with_real_connector_without_base_image, current_platform):
- step = python_connectors.BuildConnectorImages(test_context_with_real_connector_without_base_image, current_platform)
+ async def test_building_from_dockerfile_for_real(self, test_context_with_real_connector_without_base_image):
+ step = python_connectors.BuildConnectorImages(test_context_with_real_connector_without_base_image)
step_result = await step._run()
assert step_result.status is StepStatus.SUCCESS
diff --git a/airbyte-ci/connectors/pipelines/tests/test_build_image/test_steps/test_common.py b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_steps/test_common.py
new file mode 100644
index 000000000000..aabef3803374
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/tests/test_build_image/test_steps/test_common.py
@@ -0,0 +1,89 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+import os
+from typing import Dict
+
+import dagger
+import docker
+import pytest
+from pipelines.airbyte_ci.connectors.build_image.steps import common
+from pipelines.consts import BUILD_PLATFORMS
+from pipelines.models.steps import StepStatus
+
+pytestmark = [
+ pytest.mark.anyio,
+]
+
+
+@pytest.mark.slow
+class TestLoadContainerToLocalDockerHost:
+ @pytest.fixture(scope="class")
+ def certified_connector(self, all_connectors):
+ for connector in all_connectors:
+ if connector.support_level == "certified":
+ return connector
+ pytest.skip("No certified connector found")
+
+ @pytest.fixture
+ def built_containers(self, dagger_client, certified_connector) -> Dict[dagger.Platform, dagger.Container]:
+ return {
+ platform: dagger_client.container(platform=platform).from_(f'{certified_connector.metadata["dockerRepository"]}:latest')
+ for platform in BUILD_PLATFORMS
+ }
+
+ @pytest.fixture
+ def test_context(self, mocker, dagger_client, certified_connector, tmp_path):
+ return mocker.Mock(
+ secrets_to_mask=[], dagger_client=dagger_client, connector=certified_connector, host_image_export_dir_path=tmp_path
+ )
+
+ @pytest.fixture
+ def step(self, test_context, built_containers):
+ return common.LoadContainerToLocalDockerHost(test_context, built_containers)
+
+ @pytest.fixture
+ def bad_docker_host(self):
+ original_docker_host = os.environ.get("DOCKER_HOST")
+ yield "tcp://localhost:9999"
+ if original_docker_host:
+ os.environ["DOCKER_HOST"] = original_docker_host
+ else:
+ del os.environ["DOCKER_HOST"]
+
+ async def test_run(self, test_context, step):
+ """Test that the step runs successfully and that the image is loaded in the local docker host."""
+ docker_client = docker.from_env()
+ step.IMAGE_TAG = "test-load-container"
+ try:
+ docker_client.images.remove(f"{test_context.connector.metadata['dockerRepository']}:{step.IMAGE_TAG}")
+ except docker.errors.ImageNotFound:
+ pass
+ result = await step.run()
+ assert result.status is StepStatus.SUCCESS
+ docker_client.images.get(f"{test_context.connector.metadata['dockerRepository']}:{step.IMAGE_TAG}")
+ docker_client.images.remove(f"{test_context.connector.metadata['dockerRepository']}:{step.IMAGE_TAG}")
+
+ async def test_run_export_failure(self, step, mocker):
+ """Test that the step fails if the export of the container fails."""
+ mocker.patch.object(common, "export_containers_to_tarball", return_value=(None, None))
+ result = await step.run()
+ assert result.status is StepStatus.FAILURE
+ assert "Failed to export the connector image" in result.stderr
+
+ async def test_run_connection_error(self, step, bad_docker_host):
+ """Test that the step fails if the connection to the docker host fails."""
+ os.environ["DOCKER_HOST"] = bad_docker_host
+ result = await step.run()
+ assert result.status is StepStatus.FAILURE
+ assert "Something went wrong while interacting with the local docker client" in result.stderr
+
+ async def test_run_import_failure(self, step, mocker):
+ """Test that the step fails if the docker import of the tar fails."""
+ mock_docker_client = mocker.MagicMock()
+ mock_docker_client.api.import_image_from_file.return_value = "bad response"
+ mocker.patch.object(common.docker, "from_env", return_value=mock_docker_client)
+ result = await step.run()
+ assert result.status is StepStatus.FAILURE
+ assert "Failed to import the connector image" in result.stderr
diff --git a/airbyte-ci/connectors/pipelines/tests/test_helpers/__init__.py b/airbyte-ci/connectors/pipelines/tests/test_helpers/__init__.py
new file mode 100644
index 000000000000..c941b3045795
--- /dev/null
+++ b/airbyte-ci/connectors/pipelines/tests/test_helpers/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-ci/connectors/pipelines/tests/test_utils.py b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py
similarity index 80%
rename from airbyte-ci/connectors/pipelines/tests/test_utils.py
rename to airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py
index 9d9328f38417..31c6434da797 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_utils.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_helpers/test_utils.py
@@ -5,6 +5,7 @@
from pathlib import Path
from unittest import mock
+import dagger
import pytest
from connector_ops.utils import Connector, ConnectorLanguage
from pipelines import consts
@@ -190,3 +191,48 @@ def test_sh_dash_c():
assert utils.sh_dash_c(["foo", "bar"]) == ["sh", "-c", "set -o xtrace && foo && bar"]
assert utils.sh_dash_c(["foo"]) == ["sh", "-c", "set -o xtrace && foo"]
assert utils.sh_dash_c([]) == ["sh", "-c", "set -o xtrace"]
+
+
+@pytest.mark.anyio
+@pytest.mark.parametrize("tar_file_name", [None, "custom_tar_name.tar"])
+async def test_export_containers_to_tarball(mocker, dagger_client, tmp_path, tar_file_name):
+ context = mocker.Mock(
+ dagger_client=dagger_client,
+ connector=mocker.Mock(technical_name="my_connector"),
+ host_image_export_dir_path=tmp_path,
+ git_revision="my_git_revision",
+ )
+ container_variants = [
+ dagger_client.container(platform=dagger.Platform("linux/arm64")).from_("bash:latest"),
+ dagger_client.container(platform=dagger.Platform("linux/amd64")).from_("bash:latest"),
+ ]
+ expected_tar_file_path = tmp_path / "my_connector_my_git_revision.tar" if tar_file_name is None else tmp_path / tar_file_name
+ exported_tar_file, exported_tar_file_path = await utils.export_containers_to_tarball(
+ context, container_variants, tar_file_name=tar_file_name
+ )
+ assert exported_tar_file_path == expected_tar_file_path
+ assert await exported_tar_file.size() == expected_tar_file_path.stat().st_size
+
+
+@pytest.mark.anyio
+async def test_export_containers_to_tarball_failure(mocker, tmp_path):
+ mock_dagger_client = mocker.Mock()
+ mock_export = mocker.AsyncMock(return_value=False)
+ mock_dagger_client.container.return_value.export = mock_export
+
+ context = mocker.Mock(
+ dagger_client=mock_dagger_client,
+ connector=mocker.Mock(technical_name="my_connector"),
+ host_image_export_dir_path=tmp_path,
+ git_revision="my_git_revision",
+ )
+
+ container_variants = mocker.Mock()
+ exported_tar_file, exported_tar_file_path = await utils.export_containers_to_tarball(context, container_variants)
+ mock_export.assert_called_once_with(
+ str(tmp_path / "my_connector_my_git_revision.tar"),
+ platform_variants=container_variants,
+ forced_compression=dagger.ImageLayerCompression.Gzip,
+ )
+ assert exported_tar_file is None
+ assert exported_tar_file_path is None
diff --git a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
index 8bcde17e715e..da63c33fb01c 100644
--- a/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
+++ b/airbyte-ci/connectors/pipelines/tests/test_tests/test_python_connectors.py
@@ -28,7 +28,7 @@ def certified_connector_with_setup(self, all_connectors):
pytest.skip("No certified connector with setup.py found.")
@pytest.fixture
- def context_for_certified_connector_with_setup(self, certified_connector_with_setup, dagger_client):
+ def context_for_certified_connector_with_setup(self, certified_connector_with_setup, dagger_client, current_platform):
context = ConnectorContext(
pipeline_name="test unit tests",
connector=certified_connector_with_setup,
@@ -37,6 +37,7 @@ def context_for_certified_connector_with_setup(self, certified_connector_with_se
report_output_prefix="test",
is_local=True,
use_remote_secrets=True,
+ targeted_platforms=[current_platform],
)
context.dagger_client = dagger_client
context.connector_secrets = {}
@@ -44,11 +45,11 @@ def context_for_certified_connector_with_setup(self, certified_connector_with_se
@pytest.fixture
async def certified_container_with_setup(self, context_for_certified_connector_with_setup, current_platform):
- result = await BuildConnectorImages(context_for_certified_connector_with_setup, current_platform).run()
+ result = await BuildConnectorImages(context_for_certified_connector_with_setup).run()
return result.output_artifact[current_platform]
@pytest.fixture
- def context_for_connector_with_poetry(self, connector_with_poetry, dagger_client):
+ def context_for_connector_with_poetry(self, connector_with_poetry, dagger_client, current_platform):
context = ConnectorContext(
pipeline_name="test unit tests",
connector=connector_with_poetry,
@@ -57,6 +58,7 @@ def context_for_connector_with_poetry(self, connector_with_poetry, dagger_client
report_output_prefix="test",
is_local=True,
use_remote_secrets=True,
+ targeted_platforms=[current_platform],
)
context.dagger_client = dagger_client
context.connector_secrets = {}
@@ -64,7 +66,7 @@ def context_for_connector_with_poetry(self, connector_with_poetry, dagger_client
@pytest.fixture
async def container_with_poetry(self, context_for_connector_with_poetry, current_platform):
- result = await BuildConnectorImages(context_for_connector_with_poetry, current_platform).run()
+ result = await BuildConnectorImages(context_for_connector_with_poetry).run()
return result.output_artifact[current_platform]
async def test__run_for_setup_py(self, context_for_certified_connector_with_setup, certified_container_with_setup):