From d9dbf84481994fe7d606fac93cc20c7da9f7ad52 Mon Sep 17 00:00:00 2001 From: Evan Tahler Date: Tue, 6 Feb 2024 12:42:03 -0800 Subject: [PATCH] Remove connector ops team (#34867) Co-authored-by: Augustin --- .github/CODEOWNERS | 6 +- .github/workflows/publish-command.yml | 48 ----- .../connector_ops/required_reviewer_checks.py | 6 +- airbyte-ci/connectors/pipelines/README.md | 183 +++++++++++------- .../airbyte_ci/connectors/publish/context.py | 2 +- .../connectors/pipelines/pyproject.toml | 2 +- 6 files changed, 116 insertions(+), 131 deletions(-) delete mode 100644 .github/workflows/publish-command.yml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 1c76017e418d..b3b9368f31d4 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -9,7 +9,7 @@ # CDK and Connector Acceptance Tests /airbyte-cdk/python @airbytehq/connector-extensibility /airbyte-integrations/connector-templates/ @airbytehq/connector-extensibility -/airbyte-integrations/bases/connector-acceptance-test/ @airbytehq/connector-operations @lazebnyi @oustynova +/airbyte-integrations/bases/connector-acceptance-test/ @airbytehq/connector-extensibility @lazebnyi @oustynova # Protocol related items /docs/understanding-airbyte/airbyte-protocol.md @airbytehq/protocol-reviewers @@ -62,7 +62,7 @@ airbyte-cdk/java/airbyte-cdk/typing-deduping/ @airbytehq/destinations /airbyte-integrations/connectors/destination-tidb/ @airbytehq/destinations # Build customization file change -/airbyte-integrations/connectors/**/build_customization.py @airbytehq/connector-operations +/airbyte-integrations/connectors/**/build_customization.py @airbytehq/connector-extensibility # airbyte-ci -/airbyte-ci @airbytehq/connector-operations +/airbyte-ci @airbytehq/connector-extensibility diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml deleted file mode 100644 index a99757ba4ebe..000000000000 --- a/.github/workflows/publish-command.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: Deprecation message for publish slash command -on: - workflow_dispatch: - inputs: - repo: - description: "Repo to check out code from. Defaults to the main airbyte repo. Set this when building connectors from forked repos." - required: false - default: "airbytehq/airbyte" - gitref: - description: "The git ref to check out from the specified repository." - required: false - default: master - connector: - description: "Airbyte Connector" - required: true - comment-id: - description: "The comment-id of the slash command. Used to update the comment with the status." - required: false - parallel: - description: "Switching this to true will spin up 5 build agents instead of 1 and allow multi connector publishes to run in parallel" - required: true - default: "false" - run-tests: - description: "Should run tests when publishing" - required: true - default: "true" - pre-release: - description: "Should publish a pre-release version" - required: true - default: "false" - -jobs: - write-deprecation-message: - name: Set up git comment - if: github.event.inputs.comment-id - runs-on: ubuntu-latest - steps: - - name: Print deprecation message - if: github.event.inputs.comment-id - uses: peter-evans/create-or-update-comment@v1 - with: - comment-id: ${{ github.event.inputs.comment-id }} - body: | - > :warning: The publish slash command is now deprecated.
- The connector publication happens on merge to the master branch.
- Please use /legacy-publish if you need to publish normalization images.
- Please join the #connector-publish-updates slack channel to track ongoing publish pipelines.
- Please reach out to the @dev-connector-ops team if you need support in publishing a connector.
diff --git a/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py b/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py
index 3d96341910fd..d2575314e633 100644
--- a/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py
+++ b/airbyte-ci/connectors/connector_ops/connector_ops/required_reviewer_checks.py
@@ -7,9 +7,9 @@
 import yaml
 from connector_ops import utils

-BACKWARD_COMPATIBILITY_REVIEWERS = {"connector-operations", "connector-extensibility"}
-TEST_STRICTNESS_LEVEL_REVIEWERS = {"connector-operations"}
-BYPASS_REASON_REVIEWERS = {"connector-operations"}
+BACKWARD_COMPATIBILITY_REVIEWERS = {"connector-extensibility"}
+TEST_STRICTNESS_LEVEL_REVIEWERS = {"connector-extensibility"}
+BYPASS_REASON_REVIEWERS = {"connector-extensibility"}
 STRATEGIC_PYTHON_CONNECTOR_REVIEWERS = {"gl-python"}
 BREAKING_CHANGE_REVIEWERS = {"breaking-change-reviewers"}
 REVIEW_REQUIREMENTS_FILE_PATH = ".github/connector_org_review_requirements.yaml"
diff --git a/airbyte-ci/connectors/pipelines/README.md b/airbyte-ci/connectors/pipelines/README.md
index 2f7a05cad0fc..f18fdc144c05 100644
--- a/airbyte-ci/connectors/pipelines/README.md
+++ b/airbyte-ci/connectors/pipelines/README.md
@@ -2,12 +2,15 @@

 ## What is it?

-`airbyte-ci` is a command line interface to run CI/CD pipelines.
-The goal of this CLI is to offer developers a tool to run these pipelines locally and in a CI context with the same guarantee.
-It can prevent unnecessary commit -> push cycles developers typically go through when they when to test their changes against a remote CI.
-This is made possible thanks to the use of [Dagger](https://dagger.io), a CI/CD engine relying on Docker Buildkit to provide reproducible builds.
-Our pipeline are declared with Python code, the main entrypoint is [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connector_ops/connector_ops/pipelines/commands/airbyte_ci.py).
-This documentation should be helpful for both local and CI use of the CLI. We indeed [power connector testing in the CI with this CLI](https://github.com/airbytehq/airbyte/blob/master/.github/workflows/connector_integration_test_single_dagger.yml#L78).
+`airbyte-ci` is a command line interface to run CI/CD pipelines. The goal of this CLI is to offer
+developers a tool to run these pipelines locally and in a CI context with the same guarantee. It can
+prevent unnecessary commit -> push cycles developers typically go through when they want to test
+their changes against a remote CI. This is made possible thanks to the use of
+[Dagger](https://dagger.io), a CI/CD engine relying on Docker Buildkit to provide reproducible
+builds. Our pipelines are declared with Python code, the main entrypoint is
+[here](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connector_ops/connector_ops/pipelines/commands/airbyte_ci.py).
+This documentation should be helpful for both local and CI use of the CLI. We indeed
+[power connector testing in the CI with this CLI](https://github.com/airbytehq/airbyte/blob/master/.github/workflows/connector_integration_test_single_dagger.yml#L78).

 ## How to install

@@ -27,8 +30,8 @@ make tools.airbyte-ci.install
 ### Setting up connector secrets access

 If you plan to use Airbyte CI to run CAT (Connector Acceptance Tests), we recommend setting up GSM
-access so that Airbyte CI can pull remote secrets from GSM. 
For setup instructions, see the -CI Credentials package (which Airbyte CI uses under the hood) README's +access so that Airbyte CI can pull remote secrets from GSM. For setup instructions, see the CI +Credentials package (which Airbyte CI uses under the hood) README's [Get GSM Access](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/ci_credentials/README.md#get-gsm-access) instructions. @@ -81,29 +84,36 @@ poetry shell cd ../../ ``` -**Alternatively**, you can install airbyte-ci with pipx so that the entrypoint is available in your PATH: +**Alternatively**, you can install airbyte-ci with pipx so that the entrypoint is available in your +PATH: ```bash make tools.airbyte-ci.install ``` -However, this will not automatically install the dependencies for the local dependencies of airbyte-ci, or respect the lockfile. +However, this will not automatically install the dependencies for the local dependencies of +airbyte-ci, or respect the lockfile. Its often best to use the `poetry` steps instead. #### Running Tests From `airbyte-ci/connectors/pipelines`: + ```bash poetry run pytest tests ``` You can also run a subset of tests: + ```bash poetry run pytest pipelines/models/steps.py ``` -More options, such as running test by keyword matching, are available - see the [pytest CLI documentation](https://docs.pytest.org/en/6.2.x/usage.html) for all the available options.``` +More options, such as running test by keyword matching, are available - see the +[pytest CLI documentation](https://docs.pytest.org/en/6.2.x/usage.html) for all the available +options.``` + #### Checking Code Format (Pipelines) ```bash @@ -153,7 +163,8 @@ At this point you can run `airbyte-ci` commands. ### `airbyte-ci` command group -**The main command group option has sensible defaults. In local use cases you're not likely to pass options to the `airbyte-ci` command group.** +**The main command group option has sensible defaults. In local use cases you're not likely to pass +options to the `airbyte-ci` command group.** #### Options @@ -172,7 +183,6 @@ At this point you can run `airbyte-ci` commands. | `--pipeline-start-timestamp` | Current epoch time | `CI_PIPELINE_START_TIMESTAMP` | Start time of the pipeline as epoch time. Used for pipeline run duration computation. | | `--show-dagger-logs/--hide-dagger-logs` | `--hide-dagger-logs` | | Flag to show or hide the dagger logs. | - ### `connectors` command subgroup Available commands: @@ -230,17 +240,13 @@ Run a test pipeline for one or multiple connectors. 
#### Examples -Test a single connector: -`airbyte-ci connectors --name=source-pokeapi test` +Test a single connector: `airbyte-ci connectors --name=source-pokeapi test` -Test multiple connectors: -`airbyte-ci connectors --name=source-pokeapi --name=source-bigquery test` +Test multiple connectors: `airbyte-ci connectors --name=source-pokeapi --name=source-bigquery test` -Test certified connectors: -`airbyte-ci connectors --support-level=certified test` +Test certified connectors: `airbyte-ci connectors --support-level=certified test` -Test connectors changed on the current branch: -`airbyte-ci connectors --modified test` +Test connectors changed on the current branch: `airbyte-ci connectors --modified test` Run acceptance test only on the modified connectors, just run its full refresh tests: `airbyte-ci connectors --modified test --only-step="acceptance" --acceptance.-k=test_full_refresh` @@ -283,38 +289,42 @@ flowchart TD #### Options | Option | Multiple | Default value | Description | -| ------------------------------------------------------- | -------- | ------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| ------------------------------------------------------- | -------- | ------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | | `--skip-step/-x` | True | | Skip steps by id e.g. `-x unit -x acceptance` | | `--only-step/-k` | True | | Only run specific steps by id e.g. `-k unit -k acceptance` | | `--fail-fast` | False | False | Abort after any tests fail, rather than continuing to run additional tests. Use this setting to confirm a known bug is fixed (or not), or when you only require a pass/fail result. | | `--code-tests-only` | True | False | Skip any tests not directly related to code updates. For instance, metadata checks, version bump checks, changelog verification, etc. Use this setting to help focus on code quality during development. | | `--concurrent-cat` | False | False | Make CAT tests run concurrently using pytest-xdist. Be careful about source or destination API rate limits. | | `--.=` | True | | You can pass extra parameters for specific test steps. More details in the extra parameters section below | -| `--ci-requirements` | False | | | Output the CI requirements as a JSON payload. It is used to determine the CI runner to use. +| `--ci-requirements` | False | | | Output the CI requirements as a JSON payload. It is used to determine the CI runner to use. | Note: -- The above options are implemented for Java connectors but may not be available for Python connectors. If an option is not supported, the pipeline will not fail but instead the 'default' behavior will be executed. +- The above options are implemented for Java connectors but may not be available for Python + connectors. If an option is not supported, the pipeline will not fail but instead the 'default' + behavior will be executed. #### Extra parameters + You can pass extra parameters to the following steps: -* `unit` -* `integration` -* `acceptance` -This allows you to override the default parameters of these steps. 
-For example, you can only run the `test_read` test of the acceptance test suite with: -`airbyte-ci connectors --name=source-pokeapi test --acceptance.-k=test_read` -Here the `-k` parameter is passed to the pytest command running acceptance tests. -Please keep in mind that the extra parameters are not validated by the CLI: if you pass an invalid parameter, you'll face a late failure during the pipeline execution. +- `unit` +- `integration` +- `acceptance` + +This allows you to override the default parameters of these steps. For example, you can only run the +`test_read` test of the acceptance test suite with: +`airbyte-ci connectors --name=source-pokeapi test --acceptance.-k=test_read` Here the `-k` parameter +is passed to the pytest command running acceptance tests. Please keep in mind that the extra +parameters are not validated by the CLI: if you pass an invalid parameter, you'll face a late +failure during the pipeline execution. ### `connectors build` command -Run a build pipeline for one or multiple connectors and export the built docker image to the local docker host. -It's mainly purposed for local use. +Run a build pipeline for one or multiple connectors and export the built docker image to the local +docker host. It's mainly purposed for local use. -Build a single connector: -`airbyte-ci connectors --name=source-pokeapi build` +Build a single connector: `airbyte-ci connectors --name=source-pokeapi build` Build a single connector with a custom image tag: `airbyte-ci connectors --name=source-pokeapi build --tag=my-custom-tag` @@ -330,11 +340,9 @@ You will get: Build multiple connectors: `airbyte-ci connectors --name=source-pokeapi --name=source-bigquery build` -Build certified connectors: -`airbyte-ci connectors --support-level=certified build` +Build certified connectors: `airbyte-ci connectors --support-level=certified build` -Build connectors changed on the current branch: -`airbyte-ci connectors --modified build` +Build connectors changed on the current branch: `airbyte-ci connectors --modified build` #### What it runs @@ -378,8 +386,8 @@ flowchart TD ### `connectors publish` command -Run a publish pipeline for one or multiple connectors. -It's mainly purposed for CI use to release a connector update. +Run a publish pipeline for one or multiple connectors. It's mainly purposed for CI use to release a +connector update. ### Examples @@ -397,10 +405,9 @@ Publish all connectors modified in the head commit: `airbyte-ci connectors --mod | `--slack-webhook` | False | | `SLACK_WEBHOOK` | The Slack webhook URL to send notifications to. | | `--slack-channel` | False | | `SLACK_CHANNEL` | The Slack channel name to send notifications to. | | `--ci-requirements` | False | | | Output the CI requirements as a JSON payload. It is used to determine the CI runner to use. | -| `--python-registry-token` | False | | `PYTHON_REGISTRY_TOKEN` | The API token to authenticate with the registry. For pypi, the `pypi-` prefix needs to be specified | -| `--python-registry-url` | False | https://upload.pypi.org/legacy/ | `PYTHON_REGISTRY_URL` | The python registry to publish to. Defaults to main pypi | -| `--python-registry-check-url` | False | https://pypi.org/pypi | `PYTHON_REGISTRY_CHECK_URL` | The python registry url to check whether a package is published already | - +| `--python-registry-token` | False | | `PYTHON_REGISTRY_TOKEN` | The API token to authenticate with the registry. 
For pypi, the `pypi-` prefix needs to be specified | +| `--python-registry-url` | False | https://upload.pypi.org/legacy/ | `PYTHON_REGISTRY_URL` | The python registry to publish to. Defaults to main pypi | +| `--python-registry-check-url` | False | https://pypi.org/pypi | `PYTHON_REGISTRY_CHECK_URL` | The python registry url to check whether a package is published already | I've added an empty "Default" column, and you can fill in the default values as needed. @@ -422,15 +429,20 @@ flowchart TD #### Python registry publishing -If `remoteRegistries.pypi.enabled` in the connector metadata is set to `true`, the connector will be published to the python registry. -To do so, the `--python-registry-token` and `--python-registry-url` options are used to authenticate with the registry and publish the connector. -If the current version of the connector is already published to the registry, the publish will be skipped (the `--python-registry-check-url` is used for the check). +If `remoteRegistries.pypi.enabled` in the connector metadata is set to `true`, the connector will be +published to the python registry. To do so, the `--python-registry-token` and +`--python-registry-url` options are used to authenticate with the registry and publish the +connector. If the current version of the connector is already published to the registry, the publish +will be skipped (the `--python-registry-check-url` is used for the check). On a pre-release, the connector will be published as a `.dev` version. -The `remoteRegistries.pypi.packageName` field holds the name of the used package name. It should be set to `airbyte-source-`. Certified Python connectors are required to have PyPI publishing enabled. +The `remoteRegistries.pypi.packageName` field holds the name of the used package name. It should be +set to `airbyte-source-`. Certified Python connectors are required to have PyPI +publishing enabled. An example `remoteRegistries` entry in a connector `metadata.yaml` looks like this: + ```yaml remoteRegistries: pypi: @@ -444,7 +456,8 @@ Bump the version of the selected connectors. ### Examples -Bump source-openweather: `airbyte-ci connectors --name=source-openweather bump_version patch ""` +Bump source-openweather: +`airbyte-ci connectors --name=source-openweather bump_version patch ""` #### Arguments @@ -460,7 +473,8 @@ Upgrade the CDK version of the selected connectors by updating the dependency in ### Examples -Upgrade for source-openweather: `airbyte-ci connectors --name=source-openweather upgrade_cdk ` +Upgrade for source-openweather: +`airbyte-ci connectors --name=source-openweather upgrade_cdk ` #### Arguments @@ -474,7 +488,8 @@ Modify the selected connector metadata to use the latest base image version. ### Examples -Upgrade the base image for source-openweather: `airbyte-ci connectors --name=source-openweather upgrade_base_image` +Upgrade the base image for source-openweather: +`airbyte-ci connectors --name=source-openweather upgrade_base_image` ### Options @@ -495,7 +510,8 @@ Make a connector using a Dockerfile migrate to the base image by: ### Examples -Migrate source-openweather to use the base image: `airbyte-ci connectors --name=source-openweather migrate_to_base_image` +Migrate source-openweather to use the base image: +`airbyte-ci connectors --name=source-openweather migrate_to_base_image` ### Arguments @@ -524,15 +540,20 @@ Available commands: ### `format check all` command -This command runs formatting checks, but does not format the code in place. 
It will exit 1 as soon as a failure is encountered. To fix errors, use `airbyte-ci format fix all`. +This command runs formatting checks, but does not format the code in place. It will exit 1 as soon +as a failure is encountered. To fix errors, use `airbyte-ci format fix all`. -Running `airbyte-ci format check` will run checks on all different types of code. Run `airbyte-ci format check --help` for subcommands to check formatting for only certain types of files. +Running `airbyte-ci format check` will run checks on all different types of code. Run +`airbyte-ci format check --help` for subcommands to check formatting for only certain types of +files. ### `format fix all` command -This command runs formatting checks and reformats any code that would be reformatted, so it's recommended to stage changes you might have before running this command. +This command runs formatting checks and reformats any code that would be reformatted, so it's +recommended to stage changes you might have before running this command. -Running `airbyte-ci format fix all` will format all of the different types of code. Run `airbyte-ci format fix --help` for subcommands to format only certain types of files. +Running `airbyte-ci format fix all` will format all of the different types of code. Run +`airbyte-ci format fix --help` for subcommands to format only certain types of files. ### `poetry` command subgroup @@ -548,13 +569,16 @@ Available commands: ### Examples -- Publish a python package: `airbyte-ci poetry --package-path=path/to/package publish --publish-name=my-package --publish-version="1.2.3" --python-registry-token="..." --registry-url="http://host.docker.internal:8012/"` +- Publish a python package: + `airbyte-ci poetry --package-path=path/to/package publish --publish-name=my-package --publish-version="1.2.3" --python-registry-token="..." --registry-url="http://host.docker.internal:8012/"` ### `publish` command -This command publishes poetry packages (using `pyproject.toml`) or python packages (using `setup.py`) to a python registry. +This command publishes poetry packages (using `pyproject.toml`) or python packages (using +`setup.py`) to a python registry. -For poetry packages, the package name and version can be taken from the `pyproject.toml` file or be specified as options. +For poetry packages, the package name and version can be taken from the `pyproject.toml` file or be +specified as options. #### Options @@ -573,8 +597,8 @@ Available commands: ### `metadata deploy orchestrator` command -This command deploys the metadata service orchestrator to production. -The `DAGSTER_CLOUD_METADATA_API_TOKEN` environment variable must be set. +This command deploys the metadata service orchestrator to production. The +`DAGSTER_CLOUD_METADATA_API_TOKEN` environment variable must be set. #### Example @@ -606,13 +630,14 @@ This command runs the Python tests for a airbyte-ci poetry package. | `--ci-requirements` | False | | | Output the CI requirements as a JSON payload. It is used to determine the CI runner to use. | #### Examples + You can pass multiple `-c/--poetry-run-command` options to run multiple commands. 
E.G.: running `pytest` and `mypy`: `airbyte-ci test airbyte-ci/connectors/pipelines --poetry-run-command='pytest tests' --poetry-run-command='mypy pipelines'` -E.G.: passing the environment variable `GCP_GSM_CREDENTIALS` environment variable to the container running the poetry command: -`airbyte-ci test airbyte-lib --pass-env-var='GCP_GSM_CREDENTIALS'` +E.G.: passing the environment variable `GCP_GSM_CREDENTIALS` environment variable to the container +running the poetry command: `airbyte-ci test airbyte-lib --pass-env-var='GCP_GSM_CREDENTIALS'` E.G.: running `pytest` on a specific test folder: `airbyte-ci tests airbyte-integrations/bases/connector-acceptance-test --poetry-run-command='pytest tests/unit_tests'` @@ -621,6 +646,7 @@ E.G.: running `pytest` on a specific test folder: | Version | PR | Description | | ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| 3.10.4 | [#34867](https://github.com/airbytehq/airbyte/pull/34867) | Remove connector ops team | | 3.10.3 | [#34836](https://github.com/airbytehq/airbyte/pull/34836) | Add check for python registry publishing enabled for certified python sources. | | 3.10.2 | [#34044](https://github.com/airbytehq/airbyte/pull/34044) | Add pypi validation testing. | | 3.10.1 | [#34756](https://github.com/airbytehq/airbyte/pull/34756) | Enable connectors tests in draft PRs. | @@ -756,8 +782,9 @@ E.G.: running `pytest` on a specific test folder: ## More info -This project is owned by the Connectors Operations team. -We share project updates and remaining stories before its release to production in this [EPIC](https://github.com/airbytehq/airbyte/issues/24403). +This project is owned by the Connectors Operations team. We share project updates and remaining +stories before its release to production in this +[EPIC](https://github.com/airbytehq/airbyte/issues/24403). # Troubleshooting @@ -810,13 +837,16 @@ make tools.airbyte-ci.install To fix this, you can either: -- Ensure that airbyte-ci is installed with pipx. Run `pipx list` to check if airbyte-ci is installed. +- Ensure that airbyte-ci is installed with pipx. Run `pipx list` to check if airbyte-ci is + installed. - Run `pipx ensurepath` to add the pipx binary directory to your PATH. -- Add the pipx binary directory to your PATH manually. The pipx binary directory is usually `~/.local/bin`. +- Add the pipx binary directory to your PATH manually. The pipx binary directory is usually + `~/.local/bin`. ### python3.10 not found -If you get the following error when running `pipx install --editable --force --python=python3.10 airbyte-ci/connectors/pipelines/`: +If you get the following error when running +`pipx install --editable --force --python=python3.10 airbyte-ci/connectors/pipelines/`: ```bash $ pipx install --editable --force --python=python3.10 airbyte-ci/connectors/pipelines/ @@ -828,13 +858,15 @@ It means that you don't have Python 3.10 installed on your system. To fix this, you can either: - Install Python 3.10 with pyenv. Run `pyenv install 3.10` to install the latest Python version. -- Install Python 3.10 with your system package manager. For instance, on Ubuntu you can run `sudo apt install python3.10`. -- Ensure that Python 3.10 is in your PATH. Run `which python3.10` to check if Python 3.10 is installed and in your PATH. +- Install Python 3.10 with your system package manager. 
For instance, on Ubuntu you can run + `sudo apt install python3.10`. +- Ensure that Python 3.10 is in your PATH. Run `which python3.10` to check if Python 3.10 is + installed and in your PATH. ### Any type of pipeline failure -First you should check that the version of the CLI you are using is the latest one. -You can check the version of the CLI with the `--version` option: +First you should check that the version of the CLI you are using is the latest one. You can check +the version of the CLI with the `--version` option: ```bash $ airbyte-ci --version @@ -847,7 +879,8 @@ and compare it with the version in the pyproject.toml file: $ cat airbyte-ci/connectors/pipelines/pyproject.toml | grep version ``` -If you get any type of pipeline failure, you can run the pipeline with the `--show-dagger-logs` option to get more information about the failure. +If you get any type of pipeline failure, you can run the pipeline with the `--show-dagger-logs` +option to get more information about the failure. ```bash $ airbyte-ci --show-dagger-logs connectors --name=source-pokeapi test diff --git a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py index a497629daec1..57473eee215b 100644 --- a/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py +++ b/airbyte-ci/connectors/pipelines/pipelines/airbyte_ci/connectors/publish/context.py @@ -133,5 +133,5 @@ def create_slack_message(self) -> str: assert self.report is not None, "Report should be set when state is successful" message += f"⏲️ Run duration: {format_duration(self.report.run_duration)}\n" if self.state is ContextState.FAILURE: - message += "\ncc. " # @dev-connector-ops + message += "\ncc. " return message diff --git a/airbyte-ci/connectors/pipelines/pyproject.toml b/airbyte-ci/connectors/pipelines/pyproject.toml index 70d4e4c0affa..799d0db007e3 100644 --- a/airbyte-ci/connectors/pipelines/pyproject.toml +++ b/airbyte-ci/connectors/pipelines/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "pipelines" -version = "3.10.3" +version = "3.10.4" description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines" authors = ["Airbyte "]